diff --git a/.github/test-mvn-deploy/pom.xml b/.github/test-mvn-deploy/pom.xml index d7ccbafa7f..a7f2e726a0 100644 --- a/.github/test-mvn-deploy/pom.xml +++ b/.github/test-mvn-deploy/pom.xml @@ -1,29 +1,28 @@ - - 4.0.0 + + + 4.0.0 - com.apicurio.test - verify-registry-maven-deploy - 1.0.0-SNAPSHOT - verify-registry-maven-deploy - Simple Project to verify the deployment of Apicurio Registry JARS + com.apicurio.test + verify-registry-maven-deploy + 1.0.0-SNAPSHOT + verify-registry-maven-deploy + Simple Project to verify the deployment of Apicurio Registry JARS - - - TBD - + + + TBD + - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${version.apicurio} - - - io.apicurio - apicurio-registry-java-sdk - ${version.apicurio} - - + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${version.apicurio} + + + io.apicurio + apicurio-registry-java-sdk + ${version.apicurio} + + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b544352f04..2e4c264f42 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,8 +1,8 @@ # Contributing guide -**Want to contribute? Great!** +**Want to contribute? Great!** We try to make it easy, and all contributions, even the smaller ones, are more than welcome. -This includes bug reports, fixes, documentation, examples... +This includes bug reports, fixes, documentation, examples... But first, read this page (including the small print at the end). * [Legal](#legal) @@ -66,7 +66,7 @@ Because we are all humans, and to ensure Apicurio Registry is stable for everyon ### Tests and documentation are not optional -Don't forget to include tests in your pull requests. +Don't forget to include tests in your pull requests. Also don't forget the documentation (reference documentation, javadoc...). Be sure to test your pull request using all storage variants: @@ -97,4 +97,3 @@ The important parts are as follows: ## The small print This project is an open source project. Please act responsibly, be nice, polite and enjoy! 
- diff --git a/README.md b/README.md index a6c9f6347d..e20af037a6 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ An API/Schema registry - stores and retrieves APIs and Schemas. This project supports several build configuration options that affect the produced executables. -By default, `mvn clean install` produces an executable JAR with the *dev* Quarkus configuration profile enabled, and *in-memory* persistence implementation. +By default, `mvn clean install` produces an executable JAR with the *dev* Quarkus configuration profile enabled, and *in-memory* persistence implementation. Apicurio Registry supports 4 persistence implementations: - In-Memory @@ -34,7 +34,7 @@ For this property, there are three possible values: Additionally, there are 2 main configuration profiles: - *dev* - suitable for development, and - *prod* - for production environment. - + ### Getting started (APIs) ``` @@ -42,7 +42,7 @@ Additionally, there are 2 main configuration profiles: cd app/ ../mvnw quarkus:dev ``` - + This should result in Quarkus and the in-memory registry starting up, with the REST APIs available on localhost port 8080: * [API documentation](http://localhost:8080/apis) @@ -78,7 +78,7 @@ The following parameters are available for executable files: ### SQL - By default, the application expects an H2 server running at `jdbc:h2:tcp://localhost:9123/mem:registry`. - For configuring the database kind and the datasource values, the following configuration options are available: - + | Option | Command argument | Env. 
variable | |---------------------------|---------------------------------|--------------------------------| | Registry SQL storage kind | `-Dapicurio.storage.sql.kind` | `APICURIO_STORAGE_SQL_KIND` | @@ -87,8 +87,8 @@ The following parameters are available for executable files: | DS Password | `-Dapicurio.datasource.password` | `APICURIO_DATASOURCE_PASSWORD` | To see additional options, visit: - - [Data Source config](https://quarkus.io/guides/datasource) - - [Data Source options](https://quarkus.io/guides/datasource-guide#configuration-reference) + - [Data Source config](https://quarkus.io/guides/datasource) + - [Data Source options](https://quarkus.io/guides/datasource-guide#configuration-reference) ### KafkaSQL `./mvnw clean install -Pprod -DskipTests` builds the application artifact. @@ -119,7 +119,7 @@ java \ This will start up the registry with the persistence managed by the external kafka cluster. ## Docker containers -Every time a commit is pushed to `main` an updated docker image is built and pushed to Docker +Every time a commit is pushed to `main` an updated docker image is built and pushed to Docker Hub. The image can be found in: * [apicurio-registry](https://hub.docker.com/r/apicurio/apicurio-registry) @@ -128,8 +128,8 @@ Run the above docker image like this: docker run -it -p 8080:8080 apicurio/apicurio-registry:latest-snapshot -The same configuration options are available for the docker containers, but only in the form of environment -variables (The command line parameters are for the `java` executable and at the moment it's not possible to +The same configuration options are available for the docker containers, but only in the form of environment +variables (The command line parameters are for the `java` executable and at the moment it's not possible to pass them into the container). Each docker image will support the environment variable configuration options documented above for their respective storage type. 
@@ -161,7 +161,7 @@ Run Apicurio Registry with Postgres: - Compile using `mvn clean install -DskipTests -Pprod -Ddocker` - - Then create a docker-compose file `test.yml`: + - Then create a docker-compose file `test.yml`: ```yaml version: '3.1' @@ -187,7 +187,7 @@ services: ## Security You can enable authentication for both the application REST APIs and the user interface using a server based -on OpenID Connect (OIDC). The same server realm and users are federated across the user interface and the +on OpenID Connect (OIDC). The same server realm and users are federated across the user interface and the REST APIs using Open ID Connect so that you only require one set of credentials. In order no enable this integration, you will need to set the following environment variables. @@ -221,7 +221,7 @@ For more information see the documentation on [how to configure security in Regi ## Eclipse IDE Some notes about using the Eclipse IDE with the Apicurio Registry codebase. Before -importing the registry into your workspace, we recommend some configuration of the +importing the registry into your workspace, we recommend some configuration of the Eclipse IDE. ### Lombok Integration @@ -239,7 +239,7 @@ We use the **maven-dependency-plugin** in a few places to unpack a maven module reactor into another module. For example, the `app` module unpacks the contents of the `ui` module to include/embed the user interface into the running application. Eclipse does not like this. To fix this, configure the Eclipse Maven "Lifecycle Mappings" -to ignore the usage of **maven-dependency-plugin**. +to ignore the usage of **maven-dependency-plugin**. * Open up **Window->Preferences** * Choose **Maven->Lifecycle Mappings** @@ -264,7 +264,7 @@ to ignore the usage of **maven-dependency-plugin**. 
``` -* Now go back into **Maven->Lifecycle Mappings** -> **Maven->Lifecycle Mappings** and click +* Now go back into **Maven->Lifecycle Mappings** -> **Maven->Lifecycle Mappings** and click the **Reload workspace lifecycle mappings metadata** button. * If you've already imported the Apicurio projects, select all of them and choose **Maven->Update Project**. @@ -273,5 +273,5 @@ the **Reload workspace lifecycle mappings metadata** button. We use some Google Protobuf files and a maven plugin to generate some Java classes that get stored in various modules' `target` directories. These are then recognized by m2e but are sometimes deleted during the Eclipse "clean" phase. To prevent Eclipse from -over-cleaning these files, find the **os-maven-plugin-1.6.2.jar** JAR in your +over-cleaning these files, find the **os-maven-plugin-1.6.2.jar** JAR in your `.m2/repository` directory and copy it into `$ECLIPSE_HOME/dropins`. diff --git a/TESTING.md b/TESTING.md index b4d49b2de6..edce27f0e2 100644 --- a/TESTING.md +++ b/TESTING.md @@ -9,7 +9,7 @@ Apicurio Registry testsuite has various types of tests: unit tests and integrati Quick tests that verify specific functionalities or components of the application. Each maven module can have it's own set of unit tests. For the Apicurio Registry app they can be found in `app/src/test` -Because Apicurio Registry is a Quarkus application we use `@QuarkusTest` for the unit tests, that allow us to run multiple different configurations of +Because Apicurio Registry is a Quarkus application we use `@QuarkusTest` for the unit tests, that allow us to run multiple different configurations of the application, easily provide mocks or external dependencies... QuarkusTest allows us to easily verify feature flags or config properties that change completely the behavior of the application. In order to do that we use `@QuarkusTestProfile` quite often. Unit tests are executed as part of the project build. 
You can build the project and run the tests by executing this command: diff --git a/app/pom.xml b/app/pom.xml index f7a4382265..2a0be34d7c 100644 --- a/app/pom.xml +++ b/app/pom.xml @@ -1,590 +1,587 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-app - jar - apicurio-registry-app + apicurio-registry-app + jar + apicurio-registry-app - - - + + + - - - - io.apicurio - apicurio-registry-common - + + + + io.apicurio + apicurio-registry-common + - - io.apicurio - apicurio-registry-utils-kafka - + + io.apicurio + apicurio-registry-utils-kafka + - - io.apicurio - apicurio-registry-schema-util-provider - + + io.apicurio + apicurio-registry-schema-util-provider + - - - io.apicurio - apicurio-common-app-components-core - + + + io.apicurio + apicurio-common-app-components-core + - - io.apicurio - apicurio-common-app-components-logging - + + io.apicurio + apicurio-common-app-components-logging + - - io.apicurio - apicurio-common-app-components-config - + + io.apicurio + apicurio-common-app-components-config + - - io.apicurio - apicurio-common-app-components-config-index - + + io.apicurio + apicurio-common-app-components-config-index + - - io.apicurio - apicurio-common-app-components-auth - - - io.apicurio - apicurio-common-rest-client-jdk - test - + + io.apicurio + apicurio-common-app-components-auth + + + io.apicurio + apicurio-common-rest-client-jdk + test + - - io.apicurio - apicurio-common-rest-client-vertx - + + io.apicurio + apicurio-common-rest-client-vertx + - - io.apicurio - apicurio-registry-utils-import-export - + + io.apicurio + apicurio-registry-utils-import-export + - - io.apicurio - apicurio-registry-protobuf-schema-utilities - + + io.apicurio + apicurio-registry-protobuf-schema-utilities + - - - io.quarkus - quarkus-undertow - - - io.quarkus - quarkus-jackson - - - io.quarkus - quarkus-resteasy-jackson - - - io.quarkus - 
quarkus-smallrye-health - - - io.quarkus - quarkus-micrometer-registry-prometheus - - - io.quarkus - quarkus-vertx - - - io.quarkus - quarkus-oidc - - - io.quarkus - quarkus-elytron-security-properties-file - - - io.quarkus - quarkus-smallrye-jwt - - - io.quarkus - quarkus-scheduler - - - io.quarkus - quarkus-smallrye-context-propagation - - - io.quarkus - quarkus-resteasy-client - - - io.quarkus - quarkus-resteasy-client-jackson - - - io.quarkus - quarkus-logging-json - + + + io.quarkus + quarkus-undertow + + + io.quarkus + quarkus-jackson + + + io.quarkus + quarkus-resteasy-jackson + + + io.quarkus + quarkus-smallrye-health + + + io.quarkus + quarkus-micrometer-registry-prometheus + + + io.quarkus + quarkus-vertx + + + io.quarkus + quarkus-oidc + + + io.quarkus + quarkus-elytron-security-properties-file + + + io.quarkus + quarkus-smallrye-jwt + + + io.quarkus + quarkus-scheduler + + + io.quarkus + quarkus-smallrye-context-propagation + + + io.quarkus + quarkus-resteasy-client + + + io.quarkus + quarkus-resteasy-client-jackson + + + io.quarkus + quarkus-logging-json + - - io.quarkus - quarkus-smallrye-fault-tolerance - + + io.quarkus + quarkus-smallrye-fault-tolerance + - - - io.quarkus - quarkus-agroal - - - io.quarkus - quarkus-jdbc-postgresql - - - io.quarkus - quarkus-jdbc-h2 - - - io.quarkus - quarkus-jdbc-mssql - + + + io.quarkus + quarkus-agroal + + + io.quarkus + quarkus-jdbc-postgresql + + + io.quarkus + quarkus-jdbc-h2 + + + io.quarkus + quarkus-jdbc-mssql + - - org.eclipse.jgit - org.eclipse.jgit - + + org.eclipse.jgit + org.eclipse.jgit + - - commons-io - commons-io - - - io.strimzi - kafka-oauth-client - + + commons-io + commons-io + + + io.strimzi + kafka-oauth-client + - - - commons-codec - commons-codec - - - org.apache.commons - commons-lang3 - - - org.projectlombok - lombok - compile - - - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - - - snakeyaml - org.yaml - - - + + + commons-codec + commons-codec + + + org.apache.commons + 
commons-lang3 + + + org.projectlombok + lombok + compile + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + + + org.yaml + snakeyaml + + + - - org.yaml - snakeyaml - ${snakeyaml.version} - + + org.yaml + snakeyaml + ${snakeyaml.version} + - - com.google.guava - guava - + + com.google.guava + guava + - - - io.confluent - kafka-avro-serializer - test - - - io.confluent - kafka-protobuf-serializer - test - - - io.confluent - kafka-json-schema-serializer - test - - - io.confluent - kafka-connect-avro-converter - test - - - io.quarkus - quarkus-junit5 - test - - - io.quarkus - quarkus-test-common - test - - - org.junit.jupiter - junit-jupiter - test - - - org.junit.jupiter - junit-jupiter-params - test - - - io.rest-assured - rest-assured - test - - - io.apicurio - apicurio-registry-serdes-avro-serde - test - - - io.apicurio - apicurio-registry-serdes-protobuf-serde - test - - - io.apicurio - apicurio-registry-serdes-jsonschema-serde - test - - - io.apicurio - apicurio-registry-maven-plugin - maven-plugin - test - - - io.apicurio - apicurio-registry-utils-tests - test - - - io.apicurio - apicurio-registry-utils-kafka - test - test-jar - - - io.strimzi - strimzi-test-container - test - - - io.zonky.test - embedded-postgres - ${embedded-postgres.version} - test - - - org.testcontainers - mssqlserver - test - - - org.awaitility - awaitility - test - - + + + io.confluent + kafka-avro-serializer + test + + + io.confluent + kafka-protobuf-serializer + test + + + io.confluent + kafka-json-schema-serializer + test + + + io.confluent + kafka-connect-avro-converter + test + + + io.quarkus + quarkus-junit5 + test + + + io.quarkus + quarkus-test-common + test + + + org.junit.jupiter + junit-jupiter + test + + + org.junit.jupiter + junit-jupiter-params + test + + + io.rest-assured + rest-assured + test + + + io.apicurio + apicurio-registry-serdes-avro-serde + test + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + test + + + io.apicurio + 
apicurio-registry-serdes-jsonschema-serde + test + + + io.apicurio + apicurio-registry-maven-plugin + maven-plugin + test + + + io.apicurio + apicurio-registry-utils-tests + test + + + io.apicurio + apicurio-registry-utils-kafka + test-jar + test + + + io.strimzi + strimzi-test-container + test + + + io.zonky.test + embedded-postgres + ${embedded-postgres.version} + + test + + + org.testcontainers + mssqlserver + test + + + org.awaitility + awaitility + test + + - - - - src/main/resources - true - - - src/main/resources-unfiltered - false - - - - - src/test/resources - true - - - src/test/resources-unfiltered - false - - - - - io.quarkus - quarkus-maven-plugin - - - - build - - - - - - org.apache.maven.plugins - maven-surefire-plugin - - ${skipTests} - ${skipAppTests} - ${groups} - - org.jboss.logmanager.LogManager - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - org.apache.maven.plugins - maven-dependency-plugin - - - unpack-openapi - generate-resources - - unpack - - - - - ${project.groupId} - apicurio-registry-common - ${project.version} - jar - true - **/openapi.json - - - ${project.build.outputDirectory} - false - true - - - - - - io.apicurio - apicurio-common-app-components-maven-plugin - ${apicurio-common-app-components.version} - - - merge-test-properties - process-test-classes - - merge - - - ${project.build.testOutputDirectory}/application.properties - - ${project.build.outputDirectory}/application.properties - ${project.build.outputDirectory}/application-prod.properties - ${project.build.outputDirectory}/application-test.properties - - false - - - - merge-properties - prepare-package - - merge - - - ${project.build.outputDirectory}/application.properties - - ${project.build.outputDirectory}/application.properties - ${project.build.outputDirectory}/application-prod.properties - ${project.build.outputDirectory}/application-test.properties - - true - - - - - - org.apache.maven.plugins - maven-assembly-plugin - - - assembly 
- package - - single - - - ${project.artifactId}-${project.version} - true - - src/main/assembly/assembly.xml - - - 0755 - - ${tar.long.file.mode} - - - - - - kr.motd.maven - os-maven-plugin - 1.7.1 - - - initialize - - detect - - - - + + + + true + src/main/resources + + + false + src/main/resources-unfiltered + + + + + true + src/test/resources + + + false + src/test/resources-unfiltered + + + + + io.quarkus + quarkus-maven-plugin + + + + build + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + ${skipTests} + ${skipAppTests} + ${groups} + + org.jboss.logmanager.LogManager + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + unpack-openapi + + unpack + + generate-resources + + + + ${project.groupId} + apicurio-registry-common + ${project.version} + jar + true + **/openapi.json + + + ${project.build.outputDirectory} + false + true + + + + + + io.apicurio + apicurio-common-app-components-maven-plugin + ${apicurio-common-app-components.version} + + + merge-test-properties + + merge + + process-test-classes + + ${project.build.testOutputDirectory}/application.properties + + ${project.build.outputDirectory}/application.properties + ${project.build.outputDirectory}/application-prod.properties + ${project.build.outputDirectory}/application-test.properties + + false + + + + merge-properties + + merge + + prepare-package + + ${project.build.outputDirectory}/application.properties + + ${project.build.outputDirectory}/application.properties + ${project.build.outputDirectory}/application-prod.properties + ${project.build.outputDirectory}/application-test.properties + + true + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + assembly + + single + + package + + ${project.artifactId}-${project.version} + true + + src/main/assembly/assembly.xml + + + 0755 + + ${tar.long.file.mode} + + + + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + detect + + initialize + + 
+ - - org.xolstice.maven.plugins - protobuf-maven-plugin - ${proto-plugin.version} - true - - - gencode - generate-sources - - compile - test-compile - - - ./src/test/resources/schema - - com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} - - - - - - - org.apache.avro - avro-maven-plugin - ${avro.version} - - - generate-sources - - schema - - - String - - ${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaE.avsc - ${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaA.avsc - ${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaD.avsc - ${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaC.avsc - ${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaB.avsc - ${project.basedir}/src/test/resources/io/apicurio/registry/serde/LeadFallErstellen.avsc - ${project.basedir}/src/test/resources/io/apicurio/registry/ccompat/rest/subrecord.avsc - ${project.basedir}/src/test/resources/io/apicurio/registry/ccompat/rest/record.avsc - - ${project.basedir}/src/test/resources/io/apicurio/registry/serde/ - ${project.basedir}/target/generated-test-sources - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-dist - prepare-package - - copy-resources - - - ${project.build.outputDirectory} - - - ${project.basedir}/target/generated-test-sources/protobuf/ - false - - - - - - - - - - + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + + compile + test-compile + + generate-sources + + ./src/test/resources/schema + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + + schema + + generate-sources + + String + + ${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaE.avsc + ${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaA.avsc + 
${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaD.avsc + ${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaC.avsc + ${project.basedir}/src/test/resources/io/apicurio/registry/serde/AvroSchemaB.avsc + ${project.basedir}/src/test/resources/io/apicurio/registry/serde/LeadFallErstellen.avsc + ${project.basedir}/src/test/resources/io/apicurio/registry/ccompat/rest/subrecord.avsc + ${project.basedir}/src/test/resources/io/apicurio/registry/ccompat/rest/record.avsc + + ${project.basedir}/src/test/resources/io/apicurio/registry/serde/ + ${project.basedir}/target/generated-test-sources + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-dist + + copy-resources + + prepare-package + + ${project.build.outputDirectory} + + + ${project.basedir}/target/generated-test-sources/protobuf/ + false + + + + + + + + + - - - native - - - native - - - - - - maven-failsafe-plugin - - - - integration-test - verify - - - - ${project.build.directory}/${project.build.finalName}-runner - org.jboss.logmanager.LogManager - ${maven.home} - - - - - - - + + + native + + + native + + true false - - + + + + + maven-failsafe-plugin + + + + integration-test + verify + + + + ${project.build.directory}/${project.build.finalName}-runner + org.jboss.logmanager.LogManager + ${maven.home} + + + + + + + + - + diff --git a/app/src/main/java/io/apicurio/registry/ApicurioRegisterForReflection.java b/app/src/main/java/io/apicurio/registry/ApicurioRegisterForReflection.java index c149b3f496..34abf77905 100644 --- a/app/src/main/java/io/apicurio/registry/ApicurioRegisterForReflection.java +++ b/app/src/main/java/io/apicurio/registry/ApicurioRegisterForReflection.java @@ -3,809 +3,797 @@ import io.apicurio.rest.client.auth.AccessTokenResponse; import io.quarkus.runtime.annotations.RegisterForReflection; -@RegisterForReflection(targets = { - AccessTokenResponse.class, - // Needed for the JAXRSClient - org.apache.commons.logging.LogFactory.class, 
- org.apache.commons.logging.impl.LogFactoryImpl.class, - org.apache.commons.logging.impl.SimpleLog.class, - io.agroal.pool.ConnectionHandler[].class, - // The following list is generated running `jbang cli/tools/extractRegisterForReflection.java` - io.apicurio.datamodels.models.Components.class, - io.apicurio.datamodels.models.Contact.class, - io.apicurio.datamodels.models.Document.class, - io.apicurio.datamodels.models.ExternalDocumentation.class, - io.apicurio.datamodels.models.Info.class, - io.apicurio.datamodels.models.License.class, - io.apicurio.datamodels.models.NodeImpl.class, - io.apicurio.datamodels.models.OAuthFlow.class, - io.apicurio.datamodels.models.OAuthFlows.class, - io.apicurio.datamodels.models.Operation.class, - io.apicurio.datamodels.models.Parameter.class, - io.apicurio.datamodels.models.RootNode.class, - io.apicurio.datamodels.models.RootNodeImpl.class, - io.apicurio.datamodels.models.Schema.class, - io.apicurio.datamodels.models.SecurityRequirement.class, - io.apicurio.datamodels.models.SecurityScheme.class, - io.apicurio.datamodels.models.Server.class, - io.apicurio.datamodels.models.ServerVariable.class, - io.apicurio.datamodels.models.Tag.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiBinding.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiChannelBindings.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiChannelItem.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiChannels.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiComponents.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiContact.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiCorrelationID.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiDocument.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiExternalDocumentation.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiInfo.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiLicense.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiMessage.class, - 
io.apicurio.datamodels.models.asyncapi.AsyncApiMessageBindings.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiMessageExample.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiMessageTrait.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiOAuthFlow.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiOAuthFlows.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiOneOfMessages.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiOperation.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiOperationBindings.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiOperationTrait.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiParameter.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiParameters.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiSchema.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiSecurityRequirement.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiSecurityScheme.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiServer.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiServerBindings.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiServerVariable.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiServers.class, - io.apicurio.datamodels.models.asyncapi.AsyncApiTag.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Binding.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20BindingImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelBindings.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelItem.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelItemImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Channels.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelsImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Components.class, - 
io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ComponentsImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Contact.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ContactImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20CorrelationID.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20CorrelationIDImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Document.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20DocumentImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ExternalDocumentation.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ExternalDocumentationImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Info.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20InfoImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20License.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20LicenseImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Message.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageBindings.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageTrait.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OAuthFlow.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OAuthFlowImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OAuthFlows.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OAuthFlowsImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OneOfMessages.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OneOfMessagesImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Operation.class, - 
io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationBindings.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationTrait.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Parameter.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ParameterImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Parameters.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ParametersImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Schema.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SchemaImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SecurityRequirement.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SecurityRequirementImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SecurityScheme.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SecuritySchemeImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Server.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerBindings.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerVariable.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerVariableImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Servers.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServersImpl.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Tag.class, - io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20TagImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Binding.class, - 
io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21BindingImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelBindings.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelItem.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelItemImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Channels.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelsImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Components.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ComponentsImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Contact.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ContactImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21CorrelationID.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21CorrelationIDImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Document.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21DocumentImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ExternalDocumentation.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ExternalDocumentationImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Info.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21InfoImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21License.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21LicenseImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Message.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageBindings.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageTrait.class, - 
io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OAuthFlow.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OAuthFlowImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OAuthFlows.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OAuthFlowsImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OneOfMessages.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OneOfMessagesImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Operation.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationBindings.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationTrait.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Parameter.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ParameterImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Parameters.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ParametersImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Schema.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SchemaImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SecurityRequirement.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SecurityRequirementImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SecurityScheme.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SecuritySchemeImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Server.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerBindings.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerBindingsImpl.class, - 
io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerVariable.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerVariableImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Servers.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServersImpl.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Tag.class, - io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21TagImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Binding.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22BindingImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelBindings.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelItem.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelItemImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Channels.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelsImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Components.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ComponentsImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Contact.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ContactImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22CorrelationID.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22CorrelationIDImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Document.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22DocumentImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ExternalDocumentation.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ExternalDocumentationImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Info.class, - 
io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22InfoImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22License.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22LicenseImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Message.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageBindings.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageExample.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageExampleImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageTrait.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OAuthFlow.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OAuthFlowImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OAuthFlows.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OAuthFlowsImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OneOfMessages.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OneOfMessagesImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Operation.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationBindings.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationTrait.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Parameter.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ParameterImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Parameters.class, - 
io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ParametersImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Schema.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SchemaImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SecurityRequirement.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SecurityRequirementImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SecurityScheme.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SecuritySchemeImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Server.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerBindings.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerVariable.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerVariableImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Servers.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServersImpl.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Tag.class, - io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22TagImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Binding.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23BindingImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelBindings.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelItem.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelItemImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Channels.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelsImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Components.class, - 
io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ComponentsImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Contact.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ContactImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23CorrelationID.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23CorrelationIDImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Document.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23DocumentImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ExternalDocumentation.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ExternalDocumentationImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Info.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23InfoImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23License.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23LicenseImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Message.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageBindings.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageExample.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageExampleImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageTrait.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OAuthFlow.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OAuthFlowImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OAuthFlows.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OAuthFlowsImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OneOfMessages.class, - 
io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OneOfMessagesImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Operation.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationBindings.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationTrait.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Parameter.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ParameterImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Parameters.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ParametersImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Schema.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SchemaImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SecurityRequirement.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SecurityRequirementImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SecurityScheme.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SecuritySchemeImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Server.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerBindings.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerVariable.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerVariableImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Servers.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServersImpl.class, - io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Tag.class, - 
io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23TagImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Binding.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24BindingImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelBindings.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelItem.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelItemImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Channels.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelsImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Components.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ComponentsImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Contact.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ContactImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24CorrelationID.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24CorrelationIDImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Document.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24DocumentImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ExternalDocumentation.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ExternalDocumentationImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Info.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24InfoImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24License.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24LicenseImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Message.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageBindings.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageBindingsImpl.class, - 
io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageExample.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageExampleImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageTrait.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OAuthFlow.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OAuthFlowImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OAuthFlows.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OAuthFlowsImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OneOfMessages.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OneOfMessagesImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Operation.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationBindings.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationTrait.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Parameter.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ParameterImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Parameters.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ParametersImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Schema.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SchemaImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SecurityRequirement.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SecurityRequirementImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SecurityScheme.class, - 
io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SecuritySchemeImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Server.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerBindings.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerVariable.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerVariableImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Servers.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServersImpl.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Tag.class, - io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24TagImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Binding.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25BindingImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelBindings.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelItem.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelItemImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Channels.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelsImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Components.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ComponentsImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Contact.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ContactImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25CorrelationID.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25CorrelationIDImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Document.class, - 
io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25DocumentImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ExternalDocumentation.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ExternalDocumentationImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Info.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25InfoImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25License.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25LicenseImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Message.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageBindings.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageExample.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageExampleImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageTrait.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OAuthFlow.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OAuthFlowImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OAuthFlows.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OAuthFlowsImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OneOfMessages.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OneOfMessagesImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Operation.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationBindings.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationTrait.class, - 
io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationTraitImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Parameter.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ParameterImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Parameters.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ParametersImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Schema.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SchemaImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SecurityRequirement.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SecurityRequirementImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SecurityScheme.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SecuritySchemeImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Server.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerBindings.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerBindingsImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerVariable.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerVariableImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Servers.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServersImpl.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Tag.class, - io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25TagImpl.class, - io.apicurio.datamodels.models.openapi.OpenApiCallback.class, - io.apicurio.datamodels.models.openapi.OpenApiComponents.class, - io.apicurio.datamodels.models.openapi.OpenApiContact.class, - io.apicurio.datamodels.models.openapi.OpenApiDiscriminator.class, - io.apicurio.datamodels.models.openapi.OpenApiDocument.class, - io.apicurio.datamodels.models.openapi.OpenApiEncoding.class, - 
io.apicurio.datamodels.models.openapi.OpenApiExample.class, - io.apicurio.datamodels.models.openapi.OpenApiExternalDocumentation.class, - io.apicurio.datamodels.models.openapi.OpenApiHeader.class, - io.apicurio.datamodels.models.openapi.OpenApiInfo.class, - io.apicurio.datamodels.models.openapi.OpenApiLicense.class, - io.apicurio.datamodels.models.openapi.OpenApiLink.class, - io.apicurio.datamodels.models.openapi.OpenApiMediaType.class, - io.apicurio.datamodels.models.openapi.OpenApiOAuthFlow.class, - io.apicurio.datamodels.models.openapi.OpenApiOAuthFlows.class, - io.apicurio.datamodels.models.openapi.OpenApiOperation.class, - io.apicurio.datamodels.models.openapi.OpenApiParameter.class, - io.apicurio.datamodels.models.openapi.OpenApiPathItem.class, - io.apicurio.datamodels.models.openapi.OpenApiPaths.class, - io.apicurio.datamodels.models.openapi.OpenApiRequestBody.class, - io.apicurio.datamodels.models.openapi.OpenApiResponse.class, - io.apicurio.datamodels.models.openapi.OpenApiResponses.class, - io.apicurio.datamodels.models.openapi.OpenApiSchema.class, - io.apicurio.datamodels.models.openapi.OpenApiSecurityRequirement.class, - io.apicurio.datamodels.models.openapi.OpenApiSecurityScheme.class, - io.apicurio.datamodels.models.openapi.OpenApiServer.class, - io.apicurio.datamodels.models.openapi.OpenApiServerVariable.class, - io.apicurio.datamodels.models.openapi.OpenApiTag.class, - io.apicurio.datamodels.models.openapi.OpenApiXML.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Contact.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ContactImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Definitions.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20DefinitionsImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Document.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20DocumentImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Example.class, - 
io.apicurio.datamodels.models.openapi.v20.OpenApi20ExampleImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ExternalDocumentation.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ExternalDocumentationImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Header.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20HeaderImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Headers.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20HeadersImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Info.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20InfoImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Items.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ItemsImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20License.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20LicenseImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Operation.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20OperationImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Parameter.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ParameterDefinitions.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ParameterDefinitionsImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ParameterImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20PathItem.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20PathItemImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Paths.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20PathsImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Response.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ResponseDefinitions.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ResponseDefinitionsImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ResponseImpl.class, - 
io.apicurio.datamodels.models.openapi.v20.OpenApi20Responses.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ResponsesImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Schema.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20SchemaImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Scopes.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20ScopesImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityDefinitions.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityDefinitionsImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityRequirement.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityRequirementImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityScheme.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20SecuritySchemeImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20Tag.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20TagImpl.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20XML.class, - io.apicurio.datamodels.models.openapi.v20.OpenApi20XMLImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Callback.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30CallbackImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Components.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ComponentsImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Contact.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ContactImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Discriminator.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30DiscriminatorImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Document.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30DocumentImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Encoding.class, - 
io.apicurio.datamodels.models.openapi.v30.OpenApi30EncodingImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Example.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ExampleImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ExternalDocumentation.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ExternalDocumentationImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Header.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30HeaderImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Info.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30InfoImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30License.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30LicenseImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Link.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30LinkImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30MediaType.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30MediaTypeImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30OAuthFlow.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30OAuthFlowImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30OAuthFlows.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30OAuthFlowsImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Operation.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30OperationImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Parameter.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ParameterImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30PathItem.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30PathItemImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Paths.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30PathsImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30RequestBody.class, - 
io.apicurio.datamodels.models.openapi.v30.OpenApi30RequestBodyImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Response.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ResponseImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Responses.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ResponsesImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Schema.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30SchemaImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30SecurityRequirement.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30SecurityRequirementImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30SecurityScheme.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30SecuritySchemeImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Server.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ServerImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ServerVariable.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30ServerVariableImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30Tag.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30TagImpl.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30XML.class, - io.apicurio.datamodels.models.openapi.v30.OpenApi30XMLImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Callback.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31CallbackImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Components.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ComponentsImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Contact.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ContactImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Discriminator.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31DiscriminatorImpl.class, - 
io.apicurio.datamodels.models.openapi.v31.OpenApi31Document.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31DocumentImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Encoding.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31EncodingImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Example.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ExampleImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ExternalDocumentation.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ExternalDocumentationImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Header.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31HeaderImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Info.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31InfoImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31License.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31LicenseImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Link.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31LinkImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31MediaType.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31MediaTypeImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31OAuthFlow.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31OAuthFlowImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31OAuthFlows.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31OAuthFlowsImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Operation.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31OperationImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Parameter.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ParameterImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31PathItem.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31PathItemImpl.class, 
- io.apicurio.datamodels.models.openapi.v31.OpenApi31Paths.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31PathsImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31RequestBody.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31RequestBodyImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Response.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ResponseImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Responses.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ResponsesImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Schema.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31SchemaImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31SecurityRequirement.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31SecurityRequirementImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31SecurityScheme.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31SecuritySchemeImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Server.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ServerImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ServerVariable.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31ServerVariableImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31Tag.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31TagImpl.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31XML.class, - io.apicurio.datamodels.models.openapi.v31.OpenApi31XMLImpl.class, - io.apicurio.datamodels.validation.rules.invalid.format.InvalidApiDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.InvalidContactEmailRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.InvalidContactUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.InvalidExternalDocsDescriptionRule.class, - 
io.apicurio.datamodels.validation.rules.invalid.format.InvalidLicenseUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.InvalidServerDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.InvalidServerUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.InvalidTagDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.InvalidTermsOfServiceUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidApiBasePathRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidApiHostRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidExampleDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidExternalDocsUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidHeaderDefaultValueRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidHeaderDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidLinkDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOAuthAuthorizationUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOAuthRefreshUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOAuthTokenUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOpenIDConnectUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOperationConsumesRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOperationDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOperationProducesRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidParameterDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidPathItemDescriptionRule.class, - 
io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidRequestBodyDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidResponseDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidSchemaItemsDefaultValueRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidSecuritySchemeAuthUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidSecuritySchemeDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidSecuritySchemeTokenUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidServerVariableDescriptionRule.class, - io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidXmlNamespaceUrlRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasDuplicatePathSegmentRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasEmptyPathSegmentRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasIdenticalPathTemplateRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidCallbackDefinitionNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidExampleDefinitionNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidHeaderDefinitionNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidHttpResponseCodeRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidLinkDefinitionNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidParameterDefNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidPathSegmentRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidPropertyNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidRequestBodyDefinitionNameRule.class, - 
io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidResponseDefNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidSchemaDefNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidScopeNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidSecuritySchemeNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasUnmatchedEncodingPropertyRule.class, - io.apicurio.datamodels.validation.rules.invalid.name.OasUnmatchedExampleTypeRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidCallbackReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidExampleReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidHeaderReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidLinkOperationReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidLinkReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidParameterReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidPathItemReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidRequestBodyReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidResponseReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidSchemaReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidSecurityRequirementNameRule.class, - io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidSecuritySchemeReferenceRule.class, - io.apicurio.datamodels.validation.rules.invalid.type.OasInvalidPropertyTypeValidationRule.class, - io.apicurio.datamodels.validation.rules.invalid.type.OasInvalidSchemaArrayItemsRule.class, - 
io.apicurio.datamodels.validation.rules.invalid.type.OasInvalidSchemaTypeValueRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.AbstractInvalidPropertyValueRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasAllowReservedNotAllowedForParamRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasAllowReservedNotAllowedRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasEncodingStyleNotAllowedRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasExplodeNotAllowedRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasFormDataParamNotAllowedRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidApiConsumesMTRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidApiProducesMTRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidApiSchemeRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidEncodingForMPMTRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidHeaderStyleRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidHttpSecuritySchemeTypeRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidLinkOperationIdRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidOperationIdRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidOperationSchemeRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidSecurityReqScopesRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasMissingPathParamDefinitionRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasMissingResponseForOperationRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasOperationSummaryTooLongRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasPathParamNotFoundRule.class, - 
io.apicurio.datamodels.validation.rules.invalid.value.OasRequiredParamWithDefaultValueRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasSecurityRequirementScopesMustBeEmptyRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasServerVarNotFoundInTemplateRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedArrayCollectionFormatRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedHeaderCollectionFormatRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedHeaderUsageRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedNumOfParamMTsRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedNumberOfHeaderMTsRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedParamAllowEmptyValueRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedParamCollectionFormatRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedParamMultiRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedRequestBodyRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedSecurityRequirementScopesRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedUsageOfBearerTokenRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedUsageOfDiscriminatorRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedXmlWrappingRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownApiKeyLocationRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownArrayCollectionFormatRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownArrayFormatRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownArrayTypeRule.class, - 
io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownCookieParamStyleRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownEncodingStyleRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownHeaderCollectionFormatRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownHeaderFormatRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownHeaderParamStyleRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownHeaderTypeRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownOauthFlowTypeRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamCollectionFormatRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamFormatRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamLocationRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamStyleRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamTypeRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownPathParamStyleRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownQueryParamStyleRule.class, - io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownSecuritySchemeTypeRule.class, - io.apicurio.datamodels.validation.rules.mutex.OasBodyAndFormDataMutualExclusivityRule.class, - io.apicurio.datamodels.validation.rules.mutex.OasExampleValueMutualExclusivityRule.class, - io.apicurio.datamodels.validation.rules.mutex.OasHeaderExamplesMutualExclusivityRule.class, - io.apicurio.datamodels.validation.rules.mutex.OasHeaderSchemaContentMutualExclusivityRule.class, - io.apicurio.datamodels.validation.rules.mutex.OasLinkOperationRefMutualExclusivityRule.class, - io.apicurio.datamodels.validation.rules.mutex.OasMediaTypeExamplesMutualExclusivityRule.class, - 
io.apicurio.datamodels.validation.rules.mutex.OasParameterExamplesMutualExclusivityRule.class, - io.apicurio.datamodels.validation.rules.mutex.OasParameterSchemaContentMutualExclusivityRule.class, - io.apicurio.datamodels.validation.rules.other.OasBodyParameterUniquenessValidationRule.class, - io.apicurio.datamodels.validation.rules.other.OasIgnoredContentTypeHeaderRule.class, - io.apicurio.datamodels.validation.rules.other.OasIgnoredHeaderParameterRule.class, - io.apicurio.datamodels.validation.rules.other.OasOperationIdUniquenessValidationRule.class, - io.apicurio.datamodels.validation.rules.other.OasParameterUniquenessValidationRule.class, - io.apicurio.datamodels.validation.rules.other.OasUnknownPropertyRule.class, - io.apicurio.datamodels.validation.rules.other.SecurityRequirementUniquenessValidationRule.class, - io.apicurio.datamodels.validation.rules.other.TagUniquenessValidationRule.class, - io.apicurio.datamodels.validation.rules.required.AaMissingCorrelationIdRule.class, - io.apicurio.datamodels.validation.rules.required.AasMissingServerProtocolRule.class, - io.apicurio.datamodels.validation.rules.required.MissingApiKeySchemeParamLocationRule.class, - io.apicurio.datamodels.validation.rules.required.MissingApiKeySchemeParamNameRule.class, - io.apicurio.datamodels.validation.rules.required.MissingApiTitleRule.class, - io.apicurio.datamodels.validation.rules.required.MissingApiVersionRule.class, - io.apicurio.datamodels.validation.rules.required.MissingHttpSecuritySchemeTypeRule.class, - io.apicurio.datamodels.validation.rules.required.MissingLicenseNameRule.class, - io.apicurio.datamodels.validation.rules.required.MissingOAuthFlowAuthUrlRule.class, - io.apicurio.datamodels.validation.rules.required.MissingOAuthFlowRokenUrlRule.class, - io.apicurio.datamodels.validation.rules.required.MissingOAuthFlowScopesRule.class, - io.apicurio.datamodels.validation.rules.required.MissingOAuthSecuritySchemeFlowsRule.class, - 
io.apicurio.datamodels.validation.rules.required.MissingOpenIdConnectSecuritySchemeConnectUrlRule.class, - io.apicurio.datamodels.validation.rules.required.MissingOperationDescriptionRule.class, - io.apicurio.datamodels.validation.rules.required.MissingOperationIdRule.class, - io.apicurio.datamodels.validation.rules.required.MissingOperationSummaryRule.class, - io.apicurio.datamodels.validation.rules.required.MissingSecuritySchemeTypeRule.class, - io.apicurio.datamodels.validation.rules.required.MissingServerTemplateUrlRule.class, - io.apicurio.datamodels.validation.rules.required.MissingTagNameRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingApiInformationRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingApiPathsRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingBodyParameterSchemaRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingDiscriminatorPropertyNameRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingExternalDocumentationUrlRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingHeaderArrayInformationRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingHeaderTypeRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingItemsArrayInformationRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingItemsTypeRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingOAuthSchemeAuthUrlRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingOAuthSchemeFlowTypeRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingOAuthSchemeScopesRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingOAuthSchemeTokenUrlRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingOpenApiPropertyRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingOperationResponsesRule.class, - 
io.apicurio.datamodels.validation.rules.required.OasMissingOperationTagsRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingParameterArrayTypeRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingParameterLocationRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingParameterNameRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingParameterTypeRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingRequestBodyContentRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingResponseDefinitionDescriptionRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingResponseDescriptionRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingSchemaArrayInformationRule.class, - io.apicurio.datamodels.validation.rules.required.OasMissingServerVarDefaultValueRule.class, - io.apicurio.datamodels.validation.rules.required.OasPathParamsMustBeRequiredRule.class +@RegisterForReflection(targets = { AccessTokenResponse.class, + // Needed for the JAXRSClient + org.apache.commons.logging.LogFactory.class, org.apache.commons.logging.impl.LogFactoryImpl.class, + org.apache.commons.logging.impl.SimpleLog.class, io.agroal.pool.ConnectionHandler[].class, + // The following list is generated running `jbang cli/tools/extractRegisterForReflection.java` + io.apicurio.datamodels.models.Components.class, io.apicurio.datamodels.models.Contact.class, + io.apicurio.datamodels.models.Document.class, + io.apicurio.datamodels.models.ExternalDocumentation.class, io.apicurio.datamodels.models.Info.class, + io.apicurio.datamodels.models.License.class, io.apicurio.datamodels.models.NodeImpl.class, + io.apicurio.datamodels.models.OAuthFlow.class, io.apicurio.datamodels.models.OAuthFlows.class, + io.apicurio.datamodels.models.Operation.class, io.apicurio.datamodels.models.Parameter.class, + io.apicurio.datamodels.models.RootNode.class, 
io.apicurio.datamodels.models.RootNodeImpl.class, + io.apicurio.datamodels.models.Schema.class, io.apicurio.datamodels.models.SecurityRequirement.class, + io.apicurio.datamodels.models.SecurityScheme.class, io.apicurio.datamodels.models.Server.class, + io.apicurio.datamodels.models.ServerVariable.class, io.apicurio.datamodels.models.Tag.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiBinding.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiChannelBindings.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiChannelItem.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiChannels.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiComponents.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiContact.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiCorrelationID.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiDocument.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiExternalDocumentation.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiInfo.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiLicense.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiMessage.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiMessageBindings.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiMessageExample.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiMessageTrait.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiOAuthFlow.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiOAuthFlows.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiOneOfMessages.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiOperation.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiOperationBindings.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiOperationTrait.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiParameter.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiParameters.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiSchema.class, + 
io.apicurio.datamodels.models.asyncapi.AsyncApiSecurityRequirement.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiSecurityScheme.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiServer.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiServerBindings.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiServerVariable.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiServers.class, + io.apicurio.datamodels.models.asyncapi.AsyncApiTag.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Binding.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20BindingImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelBindings.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelItem.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelItemImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Channels.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ChannelsImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Components.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ComponentsImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Contact.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ContactImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20CorrelationID.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20CorrelationIDImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Document.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20DocumentImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ExternalDocumentation.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ExternalDocumentationImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Info.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20InfoImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20License.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20LicenseImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Message.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageBindings.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageTrait.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20MessageTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OAuthFlow.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OAuthFlowImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OAuthFlows.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OAuthFlowsImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OneOfMessages.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OneOfMessagesImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Operation.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationBindings.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationTrait.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20OperationTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Parameter.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ParameterImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Parameters.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ParametersImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Schema.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SchemaImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SecurityRequirement.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SecurityRequirementImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SecurityScheme.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20SecuritySchemeImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Server.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerBindings.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerVariable.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServerVariableImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Servers.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20ServersImpl.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20Tag.class, + io.apicurio.datamodels.models.asyncapi.v20.AsyncApi20TagImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Binding.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21BindingImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelBindings.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelItem.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelItemImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Channels.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ChannelsImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Components.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ComponentsImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Contact.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ContactImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21CorrelationID.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21CorrelationIDImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Document.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21DocumentImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ExternalDocumentation.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ExternalDocumentationImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Info.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21InfoImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21License.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21LicenseImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Message.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageBindings.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageTrait.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21MessageTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OAuthFlow.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OAuthFlowImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OAuthFlows.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OAuthFlowsImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OneOfMessages.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OneOfMessagesImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Operation.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationBindings.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationTrait.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21OperationTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Parameter.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ParameterImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Parameters.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ParametersImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Schema.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SchemaImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SecurityRequirement.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SecurityRequirementImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SecurityScheme.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21SecuritySchemeImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Server.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerBindings.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerVariable.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServerVariableImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Servers.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21ServersImpl.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21Tag.class, + io.apicurio.datamodels.models.asyncapi.v21.AsyncApi21TagImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Binding.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22BindingImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelBindings.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelBindingsImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelItem.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelItemImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Channels.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ChannelsImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Components.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ComponentsImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Contact.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ContactImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22CorrelationID.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22CorrelationIDImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Document.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22DocumentImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ExternalDocumentation.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ExternalDocumentationImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Info.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22InfoImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22License.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22LicenseImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Message.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageBindings.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageExample.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageExampleImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageTrait.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22MessageTraitImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OAuthFlow.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OAuthFlowImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OAuthFlows.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OAuthFlowsImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OneOfMessages.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OneOfMessagesImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Operation.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationBindings.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationTrait.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22OperationTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Parameter.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ParameterImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Parameters.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ParametersImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Schema.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SchemaImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SecurityRequirement.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SecurityRequirementImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SecurityScheme.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22SecuritySchemeImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Server.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerBindings.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerVariable.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServerVariableImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Servers.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22ServersImpl.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22Tag.class, + io.apicurio.datamodels.models.asyncapi.v22.AsyncApi22TagImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Binding.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23BindingImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelBindings.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelItem.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelItemImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Channels.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ChannelsImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Components.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ComponentsImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Contact.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ContactImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23CorrelationID.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23CorrelationIDImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Document.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23DocumentImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ExternalDocumentation.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ExternalDocumentationImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Info.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23InfoImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23License.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23LicenseImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Message.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageBindings.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageExample.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageExampleImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageTrait.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23MessageTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OAuthFlow.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OAuthFlowImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OAuthFlows.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OAuthFlowsImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OneOfMessages.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OneOfMessagesImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Operation.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationBindings.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationTrait.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23OperationTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Parameter.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ParameterImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Parameters.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ParametersImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Schema.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SchemaImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SecurityRequirement.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SecurityRequirementImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SecurityScheme.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23SecuritySchemeImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Server.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerBindings.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerVariable.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServerVariableImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Servers.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23ServersImpl.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23Tag.class, + io.apicurio.datamodels.models.asyncapi.v23.AsyncApi23TagImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Binding.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24BindingImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelBindings.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelItem.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelItemImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Channels.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ChannelsImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Components.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ComponentsImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Contact.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ContactImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24CorrelationID.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24CorrelationIDImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Document.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24DocumentImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ExternalDocumentation.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ExternalDocumentationImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Info.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24InfoImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24License.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24LicenseImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Message.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageBindings.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageExample.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageExampleImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageTrait.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24MessageTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OAuthFlow.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OAuthFlowImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OAuthFlows.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OAuthFlowsImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OneOfMessages.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OneOfMessagesImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Operation.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationBindings.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationTrait.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24OperationTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Parameter.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ParameterImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Parameters.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ParametersImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Schema.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SchemaImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SecurityRequirement.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SecurityRequirementImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SecurityScheme.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24SecuritySchemeImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Server.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerBindings.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerVariable.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServerVariableImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Servers.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24ServersImpl.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24Tag.class, + io.apicurio.datamodels.models.asyncapi.v24.AsyncApi24TagImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Binding.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25BindingImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelBindings.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelItem.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelItemImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Channels.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ChannelsImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Components.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ComponentsImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Contact.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ContactImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25CorrelationID.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25CorrelationIDImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Document.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25DocumentImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ExternalDocumentation.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ExternalDocumentationImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Info.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25InfoImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25License.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25LicenseImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Message.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageBindings.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageExample.class, + 
io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageExampleImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageTrait.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25MessageTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OAuthFlow.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OAuthFlowImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OAuthFlows.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OAuthFlowsImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OneOfMessages.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OneOfMessagesImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Operation.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationBindings.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationTrait.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25OperationTraitImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Parameter.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ParameterImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Parameters.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ParametersImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Schema.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SchemaImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SecurityRequirement.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SecurityRequirementImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SecurityScheme.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25SecuritySchemeImpl.class, + 
io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Server.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerBindings.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerBindingsImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerVariable.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServerVariableImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Servers.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25ServersImpl.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25Tag.class, + io.apicurio.datamodels.models.asyncapi.v25.AsyncApi25TagImpl.class, + io.apicurio.datamodels.models.openapi.OpenApiCallback.class, + io.apicurio.datamodels.models.openapi.OpenApiComponents.class, + io.apicurio.datamodels.models.openapi.OpenApiContact.class, + io.apicurio.datamodels.models.openapi.OpenApiDiscriminator.class, + io.apicurio.datamodels.models.openapi.OpenApiDocument.class, + io.apicurio.datamodels.models.openapi.OpenApiEncoding.class, + io.apicurio.datamodels.models.openapi.OpenApiExample.class, + io.apicurio.datamodels.models.openapi.OpenApiExternalDocumentation.class, + io.apicurio.datamodels.models.openapi.OpenApiHeader.class, + io.apicurio.datamodels.models.openapi.OpenApiInfo.class, + io.apicurio.datamodels.models.openapi.OpenApiLicense.class, + io.apicurio.datamodels.models.openapi.OpenApiLink.class, + io.apicurio.datamodels.models.openapi.OpenApiMediaType.class, + io.apicurio.datamodels.models.openapi.OpenApiOAuthFlow.class, + io.apicurio.datamodels.models.openapi.OpenApiOAuthFlows.class, + io.apicurio.datamodels.models.openapi.OpenApiOperation.class, + io.apicurio.datamodels.models.openapi.OpenApiParameter.class, + io.apicurio.datamodels.models.openapi.OpenApiPathItem.class, + io.apicurio.datamodels.models.openapi.OpenApiPaths.class, + 
io.apicurio.datamodels.models.openapi.OpenApiRequestBody.class, + io.apicurio.datamodels.models.openapi.OpenApiResponse.class, + io.apicurio.datamodels.models.openapi.OpenApiResponses.class, + io.apicurio.datamodels.models.openapi.OpenApiSchema.class, + io.apicurio.datamodels.models.openapi.OpenApiSecurityRequirement.class, + io.apicurio.datamodels.models.openapi.OpenApiSecurityScheme.class, + io.apicurio.datamodels.models.openapi.OpenApiServer.class, + io.apicurio.datamodels.models.openapi.OpenApiServerVariable.class, + io.apicurio.datamodels.models.openapi.OpenApiTag.class, + io.apicurio.datamodels.models.openapi.OpenApiXML.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Contact.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ContactImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Definitions.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20DefinitionsImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Document.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20DocumentImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Example.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ExampleImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ExternalDocumentation.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ExternalDocumentationImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Header.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20HeaderImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Headers.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20HeadersImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Info.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20InfoImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Items.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ItemsImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20License.class, + 
io.apicurio.datamodels.models.openapi.v20.OpenApi20LicenseImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Operation.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20OperationImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Parameter.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ParameterDefinitions.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ParameterDefinitionsImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ParameterImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20PathItem.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20PathItemImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Paths.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20PathsImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Response.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ResponseDefinitions.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ResponseDefinitionsImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ResponseImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Responses.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ResponsesImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Schema.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20SchemaImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Scopes.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20ScopesImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityDefinitions.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityDefinitionsImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityRequirement.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityRequirementImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20SecurityScheme.class, + 
io.apicurio.datamodels.models.openapi.v20.OpenApi20SecuritySchemeImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20Tag.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20TagImpl.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20XML.class, + io.apicurio.datamodels.models.openapi.v20.OpenApi20XMLImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Callback.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30CallbackImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Components.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ComponentsImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Contact.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ContactImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Discriminator.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30DiscriminatorImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Document.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30DocumentImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Encoding.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30EncodingImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Example.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ExampleImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ExternalDocumentation.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ExternalDocumentationImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Header.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30HeaderImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Info.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30InfoImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30License.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30LicenseImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Link.class, + 
io.apicurio.datamodels.models.openapi.v30.OpenApi30LinkImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30MediaType.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30MediaTypeImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30OAuthFlow.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30OAuthFlowImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30OAuthFlows.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30OAuthFlowsImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Operation.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30OperationImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Parameter.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ParameterImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30PathItem.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30PathItemImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Paths.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30PathsImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30RequestBody.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30RequestBodyImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Response.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ResponseImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Responses.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ResponsesImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Schema.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30SchemaImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30SecurityRequirement.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30SecurityRequirementImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30SecurityScheme.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30SecuritySchemeImpl.class, + 
io.apicurio.datamodels.models.openapi.v30.OpenApi30Server.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ServerImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ServerVariable.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30ServerVariableImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30Tag.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30TagImpl.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30XML.class, + io.apicurio.datamodels.models.openapi.v30.OpenApi30XMLImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Callback.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31CallbackImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Components.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ComponentsImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Contact.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ContactImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Discriminator.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31DiscriminatorImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Document.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31DocumentImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Encoding.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31EncodingImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Example.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ExampleImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ExternalDocumentation.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ExternalDocumentationImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Header.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31HeaderImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Info.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31InfoImpl.class, + 
io.apicurio.datamodels.models.openapi.v31.OpenApi31License.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31LicenseImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Link.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31LinkImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31MediaType.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31MediaTypeImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31OAuthFlow.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31OAuthFlowImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31OAuthFlows.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31OAuthFlowsImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Operation.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31OperationImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Parameter.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ParameterImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31PathItem.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31PathItemImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Paths.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31PathsImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31RequestBody.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31RequestBodyImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Response.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ResponseImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Responses.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ResponsesImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Schema.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31SchemaImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31SecurityRequirement.class, + 
io.apicurio.datamodels.models.openapi.v31.OpenApi31SecurityRequirementImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31SecurityScheme.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31SecuritySchemeImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Server.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ServerImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ServerVariable.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31ServerVariableImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31Tag.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31TagImpl.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31XML.class, + io.apicurio.datamodels.models.openapi.v31.OpenApi31XMLImpl.class, + io.apicurio.datamodels.validation.rules.invalid.format.InvalidApiDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.InvalidContactEmailRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.InvalidContactUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.InvalidExternalDocsDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.InvalidLicenseUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.InvalidServerDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.InvalidServerUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.InvalidTagDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.InvalidTermsOfServiceUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidApiBasePathRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidApiHostRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidExampleDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidExternalDocsUrlRule.class, + 
io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidHeaderDefaultValueRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidHeaderDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidLinkDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOAuthAuthorizationUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOAuthRefreshUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOAuthTokenUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOpenIDConnectUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOperationConsumesRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOperationDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidOperationProducesRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidParameterDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidPathItemDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidRequestBodyDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidResponseDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidSchemaItemsDefaultValueRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidSecuritySchemeAuthUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidSecuritySchemeDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidSecuritySchemeTokenUrlRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidServerVariableDescriptionRule.class, + io.apicurio.datamodels.validation.rules.invalid.format.OasInvalidXmlNamespaceUrlRule.class, + 
io.apicurio.datamodels.validation.rules.invalid.name.OasDuplicatePathSegmentRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasEmptyPathSegmentRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasIdenticalPathTemplateRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidCallbackDefinitionNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidExampleDefinitionNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidHeaderDefinitionNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidHttpResponseCodeRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidLinkDefinitionNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidParameterDefNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidPathSegmentRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidPropertyNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidRequestBodyDefinitionNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidResponseDefNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidSchemaDefNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidScopeNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasInvalidSecuritySchemeNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasUnmatchedEncodingPropertyRule.class, + io.apicurio.datamodels.validation.rules.invalid.name.OasUnmatchedExampleTypeRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidCallbackReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidExampleReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidHeaderReferenceRule.class, + 
io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidLinkOperationReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidLinkReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidParameterReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidPathItemReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidRequestBodyReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidResponseReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidSchemaReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidSecurityRequirementNameRule.class, + io.apicurio.datamodels.validation.rules.invalid.reference.OasInvalidSecuritySchemeReferenceRule.class, + io.apicurio.datamodels.validation.rules.invalid.type.OasInvalidPropertyTypeValidationRule.class, + io.apicurio.datamodels.validation.rules.invalid.type.OasInvalidSchemaArrayItemsRule.class, + io.apicurio.datamodels.validation.rules.invalid.type.OasInvalidSchemaTypeValueRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.AbstractInvalidPropertyValueRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasAllowReservedNotAllowedForParamRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasAllowReservedNotAllowedRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasEncodingStyleNotAllowedRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasExplodeNotAllowedRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasFormDataParamNotAllowedRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidApiConsumesMTRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidApiProducesMTRule.class, + 
io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidApiSchemeRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidEncodingForMPMTRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidHeaderStyleRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidHttpSecuritySchemeTypeRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidLinkOperationIdRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidOperationIdRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidOperationSchemeRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasInvalidSecurityReqScopesRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasMissingPathParamDefinitionRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasMissingResponseForOperationRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasOperationSummaryTooLongRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasPathParamNotFoundRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasRequiredParamWithDefaultValueRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasSecurityRequirementScopesMustBeEmptyRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasServerVarNotFoundInTemplateRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedArrayCollectionFormatRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedHeaderCollectionFormatRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedHeaderUsageRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedNumOfParamMTsRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedNumberOfHeaderMTsRule.class, + 
io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedParamAllowEmptyValueRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedParamCollectionFormatRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedParamMultiRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedRequestBodyRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedSecurityRequirementScopesRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedUsageOfBearerTokenRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedUsageOfDiscriminatorRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnexpectedXmlWrappingRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownApiKeyLocationRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownArrayCollectionFormatRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownArrayFormatRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownArrayTypeRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownCookieParamStyleRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownEncodingStyleRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownHeaderCollectionFormatRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownHeaderFormatRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownHeaderParamStyleRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownHeaderTypeRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownOauthFlowTypeRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamCollectionFormatRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamFormatRule.class, + 
io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamLocationRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamStyleRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownParamTypeRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownPathParamStyleRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownQueryParamStyleRule.class, + io.apicurio.datamodels.validation.rules.invalid.value.OasUnknownSecuritySchemeTypeRule.class, + io.apicurio.datamodels.validation.rules.mutex.OasBodyAndFormDataMutualExclusivityRule.class, + io.apicurio.datamodels.validation.rules.mutex.OasExampleValueMutualExclusivityRule.class, + io.apicurio.datamodels.validation.rules.mutex.OasHeaderExamplesMutualExclusivityRule.class, + io.apicurio.datamodels.validation.rules.mutex.OasHeaderSchemaContentMutualExclusivityRule.class, + io.apicurio.datamodels.validation.rules.mutex.OasLinkOperationRefMutualExclusivityRule.class, + io.apicurio.datamodels.validation.rules.mutex.OasMediaTypeExamplesMutualExclusivityRule.class, + io.apicurio.datamodels.validation.rules.mutex.OasParameterExamplesMutualExclusivityRule.class, + io.apicurio.datamodels.validation.rules.mutex.OasParameterSchemaContentMutualExclusivityRule.class, + io.apicurio.datamodels.validation.rules.other.OasBodyParameterUniquenessValidationRule.class, + io.apicurio.datamodels.validation.rules.other.OasIgnoredContentTypeHeaderRule.class, + io.apicurio.datamodels.validation.rules.other.OasIgnoredHeaderParameterRule.class, + io.apicurio.datamodels.validation.rules.other.OasOperationIdUniquenessValidationRule.class, + io.apicurio.datamodels.validation.rules.other.OasParameterUniquenessValidationRule.class, + io.apicurio.datamodels.validation.rules.other.OasUnknownPropertyRule.class, + io.apicurio.datamodels.validation.rules.other.SecurityRequirementUniquenessValidationRule.class, + 
io.apicurio.datamodels.validation.rules.other.TagUniquenessValidationRule.class, + io.apicurio.datamodels.validation.rules.required.AaMissingCorrelationIdRule.class, + io.apicurio.datamodels.validation.rules.required.AasMissingServerProtocolRule.class, + io.apicurio.datamodels.validation.rules.required.MissingApiKeySchemeParamLocationRule.class, + io.apicurio.datamodels.validation.rules.required.MissingApiKeySchemeParamNameRule.class, + io.apicurio.datamodels.validation.rules.required.MissingApiTitleRule.class, + io.apicurio.datamodels.validation.rules.required.MissingApiVersionRule.class, + io.apicurio.datamodels.validation.rules.required.MissingHttpSecuritySchemeTypeRule.class, + io.apicurio.datamodels.validation.rules.required.MissingLicenseNameRule.class, + io.apicurio.datamodels.validation.rules.required.MissingOAuthFlowAuthUrlRule.class, + io.apicurio.datamodels.validation.rules.required.MissingOAuthFlowRokenUrlRule.class, + io.apicurio.datamodels.validation.rules.required.MissingOAuthFlowScopesRule.class, + io.apicurio.datamodels.validation.rules.required.MissingOAuthSecuritySchemeFlowsRule.class, + io.apicurio.datamodels.validation.rules.required.MissingOpenIdConnectSecuritySchemeConnectUrlRule.class, + io.apicurio.datamodels.validation.rules.required.MissingOperationDescriptionRule.class, + io.apicurio.datamodels.validation.rules.required.MissingOperationIdRule.class, + io.apicurio.datamodels.validation.rules.required.MissingOperationSummaryRule.class, + io.apicurio.datamodels.validation.rules.required.MissingSecuritySchemeTypeRule.class, + io.apicurio.datamodels.validation.rules.required.MissingServerTemplateUrlRule.class, + io.apicurio.datamodels.validation.rules.required.MissingTagNameRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingApiInformationRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingApiPathsRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingBodyParameterSchemaRule.class, + 
io.apicurio.datamodels.validation.rules.required.OasMissingDiscriminatorPropertyNameRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingExternalDocumentationUrlRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingHeaderArrayInformationRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingHeaderTypeRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingItemsArrayInformationRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingItemsTypeRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingOAuthSchemeAuthUrlRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingOAuthSchemeFlowTypeRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingOAuthSchemeScopesRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingOAuthSchemeTokenUrlRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingOpenApiPropertyRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingOperationResponsesRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingOperationTagsRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingParameterArrayTypeRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingParameterLocationRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingParameterNameRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingParameterTypeRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingRequestBodyContentRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingResponseDefinitionDescriptionRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingResponseDescriptionRule.class, + io.apicurio.datamodels.validation.rules.required.OasMissingSchemaArrayInformationRule.class, + 
io.apicurio.datamodels.validation.rules.required.OasMissingServerVarDefaultValueRule.class, + io.apicurio.datamodels.validation.rules.required.OasPathParamsMustBeRequiredRule.class }) public class ApicurioRegisterForReflection { diff --git a/app/src/main/java/io/apicurio/registry/AppConfiguration.java b/app/src/main/java/io/apicurio/registry/AppConfiguration.java index 55b4f0df7e..687e2841fa 100644 --- a/app/src/main/java/io/apicurio/registry/AppConfiguration.java +++ b/app/src/main/java/io/apicurio/registry/AppConfiguration.java @@ -1,16 +1,14 @@ package io.apicurio.registry; -import java.util.Properties; - +import io.apicurio.registry.utils.PropertiesUtil; import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.inject.Produces; import jakarta.enterprise.inject.spi.InjectionPoint; -import io.apicurio.registry.utils.PropertiesUtil; +import java.util.Properties; /** * Generic configuration. - * */ @ApplicationScoped public class AppConfiguration { diff --git a/app/src/main/java/io/apicurio/registry/ImportLifecycleBean.java b/app/src/main/java/io/apicurio/registry/ImportLifecycleBean.java index 09e0d09418..9288861beb 100644 --- a/app/src/main/java/io/apicurio/registry/ImportLifecycleBean.java +++ b/app/src/main/java/io/apicurio/registry/ImportLifecycleBean.java @@ -41,7 +41,8 @@ void onStorageReady(@ObservesAsync StorageEvent ev) { if (StorageEventType.READY.equals(ev.getType()) && registryImportUrlProp.isPresent()) { log.info("Import URL exists."); final URL registryImportUrl = registryImportUrlProp.get(); - try (final InputStream registryImportZip = new BufferedInputStream(registryImportUrl.openStream())) { + try (final InputStream registryImportZip = new BufferedInputStream( + registryImportUrl.openStream())) { log.info("Importing {} on startup.", registryImportUrl); final ZipInputStream zip = new ZipInputStream(registryImportZip, StandardCharsets.UTF_8); final EntityReader reader = new EntityReader(zip); diff --git 
a/app/src/main/java/io/apicurio/registry/URLRegisterForReflection.java b/app/src/main/java/io/apicurio/registry/URLRegisterForReflection.java index 14d019c4c8..372e5f95ca 100644 --- a/app/src/main/java/io/apicurio/registry/URLRegisterForReflection.java +++ b/app/src/main/java/io/apicurio/registry/URLRegisterForReflection.java @@ -1,9 +1,9 @@ package io.apicurio.registry; -import java.net.URL; - import io.quarkus.runtime.annotations.RegisterForReflection; +import java.net.URL; + @RegisterForReflection(targets = URL.class) public class URLRegisterForReflection { diff --git a/app/src/main/java/io/apicurio/registry/auth/AbstractAccessController.java b/app/src/main/java/io/apicurio/registry/auth/AbstractAccessController.java index 899b9ce9ba..fd18b7f981 100644 --- a/app/src/main/java/io/apicurio/registry/auth/AbstractAccessController.java +++ b/app/src/main/java/io/apicurio/registry/auth/AbstractAccessController.java @@ -30,7 +30,8 @@ protected boolean isOwner(InvocationContext context) { String groupId = getStringParam(context, 0); String artifactId = getStringParam(context, 1); return verifyArtifactOwner(groupId, artifactId); - } else if (style == AuthorizedStyle.GroupOnly && authConfig.ownerOnlyAuthorizationLimitGroupAccess.get()) { + } else if (style == AuthorizedStyle.GroupOnly + && authConfig.ownerOnlyAuthorizationLimitGroupAccess.get()) { String groupId = getStringParam(context, 0); return verifyGroupOwner(groupId); } else if (style == AuthorizedStyle.ArtifactOnly) { @@ -71,7 +72,8 @@ private boolean verifyArtifactOwner(String groupId, String artifactId) { private boolean verifyArtifactOwner(long globalId) { try { ArtifactVersionMetaDataDto versionMetaData = storage.getArtifactVersionMetaData(globalId); - ArtifactMetaDataDto dto = storage.getArtifactMetaData(versionMetaData.getGroupId(), versionMetaData.getArtifactId()); + ArtifactMetaDataDto dto = storage.getArtifactMetaData(versionMetaData.getGroupId(), + versionMetaData.getArtifactId()); String owner = 
dto.getOwner(); return owner == null || owner.equals(securityIdentity.getPrincipal().getName()); } catch (NotFoundException nfe) { diff --git a/app/src/main/java/io/apicurio/registry/auth/AuthConfig.java b/app/src/main/java/io/apicurio/registry/auth/AuthConfig.java index 100f3186d9..40153b4276 100644 --- a/app/src/main/java/io/apicurio/registry/auth/AuthConfig.java +++ b/app/src/main/java/io/apicurio/registry/auth/AuthConfig.java @@ -1,15 +1,14 @@ package io.apicurio.registry.auth; -import java.util.function.Supplier; - +import io.apicurio.common.apps.config.Dynamic; +import io.apicurio.common.apps.config.Info; +import jakarta.annotation.PostConstruct; import jakarta.inject.Inject; import jakarta.inject.Singleton; -import jakarta.annotation.PostConstruct; import org.eclipse.microprofile.config.inject.ConfigProperty; import org.slf4j.Logger; -import io.apicurio.common.apps.config.Dynamic; -import io.apicurio.common.apps.config.Info; +import java.util.function.Supplier; @Singleton public class AuthConfig { @@ -40,9 +39,7 @@ public class AuthConfig { Supplier ownerOnlyAuthorizationEnabled; @Dynamic(label = "Artifact group owner-only authorization", description = "When selected, Service Registry allows only the artifact group owner (creator) to modify an artifact group.", requires = { - "apicurio.auth.enabled=true", - "apicurio.auth.owner-only-authorization=true" - }) + "apicurio.auth.enabled=true", "apicurio.auth.owner-only-authorization=true" }) @ConfigProperty(name = "apicurio.auth.owner-only-authorization.limit-group-access", defaultValue = "false") @Info(category = "auth", description = "Artifact group owner-only authorization", availableSince = "2.1.0.Final") Supplier ownerOnlyAuthorizationLimitGroupAccess; @@ -53,9 +50,7 @@ public class AuthConfig { Supplier anonymousReadAccessEnabled; @Dynamic(label = "Authenticated read access", description = "When selected, requests from any authenticated user are granted at least read-only access.", requires = { - 
"apicurio.auth.enabled=true", - "apicurio.auth.role-based-authorization=true" - }) + "apicurio.auth.enabled=true", "apicurio.auth.role-based-authorization=true" }) @ConfigProperty(name = "apicurio.auth.authenticated-read-access.enabled", defaultValue = "false") @Info(category = "auth", description = "Authenticated read access", availableSince = "2.1.4.Final") Supplier authenticatedReadAccessEnabled; diff --git a/app/src/main/java/io/apicurio/registry/auth/Authorized.java b/app/src/main/java/io/apicurio/registry/auth/Authorized.java index 0b7868a776..0078896d5c 100644 --- a/app/src/main/java/io/apicurio/registry/auth/Authorized.java +++ b/app/src/main/java/io/apicurio/registry/auth/Authorized.java @@ -12,7 +12,7 @@ @InterceptorBinding @Inherited @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.TYPE, ElementType.METHOD}) +@Target({ ElementType.TYPE, ElementType.METHOD }) public @interface Authorized { @Nonbinding diff --git a/app/src/main/java/io/apicurio/registry/auth/AuthorizedInterceptor.java b/app/src/main/java/io/apicurio/registry/auth/AuthorizedInterceptor.java index ff81847d9a..9163cb4e5c 100644 --- a/app/src/main/java/io/apicurio/registry/auth/AuthorizedInterceptor.java +++ b/app/src/main/java/io/apicurio/registry/auth/AuthorizedInterceptor.java @@ -13,12 +13,10 @@ import org.slf4j.Logger; /** - * This class implements authorization logic for the registry. It is driven by a combination of the - * security identity (authenticated user) and configured security level of the operation the user is - * attempting to perform. This interceptor will be triggered - * for any method that is annotated with the {@link Authorized} annotation. Please ensure that all - * JAX-RS operations are propertly annotated. - * + * This class implements authorization logic for the registry. It is driven by a combination of the security + * identity (authenticated user) and configured security level of the operation the user is attempting to + * perform. 
This interceptor will be triggered for any method that is annotated with the {@link Authorized} + * annotation. Please ensure that all JAX-RS operations are propertly annotated. */ @Authorized @Interceptor @@ -51,10 +49,12 @@ public Object authorizeMethod(InvocationContext context) throws Exception { // If the user is trying to invoke a role-mapping operation, deny it if // database based RBAC is not enabled. - RoleBasedAccessApiOperation rbacOpAnnotation = context.getMethod().getAnnotation(RoleBasedAccessApiOperation.class); + RoleBasedAccessApiOperation rbacOpAnnotation = context.getMethod() + .getAnnotation(RoleBasedAccessApiOperation.class); if (rbacOpAnnotation != null) { if (!authConfig.isApplicationRbacEnabled()) { - log.warn("Access to /admin/roleMappings denied because application managed RBAC is not enabled."); + log.warn( + "Access to /admin/roleMappings denied because application managed RBAC is not enabled."); throw new ForbiddenException("Application RBAC not enabled."); } } @@ -81,7 +81,8 @@ public Object authorizeMethod(InvocationContext context) throws Exception { // Anonymous users are allowed to perform read-only operations, but only if // apicurio.auth.anonymous-read-access.enabled is set to 'true' - if (authConfig.anonymousReadAccessEnabled.get() && annotation.level() == AuthorizedLevel.Read) { + if (authConfig.anonymousReadAccessEnabled.get() + && annotation.level() == AuthorizedLevel.Read) { log.trace("Anonymous user is being granted access to read-only operation."); return context.proceed(); } @@ -120,13 +121,15 @@ public Object authorizeMethod(InvocationContext context) throws Exception { // If RBAC is enabled, apply role based rules if (authConfig.roleBasedAuthorizationEnabled && !rbac.isAuthorized(context)) { log.warn("RBAC enabled and required role missing."); - throw new ForbiddenException("User " + securityIdentity.getPrincipal().getName() + " is not authorized to perform the requested operation."); + throw new ForbiddenException("User 
" + securityIdentity.getPrincipal().getName() + + " is not authorized to perform the requested operation."); } // If Owner-only is enabled, apply ownership rules if (authConfig.ownerOnlyAuthorizationEnabled.get() && !obac.isAuthorized(context)) { log.warn("OBAC enabled and operation not permitted due to wrong owner."); - throw new ForbiddenException("User " + securityIdentity.getPrincipal().getName() + " is not authorized to perform the requested operation."); + throw new ForbiddenException("User " + securityIdentity.getPrincipal().getName() + + " is not authorized to perform the requested operation."); } return context.proceed(); diff --git a/app/src/main/java/io/apicurio/registry/auth/HeaderRoleProvider.java b/app/src/main/java/io/apicurio/registry/auth/HeaderRoleProvider.java index 5134c7bc75..d2be0b518a 100644 --- a/app/src/main/java/io/apicurio/registry/auth/HeaderRoleProvider.java +++ b/app/src/main/java/io/apicurio/registry/auth/HeaderRoleProvider.java @@ -1,12 +1,12 @@ package io.apicurio.registry.auth; import io.apicurio.common.apps.config.Info; -import org.eclipse.microprofile.config.inject.ConfigProperty; - import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Inject; import jakarta.servlet.http.HttpServletRequest; import jakarta.ws.rs.core.Context; +import org.eclipse.microprofile.config.inject.ConfigProperty; + import java.util.Objects; @RequestScoped diff --git a/app/src/main/java/io/apicurio/registry/auth/RoleBasedAccessApiOperation.java b/app/src/main/java/io/apicurio/registry/auth/RoleBasedAccessApiOperation.java index 4540824a18..06455764e8 100644 --- a/app/src/main/java/io/apicurio/registry/auth/RoleBasedAccessApiOperation.java +++ b/app/src/main/java/io/apicurio/registry/auth/RoleBasedAccessApiOperation.java @@ -6,7 +6,7 @@ import java.lang.annotation.Target; @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) +@Target({ ElementType.METHOD }) public @interface RoleBasedAccessApiOperation { } diff --git 
a/app/src/main/java/io/apicurio/registry/auth/RoleBasedAccessController.java b/app/src/main/java/io/apicurio/registry/auth/RoleBasedAccessController.java index 63175b0159..d6049ad5e0 100644 --- a/app/src/main/java/io/apicurio/registry/auth/RoleBasedAccessController.java +++ b/app/src/main/java/io/apicurio/registry/auth/RoleBasedAccessController.java @@ -24,7 +24,8 @@ public boolean isAuthorized(InvocationContext context) { Authorized annotation = context.getMethod().getAnnotation(Authorized.class); AuthorizedLevel level = annotation.level(); - // If the method has a "dryRun" query param set to True then downgrade the required level from Write to Read + // If the method has a "dryRun" query param set to True then downgrade the required level from Write + // to Read if (annotation.dryRunParam() != -1 && level == AuthorizedLevel.Write) { Boolean dryRun = (Boolean) context.getParameters()[annotation.dryRunParam()]; if (dryRun != null && dryRun.equals(Boolean.TRUE)) { diff --git a/app/src/main/java/io/apicurio/registry/auth/StorageRoleProvider.java b/app/src/main/java/io/apicurio/registry/auth/StorageRoleProvider.java index 851322b113..a8b561a369 100644 --- a/app/src/main/java/io/apicurio/registry/auth/StorageRoleProvider.java +++ b/app/src/main/java/io/apicurio/registry/auth/StorageRoleProvider.java @@ -1,13 +1,12 @@ package io.apicurio.registry.auth; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.enterprise.inject.Instance; -import jakarta.inject.Inject; - import io.apicurio.registry.storage.RegistryStorage; import io.apicurio.registry.types.Current; import io.apicurio.registry.types.RoleType; import io.quarkus.security.identity.SecurityIdentity; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.enterprise.inject.Instance; +import jakarta.inject.Inject; import org.eclipse.microprofile.jwt.JsonWebToken; @ApplicationScoped @@ -16,7 +15,7 @@ public class StorageRoleProvider implements RoleProvider { @Inject SecurityIdentity 
securityIdentity; - //We need to inject the identityToken so we can check some claims when needed. + // We need to inject the identityToken so we can check some claims when needed. @Inject Instance identityToken; @@ -29,7 +28,7 @@ public class StorageRoleProvider implements RoleProvider { private boolean hasRole(String role) { String role4principal = storage.getRoleForPrincipal(securityIdentity.getPrincipal().getName()); boolean hasRole = role.equals(role4principal); - //Check for Keycloak service accounts since they're prefixed with service-account. + // Check for Keycloak service accounts since they're prefixed with service-account. if (!hasRole && tokenHasAzpClaim()) { hasRole = role.equals(storage.getRoleForPrincipal(identityToken.get().getClaim(AZP_CLAIM))); } diff --git a/app/src/main/java/io/apicurio/registry/auth/TokenRoleProvider.java b/app/src/main/java/io/apicurio/registry/auth/TokenRoleProvider.java index e0962888c5..a9aa36e4a1 100644 --- a/app/src/main/java/io/apicurio/registry/auth/TokenRoleProvider.java +++ b/app/src/main/java/io/apicurio/registry/auth/TokenRoleProvider.java @@ -1,10 +1,9 @@ package io.apicurio.registry.auth; +import io.quarkus.security.identity.SecurityIdentity; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; -import io.quarkus.security.identity.SecurityIdentity; - @ApplicationScoped public class TokenRoleProvider implements RoleProvider { diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityCheckResponse.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityCheckResponse.java index 39ae488718..57fd7f922f 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityCheckResponse.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityCheckResponse.java @@ -2,7 +2,6 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonProperty; - import 
io.quarkus.runtime.annotations.RegisterForReflection; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; @@ -14,7 +13,6 @@ /** * Immutable. - * */ @JsonAutoDetect(isGetterVisibility = NONE) @NoArgsConstructor // required for Jackson @@ -27,7 +25,8 @@ public class CompatibilityCheckResponse { public static final CompatibilityCheckResponse IS_COMPATIBLE = new CompatibilityCheckResponse(true, null); - public static final CompatibilityCheckResponse IS_NOT_COMPATIBLE = new CompatibilityCheckResponse(false, null); + public static final CompatibilityCheckResponse IS_NOT_COMPATIBLE = new CompatibilityCheckResponse(false, + null); public static CompatibilityCheckResponse create(boolean isCompatible) { return isCompatible ? IS_COMPATIBLE : IS_NOT_COMPATIBLE; diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityLevelDto.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityLevelDto.java index bbf6a634cf..2173488e55 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityLevelDto.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityLevelDto.java @@ -16,7 +16,6 @@ /** * Immutable. 
- * */ @JsonAutoDetect(isGetterVisibility = NONE) @NoArgsConstructor // required for Jackson @@ -34,13 +33,9 @@ public static CompatibilityLevelDto create(Optional source) private Level compatibility; public enum Level { - BACKWARD("BACKWARD"), - BACKWARD_TRANSITIVE("BACKWARD_TRANSITIVE"), - FORWARD("FORWARD"), - FORWARD_TRANSITIVE("FORWARD_TRANSITIVE"), - FULL("FULL"), - FULL_TRANSITIVE("FULL_TRANSITIVE"), - NONE("NONE"); + BACKWARD("BACKWARD"), BACKWARD_TRANSITIVE("BACKWARD_TRANSITIVE"), FORWARD( + "FORWARD"), FORWARD_TRANSITIVE( + "FORWARD_TRANSITIVE"), FULL("FULL"), FULL_TRANSITIVE("FULL_TRANSITIVE"), NONE("NONE"); public static Level create(Optional source) { return source.map(c -> { diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityLevelParamDto.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityLevelParamDto.java index d29ec6d494..03d6d63378 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityLevelParamDto.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/CompatibilityLevelParamDto.java @@ -1,7 +1,6 @@ package io.apicurio.registry.ccompat.dto; import com.fasterxml.jackson.annotation.JsonAutoDetect; - import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; @@ -21,7 +20,7 @@ @EqualsAndHashCode @ToString @RegisterForReflection -public class CompatibilityLevelParamDto{ +public class CompatibilityLevelParamDto { private String compatibilityLevel; } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/ExporterStatus.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/ExporterStatus.java index 8948162031..b2b48692bb 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/ExporterStatus.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/ExporterStatus.java @@ -9,7 +9,6 @@ import lombok.NoArgsConstructor; import lombok.ToString; - import static 
com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility.NONE; @JsonAutoDetect(isGetterVisibility = NONE) diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/ModeDto.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/ModeDto.java index 662644867d..8965779459 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/ModeDto.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/ModeDto.java @@ -2,7 +2,6 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonProperty; - import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/Schema.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/Schema.java index c505b6b43b..24f087966c 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/Schema.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/Schema.java @@ -3,7 +3,6 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; - import io.apicurio.registry.ccompat.SchemaTypeFilter; import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.AllArgsConstructor; diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaContent.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaContent.java index bf8a8a137a..6066c263f2 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaContent.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaContent.java @@ -2,7 +2,6 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonProperty; - import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.*; diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaId.java 
b/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaId.java index b59aea1fb2..c722034fce 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaId.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaId.java @@ -2,7 +2,6 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonProperty; - import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaInfo.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaInfo.java index b9b8226ec6..3fc1c4d209 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaInfo.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaInfo.java @@ -3,7 +3,6 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; - import io.apicurio.registry.ccompat.SchemaTypeFilter; import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.*; @@ -42,5 +41,4 @@ public SchemaInfo(String schema) { public SchemaInfo() { } - } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaReference.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaReference.java index 0ce62ce3a8..5ff77959ef 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaReference.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/SchemaReference.java @@ -16,9 +16,8 @@ public class SchemaReference implements Comparable { private Integer version; @JsonCreator - public SchemaReference(@JsonProperty("name") String name, - @JsonProperty("subject") String subject, - @JsonProperty("version") Integer version) { + public SchemaReference(@JsonProperty("name") String name, @JsonProperty("subject") String subject, + @JsonProperty("version") Integer version) { this.name = name; 
this.subject = subject; this.version = version; @@ -63,8 +62,7 @@ public boolean equals(Object o) { return false; } SchemaReference that = (SchemaReference) o; - return Objects.equals(name, that.name) - && Objects.equals(subject, that.subject) + return Objects.equals(name, that.name) && Objects.equals(subject, that.subject) && Objects.equals(version, that.version); } @@ -85,15 +83,6 @@ public int compareTo(SchemaReference that) { @Override public String toString() { - return "{" - + "name='" - + name - + '\'' - + ", subject='" - + subject - + '\'' - + ", version=" - + version - + '}'; + return "{" + "name='" + name + '\'' + ", subject='" + subject + '\'' + ", version=" + version + '}'; } } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/dto/SubjectVersion.java b/app/src/main/java/io/apicurio/registry/ccompat/dto/SubjectVersion.java index c30b8cb0bf..7acd76404f 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/dto/SubjectVersion.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/dto/SubjectVersion.java @@ -2,7 +2,6 @@ import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonProperty; - import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.*; diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/error/ConflictException.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/error/ConflictException.java index 38db5ea787..ec99e5e060 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/error/ConflictException.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/error/ConflictException.java @@ -3,14 +3,12 @@ import io.apicurio.registry.types.RegistryException; /** - * This exception covers the following errors in the compat API: - * - 409 Conflict – Incompatible schema + * This exception covers the following errors in the compat API: - 409 Conflict – Incompatible schema */ public class ConflictException extends RegistryException { private 
static final long serialVersionUID = 5511072429790259605L; - public ConflictException(Throwable cause) { super(cause); } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/error/ErrorCode.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/error/ErrorCode.java index 032ec40ae3..d465ffc414 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/error/ErrorCode.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/error/ErrorCode.java @@ -2,7 +2,14 @@ public enum ErrorCode { - SUBJECT_NOT_FOUND(40401), VERSION_NOT_FOUND(40402), SCHEMA_NOT_FOUND(40403), SUBJECT_SOFT_DELETED(40404), SUBJECT_NOT_SOFT_DELETED(40405), SCHEMA_VERSION_SOFT_DELETED(40406), SCHEMA_VERSION_NOT_SOFT_DELETED(40407), SUBJECT_COMPATIBILITY_NOT_CONFIGURED(40408), INVALID_SCHEMA(42201), INVALID_VERSION(42202), INVALID_COMPATIBILITY_LEVEL(42203), OPERATION_NOT_PERMITTED(42205), REFERENCE_EXISTS(42206), INVALID_SUBJECT(42208), SERVER_ERROR(50001), OPERATION_TIMEOUT(50002), FORWARDING_ERROR(50003); + SUBJECT_NOT_FOUND(40401), VERSION_NOT_FOUND(40402), SCHEMA_NOT_FOUND(40403), SUBJECT_SOFT_DELETED( + 40404), SUBJECT_NOT_SOFT_DELETED(40405), SCHEMA_VERSION_SOFT_DELETED( + 40406), SCHEMA_VERSION_NOT_SOFT_DELETED(40407), SUBJECT_COMPATIBILITY_NOT_CONFIGURED( + 40408), INVALID_SCHEMA(42201), INVALID_VERSION( + 42202), INVALID_COMPATIBILITY_LEVEL(42203), OPERATION_NOT_PERMITTED( + 42205), REFERENCE_EXISTS(42206), INVALID_SUBJECT( + 42208), SERVER_ERROR(50001), OPERATION_TIMEOUT( + 50002), FORWARDING_ERROR(50003); private final int value; diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/error/UnprocessableEntityException.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/error/UnprocessableEntityException.java index 83ee80e34a..c4d815c38b 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/error/UnprocessableEntityException.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/error/UnprocessableEntityException.java @@ -3,21 
+3,17 @@ import io.apicurio.registry.types.RegistryException; /** - * This exception covers the following errors in the compat API: - * - Error code 42201 – Invalid schema - * - Error code 42202 – Invalid schema version - * - Error code 42203 – Invalid compatibility level + * This exception covers the following errors in the compat API: - Error code 42201 – Invalid schema - Error + * code 42202 – Invalid schema version - Error code 42203 – Invalid compatibility level */ public class UnprocessableEntityException extends RegistryException { private static final long serialVersionUID = 1791019542026597523L; - public UnprocessableEntityException(String message) { super(message); } - public UnprocessableEntityException(Throwable cause) { super(cause); } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/CompatibilityResource.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/CompatibilityResource.java index 21bd79cec4..85766a548c 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/CompatibilityResource.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/CompatibilityResource.java @@ -11,72 +11,62 @@ /** * Note: *

- * This API specification is owned by Confluent. - * - * The compatibility resource allows the user to test schemas for compatibility against specific versions of a subject’s schema. - * + * This API + * specification is owned by Confluent. The compatibility resource allows the user to test schemas for + * compatibility against specific versions of a subject’s schema. */ @Path("/apis/ccompat/v7/compatibility") -@Consumes({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) -@Produces({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) +@Consumes({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) +@Produces({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) public interface CompatibilityResource { // ----- Path: /compatibility/subjects/{subject}/versions/{version} ----- /** - * Perform a compatibility check on the schema against one or more versions in the subject, depending on how the compatibility is set. - * For example, if compatibility on the subject is set to BACKWARD, FORWARD, or FULL, the compatibility check is against the latest version. - * If compatibility is set to one of the TRANSITIVE types, the check is against all previous versions. + * Perform a compatibility check on the schema against one or more versions in the subject, depending on + * how the compatibility is set. For example, if compatibility on the subject is set to BACKWARD, FORWARD, + * or FULL, the compatibility check is against the latest version. If compatibility is set to one of the + * TRANSITIVE types, the check is against all previous versions. * * @param subject Subject of the schema version against which compatibility is to be tested - * @param verbose Add ?verbose=true at the end of this request to output the reason a schema fails the compatibility test, in cases where it fails. 
The default is false (the reason a schema fails compatibility test is not given). - * - * Status Codes: - * 404 Not Found - * Error code 40401 – Subject not found - * Error code 40402 – Version not found - * 422 Unprocessable Entity - * Error code 42201 – Invalid schema - * Error code 42202 – Invalid version - * 500 Internal Server Error - * Error code 50001 – Error in the backend data store + * @param verbose Add ?verbose=true at the end of this request to output the reason a schema fails the + * compatibility test, in cases where it fails. The default is false (the reason a schema fails + * compatibility test is not given). Status Codes: 404 Not Found Error code 40401 – Subject not + * found Error code 40402 – Version not found 422 Unprocessable Entity Error code 42201 – + * Invalid schema Error code 42202 – Invalid version 500 Internal Server Error Error code 50001 + * – Error in the backend data store */ @POST @Path("/subjects/{subject}/versions") - CompatibilityCheckResponse testCompatibilityBySubjectName( - @PathParam("subject") String subject, - @NotNull SchemaContent request, @QueryParam("verbose") Boolean verbose, @HeaderParam(Headers.GROUP_ID) String groupId) throws Exception; + CompatibilityCheckResponse testCompatibilityBySubjectName(@PathParam("subject") String subject, + @NotNull SchemaContent request, @QueryParam("verbose") Boolean verbose, + @HeaderParam(Headers.GROUP_ID) String groupId) throws Exception; // ----- Path: /compatibility/subjects/{subject}/versions/{version} ----- /** - * Test input schema against a particular version of a subject’s schema for compatibility. - * Note that the compatibility level applied for the check - * is the configured compatibility level for the subject (GET /config/(string: subject)). - * If this subject’s compatibility level was never changed, - * then the global compatibility level applies (GET /config). + * Test input schema against a particular version of a subject’s schema for compatibility. 
Note that the + * compatibility level applied for the check is the configured compatibility level for the subject (GET + * /config/(string: subject)). If this subject’s compatibility level was never changed, then the global + * compatibility level applies (GET /config). * * @param subject Subject of the schema version against which compatibility is to be tested - * @param version Version of the subject’s schema against which compatibility is to be tested. - * Valid values for versionId are between [1,2^31-1] or the string "latest". - * "latest" checks compatibility of the input schema with the last registered schema under the specified subject - * @param verbose Add ?verbose=true at the end of this request to output the reason a schema fails the compatibility test, in cases where it fails. The default is false (the reason a schema fails compatibility test is not given). - * - * Status Codes: - * 404 Not Found - * Error code 40401 – Subject not found - * Error code 40402 – Version not found - * 422 Unprocessable Entity - * Error code 42201 – Invalid schema - * Error code 42202 – Invalid version - * 500 Internal Server Error - * Error code 50001 – Error in the backend data store + * @param version Version of the subject’s schema against which compatibility is to be tested. Valid + * values for versionId are between [1,2^31-1] or the string "latest". "latest" checks + * compatibility of the input schema with the last registered schema under the specified + * subject + * @param verbose Add ?verbose=true at the end of this request to output the reason a schema fails the + * compatibility test, in cases where it fails. The default is false (the reason a schema fails + * compatibility test is not given). 
Status Codes: 404 Not Found Error code 40401 – Subject not + * found Error code 40402 – Version not found 422 Unprocessable Entity Error code 42201 – + * Invalid schema Error code 42202 – Invalid version 500 Internal Server Error Error code 50001 + * – Error in the backend data store */ @POST @Path("/subjects/{subject}/versions/{version}") - CompatibilityCheckResponse testCompatibilityByVersion( - @PathParam("subject") String subject, - @PathParam("version") String version, - @NotNull SchemaContent request, @QueryParam("verbose") Boolean verbose, @HeaderParam(Headers.GROUP_ID) String groupId) throws Exception; + CompatibilityCheckResponse testCompatibilityByVersion(@PathParam("subject") String subject, + @PathParam("version") String version, @NotNull SchemaContent request, + @QueryParam("verbose") Boolean verbose, @HeaderParam(Headers.GROUP_ID) String groupId) + throws Exception; } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ConfigResource.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ConfigResource.java index 5834341293..7822ad0034 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ConfigResource.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ConfigResource.java @@ -5,7 +5,6 @@ import io.apicurio.registry.rest.Headers; import jakarta.validation.constraints.NotNull; import jakarta.ws.rs.*; - import jakarta.ws.rs.HeaderParam; import static io.apicurio.registry.ccompat.rest.ContentTypes.*; @@ -13,109 +12,73 @@ /** * Note: *

- * This API specification is owned by Confluent. - * - * The config resource allows you to inspect the cluster-level configuration values as well as subject overrides. - * + * This API + * specification is owned by Confluent. The config resource allows you to inspect the cluster-level + * configuration values as well as subject overrides. */ @Path("/apis/ccompat/v7/config") -@Consumes({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) -@Produces({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) +@Consumes({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) +@Produces({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) public interface ConfigResource { - // ----- Path: /config ----- /** - * Get global compatibility level. - * - * Response: - * - compatibility (string) – Global compatibility level. Will be one of - * BACKWARD, BACKWARD_TRANSITIVE, FORWARD, FORWARD_TRANSITIVE, FULL, FULL_TRANSITIVE, NONE - * - * Status Codes: - * 500 Internal Server Error - * Error code 50001 – Error in the backend data store + * Get global compatibility level. Response: - compatibility (string) – Global compatibility level. Will + * be one of BACKWARD, BACKWARD_TRANSITIVE, FORWARD, FORWARD_TRANSITIVE, FULL, FULL_TRANSITIVE, NONE + * Status Codes: 500 Internal Server Error Error code 50001 – Error in the backend data store */ @GET CompatibilityLevelParamDto getGlobalCompatibilityLevel(); - /** - * Update global compatibility level. - * - * Request: - * - compatibility (string) – New global compatibility level. Must be one of - * BACKWARD, BACKWARD_TRANSITIVE, FORWARD, FORWARD_TRANSITIVE, FULL, FULL_TRANSITIVE, NONE - * - * Status Codes: - * 422 Unprocessable Entity - * Error code 42203 – Invalid compatibility level - * 500 Internal Server Error - * Error code 50001 – Error in the backend data store + * Update global compatibility level. 
Request: - compatibility (string) – New global compatibility level. + * Must be one of BACKWARD, BACKWARD_TRANSITIVE, FORWARD, FORWARD_TRANSITIVE, FULL, FULL_TRANSITIVE, NONE + * Status Codes: 422 Unprocessable Entity Error code 42203 – Invalid compatibility level 500 Internal + * Server Error Error code 50001 – Error in the backend data store */ @PUT - CompatibilityLevelDto updateGlobalCompatibilityLevel( - @NotNull CompatibilityLevelDto request); - + CompatibilityLevelDto updateGlobalCompatibilityLevel(@NotNull CompatibilityLevelDto request); // ----- Path: /config/{subject} ----- - /** * Get compatibility level for a subject. * - * @param subject (string) – Name of the subject - * - * Request: - * - compatibility (string) – Compatibility level for the subject. Will be one of - * BACKWARD, BACKWARD_TRANSITIVE, FORWARD, FORWARD_TRANSITIVE, FULL, FULL_TRANSITIVE, NONE - * - * Status Codes: - * 404 Not Found – Subject not found - * 500 Internal Server Error – - * Error code 50001 – Error in the backend data store + * @param subject (string) – Name of the subject Request: - compatibility (string) – Compatibility level + * for the subject. Will be one of BACKWARD, BACKWARD_TRANSITIVE, FORWARD, FORWARD_TRANSITIVE, + * FULL, FULL_TRANSITIVE, NONE Status Codes: 404 Not Found – Subject not found 500 Internal + * Server Error – Error code 50001 – Error in the backend data store */ @Path("/{subject}") @GET - CompatibilityLevelParamDto getSubjectCompatibilityLevel(@PathParam("subject") String subject, @HeaderParam(Headers.GROUP_ID) String groupId); + CompatibilityLevelParamDto getSubjectCompatibilityLevel(@PathParam("subject") String subject, + @HeaderParam(Headers.GROUP_ID) String groupId); /** * Update compatibility level for the specified subject. * - * @param subject (string) – Name of the subject - * - * Request: - * - compatibility (string) – New compatibility level for the subject. 
Must be one of - * BACKWARD, BACKWARD_TRANSITIVE, FORWARD, FORWARD_TRANSITIVE, FULL, FULL_TRANSITIVE, NONE - * - * Status Codes: - * 422 Unprocessable Entity – - * Error code 42203 – Invalid compatibility level - * 500 Internal Server Error – - * Error code 50001 – Error in the backend data store - * Error code 50003 – Error while forwarding the request to the primary + * @param subject (string) – Name of the subject Request: - compatibility (string) – New compatibility + * level for the subject. Must be one of BACKWARD, BACKWARD_TRANSITIVE, FORWARD, + * FORWARD_TRANSITIVE, FULL, FULL_TRANSITIVE, NONE Status Codes: 422 Unprocessable Entity – + * Error code 42203 – Invalid compatibility level 500 Internal Server Error – Error code 50001 + * – Error in the backend data store Error code 50003 – Error while forwarding the request to + * the primary */ @Path("/{subject}") @PUT - CompatibilityLevelDto updateSubjectCompatibilityLevel( - @PathParam("subject") String subject, + CompatibilityLevelDto updateSubjectCompatibilityLevel(@PathParam("subject") String subject, @NotNull CompatibilityLevelDto request, @HeaderParam(Headers.GROUP_ID) String groupId); /** * Deletes the specified subject-level compatibility level config and reverts to the global default. 
* - * @param subject (string) – Name of the subject - * - * Status Codes: - * 422 Unprocessable Entity – - * Error code 42203 – Invalid compatibility level - * 500 Internal Server Error – - * Error code 50001 – Error in the backend data store - * Error code 50003 – Error while forwarding the request to the primary + * @param subject (string) – Name of the subject Status Codes: 422 Unprocessable Entity – Error code 42203 + * – Invalid compatibility level 500 Internal Server Error – Error code 50001 – Error in the + * backend data store Error code 50003 – Error while forwarding the request to the primary */ @Path("/{subject}") @DELETE - CompatibilityLevelParamDto deleteSubjectCompatibility( - @PathParam("subject") String subject, @HeaderParam(Headers.GROUP_ID) String groupId); + CompatibilityLevelParamDto deleteSubjectCompatibility(@PathParam("subject") String subject, + @HeaderParam(Headers.GROUP_ID) String groupId); } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ContextResource.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ContextResource.java index 9c420fb8a5..2815a8c160 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ContextResource.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ContextResource.java @@ -1,6 +1,5 @@ package io.apicurio.registry.ccompat.rest.v7; - import jakarta.ws.rs.Consumes; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; @@ -16,16 +15,14 @@ /** * Note: *

- * This API specification is owned by Confluent. - * - * The contexts resource allows you to query the information or manipulate the lifecycle of schema contexts. - * - * We do not support this endpoint, if it's used, the default context will be returned. - * + * This API + * specification is owned by Confluent. The contexts resource allows you to query the information or + * manipulate the lifecycle of schema contexts. We do not support this endpoint, if it's used, the default + * context will be returned. */ @Path("/apis/ccompat/v7/contexts") -@Consumes({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) -@Produces({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) +@Consumes({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) +@Produces({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) public interface ContextResource { @GET diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ExporterResource.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ExporterResource.java index 512a4cba76..c1b4d7d2f1 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ExporterResource.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ExporterResource.java @@ -2,7 +2,6 @@ import io.apicurio.registry.ccompat.dto.ExporterDto; import io.apicurio.registry.ccompat.dto.ExporterStatus; - import jakarta.ws.rs.Consumes; import jakarta.ws.rs.DELETE; import jakarta.ws.rs.GET; @@ -11,6 +10,7 @@ import jakarta.ws.rs.Path; import jakarta.ws.rs.PathParam; import jakarta.ws.rs.Produces; + import java.util.List; import java.util.Map; @@ -22,14 +22,15 @@ /** * Note: *

- * This API specification is owned by Confluent. + * This API + * specification is owned by Confluent. *

- * The exporters resource allows you to query the information or manipulate the lifecycle of schema exporters.. - * + * The exporters resource allows you to query the information or manipulate the lifecycle of schema + * exporters.. */ @Path("/apis/ccompat/v7/exporters") -@Consumes({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) -@Produces({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) +@Consumes({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) +@Produces({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) public interface ExporterResource { @GET @@ -64,7 +65,8 @@ public interface ExporterResource { @PUT @Path("/{exporter}/config") - String updateExporterConfig(@PathParam("exporter") String exporterName, Map config) throws Exception; + String updateExporterConfig(@PathParam("exporter") String exporterName, Map config) + throws Exception; @GET @Path("/{exporter}/status") diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ModeResource.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ModeResource.java index 4b8dc8f5ee..4a22e6421f 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ModeResource.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/ModeResource.java @@ -1,7 +1,6 @@ package io.apicurio.registry.ccompat.rest.v7; import io.apicurio.registry.ccompat.dto.ModeDto; - import jakarta.validation.constraints.NotNull; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.GET; @@ -17,14 +16,13 @@ /** * Note: *

- * This API specification is owned by Confluent. - * - * We DO NOT support this endpoint. Fails with 404. - * + * This API + * specification is owned by Confluent. We DO NOT support this endpoint. Fails with 404. */ @Path("/apis/ccompat/v7/mode") -@Consumes({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) -@Produces({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) +@Consumes({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) +@Produces({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) public interface ModeResource { // ----- Path: /mode ----- @@ -32,8 +30,6 @@ public interface ModeResource { @GET ModeDto getGlobalMode(); - @PUT - ModeDto updateGlobalMode( - @NotNull ModeDto request); + ModeDto updateGlobalMode(@NotNull ModeDto request); } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SchemasResource.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SchemasResource.java index 4468a2bfb2..8974a91006 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SchemasResource.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SchemasResource.java @@ -12,57 +12,37 @@ /** * Note: *

- * This API specification is owned by Confluent. - * + * This API + * specification is owned by Confluent. */ @Path("/apis/ccompat/v7/schemas") -@Consumes({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) -@Produces({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) +@Consumes({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) +@Produces({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) public interface SchemasResource { // ----- Path: /schemas/ids/{globalId} ----- /** - * Get the schema string identified by the input ID. - * - * Parameters: + * Get the schema string identified by the input ID. Parameters: * * @param id (int) – the globally unique identifier of the schema - * @param subject (string) - add ?subject= at the end of this request to look for the subject in all contexts starting with the default context, - * and return the schema with the id from that context. - * - * Response JSON Object: - * - * schema (string) – Schema string identified by the ID - * - * Status Codes: - * - * 404 Not Found – - * Error code 40403 – Schema not found - * 500 Internal Server Error – - * Error code 50001 – Error in the backend datastore + * @param subject (string) - add ?subject= at the end of this request to look for the + * subject in all contexts starting with the default context, and return the schema with the id + * from that context. 
Response JSON Object: schema (string) – Schema string identified by the + * ID Status Codes: 404 Not Found – Error code 40403 – Schema not found 500 Internal Server + * Error – Error code 50001 – Error in the backend datastore */ @GET @Path("/ids/{id}") - SchemaInfo getSchema(@PathParam("id") int id, @QueryParam("subject") String subject, @HeaderParam(Headers.GROUP_ID) String groupId); + SchemaInfo getSchema(@PathParam("id") int id, @QueryParam("subject") String subject, + @HeaderParam(Headers.GROUP_ID) String groupId); // ----- Path: /schemas/types ----- /** - * Get the schema types that are registered with Schema Registry. - * - * - * - * Response JSON Object: - * - * schema (string) – Schema types currently available on Schema Registry. - * - * Status Codes: - * - * 404 Not Found – - * Error code 40403 – Schema not found - * 500 Internal Server Error – - * Error code 50001 – Error in the backend datastore + * Get the schema types that are registered with Schema Registry. Response JSON Object: schema (string) – + * Schema types currently available on Schema Registry. Status Codes: 404 Not Found – Error code 40403 – + * Schema not found 500 Internal Server Error – Error code 50001 – Error in the backend datastore */ @GET @Path("types") @@ -71,23 +51,12 @@ public interface SchemasResource { // ----- PATH: /schemas/ids/{int: id}/versions ----- /** - * Get the subject-version pairs identified by the input ID. - * - * Parameters: - * - * @param id (int) – the globally unique identifier of the schema - * - * Response JSON Array of Objects: - * - * subject (string) – Name of the subject - * version (int) – Version of the returned schema - * - * Status Codes: + * Get the subject-version pairs identified by the input ID. 
Parameters: * - * 404 Not Found – - * Error code 40403 – Schema not found - * 500 Internal Server Error – - * Error code 50001 – Error in the backend datastore + * @param id (int) – the globally unique identifier of the schema Response JSON Array of Objects: subject + * (string) – Name of the subject version (int) – Version of the returned schema Status Codes: + * 404 Not Found – Error code 40403 – Schema not found 500 Internal Server Error – Error code + * 50001 – Error in the backend datastore */ @GET @Path("/ids/{id}/versions") diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SubjectVersionsResource.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SubjectVersionsResource.java index ee5282664d..16889979a9 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SubjectVersionsResource.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SubjectVersionsResource.java @@ -16,14 +16,12 @@ /** * Note: *

- * This API specification is owned by Confluent. - * - * - * + * This API + * specification is owned by Confluent. */ @Path("/apis/ccompat/v7/subjects/{subject}/versions") -@Consumes({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) -@Produces({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) +@Consumes({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) +@Produces({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) public interface SubjectVersionsResource { // ----- Path: /subjects/{subject}/versions ----- @@ -32,202 +30,137 @@ public interface SubjectVersionsResource { * Get a list of versions registered under the specified subject. *

* Parameters: - * *

    - *
  • subject (string) – the name of the subject
  • + *
  • subject (string) – the name of the subject
  • *
- * * Response JSON Array of Objects: *
    - *
  • version (int) – version of the schema registered under this subject
  • + *
  • version (int) – version of the schema registered under this subject
  • *
- * * Status Codes: *
    - *
  • 404 Not Found - *
      - *
    • Error code 40401 – Subject not found
    • - *
    - *
  • - *
  • 500 Internal Server Error - *
      - *
    • Error code 50001 – Error in the backend datastore
    • - *
    - *
  • + *
  • 404 Not Found + *
      + *
    • Error code 40401 – Subject not found
    • + *
    + *
  • + *
  • 500 Internal Server Error + *
      + *
    • Error code 50001 – Error in the backend datastore
    • + *
    + *
  • *
*/ @GET - List listVersions(@PathParam("subject") String subject, @HeaderParam(Headers.GROUP_ID) String groupId, @QueryParam("deleted") Boolean deleted) throws Exception; + List listVersions(@PathParam("subject") String subject, + @HeaderParam(Headers.GROUP_ID) String groupId, @QueryParam("deleted") Boolean deleted) + throws Exception; /** - * Register a new schema under the specified subject. If successfully registered, - * this returns the unique identifier of this schema in the registry. - * The returned identifier should be used to retrieve this schema from the schemas resource - * and is different from the schema’s version which is associated with the subject. - * If the same schema is registered under a different subject, - * the same identifier will be returned. However, the version of the schema - * may be different under different subjects. - * - * A schema should be compatible with the previously registered schema or schemas (if there are any) as per the configured compatibility level. The configured compatibility level can be obtained by issuing a GET http:get:: /config/(string: subject). If that returns null, then GET http:get:: /config - * - * When there are multiple instances of Schema Registry running in the same cluster, the schema registration request will be forwarded to one of the instances designated as the primary. If the primary is not available, the client will get an error code indicating that the forwarding has failed. - * Parameters: - * - * subject (string) – Subject under which the schema will be registered - * - * Request JSON Object: - * - * @param normalize (boolean) - Add ?normalize=true at the end of this request to normalize the schema. The default is false. 
- * @param request – The schema string - * - * Status Codes: - * - * 409 Conflict – Incompatible schema - * 422 Unprocessable Entity – - * Error code 42201 – Invalid schema - * 500 Internal Server Error – - * Error code 50001 – Error in the backend data store - * Error code 50002 – Operation timed out - * Error code 50003 – Error while forwarding the request to the primary + * Register a new schema under the specified subject. If successfully registered, this returns the unique + * identifier of this schema in the registry. The returned identifier should be used to retrieve this + * schema from the schemas resource and is different from the schema’s version which is associated + * with the subject. If the same schema is registered under a different subject, the same identifier will + * be returned. However, the version of the schema may be different under different subjects. A schema + * should be compatible with the previously registered schema or schemas (if there are any) as per the + * configured compatibility level. The configured compatibility level can be obtained by issuing a GET + * http:get:: /config/(string: subject). If that returns null, then GET http:get:: /config When there are + * multiple instances of Schema Registry running in the same cluster, the schema registration request will + * be forwarded to one of the instances designated as the primary. If the primary is not available, the + * client will get an error code indicating that the forwarding has failed. Parameters: subject (string) – + * Subject under which the schema will be registered Request JSON Object: + * + * @param normalize (boolean) - Add ?normalize=true at the end of this request to normalize the schema. + * The default is false. 
+ * @param request – The schema string Status Codes: 409 Conflict – Incompatible schema 422 Unprocessable + * Entity – Error code 42201 – Invalid schema 500 Internal Server Error – Error code 50001 – + * Error in the backend data store Error code 50002 – Operation timed out Error code 50003 – + * Error while forwarding the request to the primary */ @POST - @Authorized(style=AuthorizedStyle.ArtifactOnly) - SchemaId register( - @PathParam("subject") String subject, - @NotNull SchemaInfo request, @QueryParam("normalize") Boolean normalize, @HeaderParam(Headers.GROUP_ID) String groupId) throws Exception; - + @Authorized(style = AuthorizedStyle.ArtifactOnly) + SchemaId register(@PathParam("subject") String subject, @NotNull SchemaInfo request, + @QueryParam("normalize") Boolean normalize, @HeaderParam(Headers.GROUP_ID) String groupId) + throws Exception; // ----- Path: /subjects/{subject}/versions/{version} ----- - /** - * Get a specific version of the schema registered under this subject - * Parameters: - * - * subject (string) – Name of the subject - * version (versionId) – Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string “latest”. “latest” returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served. 
- * - * Response JSON Object: - * - * - * subject (string) – Name of the subject that this schema is registered under - * globalId (int) – Globally unique identifier of the schema - * version (int) – Version of the returned schema - * schema (string) – The schema string - * - * Status Codes: - * - * 404 Not Found – - * Error code 40401 – Subject not found - * Error code 40402 – Version not found - * 422 Unprocessable Entity – - * Error code 42202 – Invalid version - * 500 Internal Server Error – - * Error code 50001 – Error in the backend data store + * Get a specific version of the schema registered under this subject Parameters: subject (string) – Name + * of the subject version (versionId) – Version of the schema to be returned. Valid values for versionId + * are between [1,2^31-1] or the string “latest”. “latest” returns the last registered schema under the + * specified subject. Note that there may be a new latest schema that gets registered right after this + * request is served. Response JSON Object: subject (string) – Name of the subject that this schema is + * registered under globalId (int) – Globally unique identifier of the schema version (int) – Version of + * the returned schema schema (string) – The schema string Status Codes: 404 Not Found – Error code 40401 + * – Subject not found Error code 40402 – Version not found 422 Unprocessable Entity – Error code 42202 – + * Invalid version 500 Internal Server Error – Error code 50001 – Error in the backend data store */ @GET @Path("/{version}") - Schema getSchemaByVersion( - @PathParam("subject") String subject, - @PathParam("version") String version, @HeaderParam(Headers.GROUP_ID) String groupId, @QueryParam("deleted") Boolean deleted) throws Exception; + Schema getSchemaByVersion(@PathParam("subject") String subject, @PathParam("version") String version, + @HeaderParam(Headers.GROUP_ID) String groupId, @QueryParam("deleted") Boolean deleted) + throws Exception; /** - * Deletes a specific version of the 
schema registered under this subject. - * This only deletes the version and the schema ID remains intact - * making it still possible to decode data using the schema ID. - * This API is recommended to be used only in development environments - * or under extreme circumstances where-in, its required to delete - * a previously registered schema for compatibility purposes - * or re-register previously registered schema. - * - * Parameters: - * - * @param subject (string) – Name of the subject - * @param version (versionId) – Version of the schema to be deleted. Valid values for versionId are between [1,2^31-1] or the string “latest”. “latest” deletes the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served. - * @param permanent (boolean) - Add ?permanent=true at the end of this request to specify a hard delete for a specific version of the subject, which removes all associated metadata including the schema ID. The default is false. - * - * - * Response JSON Object: - * - * - * int – Version of the deleted schema - * - * Status Codes: - * - * 404 Not Found – - * Error code 40401 – Subject not found - * Error code 40402 – Version not found - * 422 Unprocessable Entity – - * Error code 42202 – Invalid version - * 500 Internal Server Error – - * Error code 50001 – Error in the backend data store + * Deletes a specific version of the schema registered under this subject. This only deletes the version + * and the schema ID remains intact making it still possible to decode data using the schema ID. This API + * is recommended to be used only in development environments or under extreme circumstances where-in, its + * required to delete a previously registered schema for compatibility purposes or re-register previously + * registered schema. Parameters: + * + * @param subject (string) – Name of the subject + * @param version (versionId) – Version of the schema to be deleted. 
Valid values for versionId are + * between [1,2^31-1] or the string “latest”. “latest” deletes the last registered schema under + * the specified subject. Note that there may be a new latest schema that gets registered right + * after this request is served. + * @param permanent (boolean) - Add ?permanent=true at the end of this request to specify a hard delete + * for a specific version of the subject, which removes all associated metadata including the + * schema ID. The default is false. Response JSON Object: int – Version of the deleted schema + * Status Codes: 404 Not Found – Error code 40401 – Subject not found Error code 40402 – + * Version not found 422 Unprocessable Entity – Error code 42202 – Invalid version 500 Internal + * Server Error – Error code 50001 – Error in the backend data store */ @DELETE @Path("/{version}") - @Authorized(style=AuthorizedStyle.ArtifactOnly) - int deleteSchemaVersion( - @PathParam("subject") String subject, - @PathParam("version") String version, @QueryParam("permanent") Boolean permanent, @HeaderParam(Headers.GROUP_ID) String groupId) throws Exception; + @Authorized(style = AuthorizedStyle.ArtifactOnly) + int deleteSchemaVersion(@PathParam("subject") String subject, @PathParam("version") String version, + @QueryParam("permanent") Boolean permanent, @HeaderParam(Headers.GROUP_ID) String groupId) + throws Exception; // ----- Path: /subjects/{subject}/versions/{version}/schema ----- /** * Get the schema for the specified version of this subject. The unescaped schema only is returned. - * Parameters: - * - * subject (string) – Name of the subject - * version (versionId) – Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string “latest”. “latest” returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served. 
- * - * Response JSON Object: - * - * - * schema (string) – The schema string (unescaped) - * - * Status Codes: - * - * 404 Not Found – - * Error code 40401 – Subject not found - * Error code 40402 – Version not found - * 422 Unprocessable Entity – - * Error code 42202 – Invalid version - * 500 Internal Server Error – - * Error code 50001 – Error in the backend data store + * Parameters: subject (string) – Name of the subject version (versionId) – Version of the schema to be + * returned. Valid values for versionId are between [1,2^31-1] or the string “latest”. “latest” returns + * the last registered schema under the specified subject. Note that there may be a new latest schema that + * gets registered right after this request is served. Response JSON Object: schema (string) – The schema + * string (unescaped) Status Codes: 404 Not Found – Error code 40401 – Subject not found Error code 40402 + * – Version not found 422 Unprocessable Entity – Error code 42202 – Invalid version 500 Internal Server + * Error – Error code 50001 – Error in the backend data store */ @GET @Path("/{version}/schema") - String getSchemaOnly( - @PathParam("subject") String subject, - @PathParam("version") String version, @HeaderParam(Headers.GROUP_ID) String groupId, @QueryParam("deleted") Boolean deleted) throws Exception; + String getSchemaOnly(@PathParam("subject") String subject, @PathParam("version") String version, + @HeaderParam(Headers.GROUP_ID) String groupId, @QueryParam("deleted") Boolean deleted) + throws Exception; // ----- Path: /subjects/{subject}/versions/{version}/referencedby ----- /** - * Get a list of IDs of schemas that reference the schema with the given subject and version. - * - * Parameters: - * - * subject (string) – the name of the subject - * version (versionId) – Version of the schema to be returned. - * Valid values for versionId are between [1,2^31-1] or the string “latest”. - * “latest” returns the last registered schema under the specified subject. 
- * Note that there may be a new latest schema that gets registered right after this request is served. - * - * Response JSON Array of Objects: - * - * id (int) – Globally unique identifier of the schema - * - * - * Status Codes: - * - * 404 Not Found – - * Error code 40401 – Subject not found - * 500 Internal Server Error – - * Error code 50001 – Error in the backend datastore + * Get a list of IDs of schemas that reference the schema with the given subject and version. Parameters: + * subject (string) – the name of the subject version (versionId) – Version of the schema to be returned. + * Valid values for versionId are between [1,2^31-1] or the string “latest”. “latest” returns the last + * registered schema under the specified subject. Note that there may be a new latest schema that gets + * registered right after this request is served. Response JSON Array of Objects: id (int) – Globally + * unique identifier of the schema Status Codes: 404 Not Found – Error code 40401 – Subject not found 500 + * Internal Server Error – Error code 50001 – Error in the backend datastore */ @GET @Path("/{version}/referencedby") - List getSchemasReferencedBy( - @PathParam("subject") String subject, @PathParam("version") String version, @HeaderParam(Headers.GROUP_ID) String groupId) throws Exception; - + List getSchemasReferencedBy(@PathParam("subject") String subject, + @PathParam("version") String version, @HeaderParam(Headers.GROUP_ID) String groupId) + throws Exception; } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SubjectsResource.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SubjectsResource.java index cd1bacff1f..e0a62e774e 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SubjectsResource.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/SubjectsResource.java @@ -13,89 +13,69 @@ /** * Note: *

- * This API specification is owned by Confluent. - * + * This API + * specification is owned by Confluent. */ @Path("/apis/ccompat/v7/subjects") -@Consumes({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) -@Produces({JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST}) +@Consumes({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) +@Produces({ JSON, OCTET_STREAM, COMPAT_SCHEMA_REGISTRY_V1, COMPAT_SCHEMA_REGISTRY_STABLE_LATEST }) public interface SubjectsResource { // ----- Path: /subjects ----- /** * Get a list of registered subjects. - * @param subjectPrefix (string) Add ?subjectPrefix= (as an empty string) at the end of this request to list subjects in the default context. If this flag is not included, GET /subjects returns all subjects across all contexts. - * @param deleted (boolean) Add ?deleted=true at the end of this request to list both current and soft-deleted subjects. The default is false. If this flag is not included, only current subjects are listed (not those that have been soft-deleted). Hard and soft delete are explained below in the description of the delete API. - * - * Response JSON Array of Objects: - * - * - * name (string) – Subject - * - * Status Codes: - * - * 500 Internal Server Error – - * Error code 50001 – Error in the backend datastore + * + * @param subjectPrefix (string) Add ?subjectPrefix= (as an empty string) at the end of this request to + * list subjects in the default context. If this flag is not included, GET /subjects returns + * all subjects across all contexts. + * @param deleted (boolean) Add ?deleted=true at the end of this request to list both current and + * soft-deleted subjects. The default is false. If this flag is not included, only current + * subjects are listed (not those that have been soft-deleted). Hard and soft delete are + * explained below in the description of the delete API. 
Response JSON Array of Objects: name + * (string) – Subject Status Codes: 500 Internal Server Error – Error code 50001 – Error in the + * backend datastore */ @GET - List listSubjects(@QueryParam("subjectPrefix") String subjectPrefix, @QueryParam("deleted") Boolean deleted, @HeaderParam(Headers.GROUP_ID) String groupId); + List listSubjects(@QueryParam("subjectPrefix") String subjectPrefix, + @QueryParam("deleted") Boolean deleted, @HeaderParam(Headers.GROUP_ID) String groupId); // ----- Path: /subjects/{subject} ----- /** - * Check if a schema has already been registered under the specified subject. - * If so, this returns the schema string along with its globally unique identifier, - * its version under this subject and the subject name. - * Parameters: - * - * @param subject (string) – Subject under which the schema will be registered. - * @param normalize (boolean) - Add ?normalize=true at the end of this request to normalize the schema. The default is false. - * - * Response JSON Object: - * - * - * subject (string) – Name of the subject that this schema is registered under - * globalId (int) – Globally unique identifier of the schema - * version (int) – Version of the returned schema - * schema (string) – The schema string - * - * Status Codes: - * - * 404 Not Found – - * Error code 40401 – Subject not found - * Error code 40403 – Schema not found - * 500 Internal Server Error – Internal server error + * Check if a schema has already been registered under the specified subject. If so, this returns the + * schema string along with its globally unique identifier, its version under this subject and the subject + * name. Parameters: + * + * @param subject (string) – Subject under which the schema will be registered. + * @param normalize (boolean) - Add ?normalize=true at the end of this request to normalize the schema. + * The default is false. 
Response JSON Object: subject (string) – Name of the subject that this + * schema is registered under globalId (int) – Globally unique identifier of the schema version + * (int) – Version of the returned schema schema (string) – The schema string Status Codes: 404 + * Not Found – Error code 40401 – Subject not found Error code 40403 – Schema not found 500 + * Internal Server Error – Internal server error */ @POST @Path("/{subject}") - Schema findSchemaByContent( - @PathParam("subject") String subject, - @NotNull SchemaInfo request, @QueryParam("normalize") Boolean normalize, @HeaderParam(Headers.GROUP_ID) String groupId, @QueryParam("deleted") Boolean deleted) throws Exception; + Schema findSchemaByContent(@PathParam("subject") String subject, @NotNull SchemaInfo request, + @QueryParam("normalize") Boolean normalize, @HeaderParam(Headers.GROUP_ID) String groupId, + @QueryParam("deleted") Boolean deleted) throws Exception; /** - * Deletes the specified subject and its associated compatibility level if registered. - * It is recommended to use this API only when a topic needs to be recycled or in development environment. - * - * Parameters: - * - * @param subject (string) – the name of the subject - * @param permanent (boolean) – Add ?permanent=true at the end of this request to specify a hard delete of the subject, which removes all associated metadata including the schema ID. - * The default is false. If the flag is not included, a soft delete is performed. - * - * Response JSON Array of Objects: - * - * version (int) – version of the schema deleted under this subject - * - * Status Codes: - * - * 404 Not Found – - * Error code 40401 – Subject not found - * 500 Internal Server Error – - * Error code 50001 – Error in the backend datastore + * Deletes the specified subject and its associated compatibility level if registered. It is recommended + * to use this API only when a topic needs to be recycled or in development environment. 
Parameters: + * + * @param subject (string) – the name of the subject + * @param permanent (boolean) – Add ?permanent=true at the end of this request to specify a hard delete of + * the subject, which removes all associated metadata including the schema ID. The default is + * false. If the flag is not included, a soft delete is performed. Response JSON Array of + * Objects: version (int) – version of the schema deleted under this subject Status Codes: 404 + * Not Found – Error code 40401 – Subject not found 500 Internal Server Error – Error code + * 50001 – Error in the backend datastore */ @DELETE @Path("/{subject}") - List deleteSubject( - @PathParam("subject") String subject, @QueryParam("permanent") Boolean permanent, @HeaderParam(Headers.GROUP_ID) String groupId) throws Exception; + List deleteSubject(@PathParam("subject") String subject, + @QueryParam("permanent") Boolean permanent, @HeaderParam(Headers.GROUP_ID) String groupId) + throws Exception; } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/AbstractResource.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/AbstractResource.java index 079f8adea1..b837523e4b 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/AbstractResource.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/AbstractResource.java @@ -1,6 +1,5 @@ package io.apicurio.registry.ccompat.rest.v7.impl; - import io.apicurio.registry.ccompat.dto.SchemaReference; import io.apicurio.registry.ccompat.rest.error.ConflictException; import io.apicurio.registry.ccompat.rest.error.UnprocessableEntityException; @@ -64,10 +63,14 @@ public abstract class AbstractResource { @Inject ArtifactTypeUtilProviderFactory factory; - protected ArtifactVersionMetaDataDto createOrUpdateArtifact(String subject, String schema, String artifactType, List references, String groupId) { + protected ArtifactVersionMetaDataDto createOrUpdateArtifact(String subject, String schema, + String 
artifactType, List references, String groupId) { ArtifactVersionMetaDataDto res; final List parsedReferences = parseReferences(references, groupId); - final List artifactReferences = parsedReferences.stream().map(dto -> ArtifactReference.builder().name(dto.getName()).groupId(dto.getGroupId()).artifactId(dto.getArtifactId()).version(dto.getVersion()).build()).collect(Collectors.toList()); + final List artifactReferences = parsedReferences.stream() + .map(dto -> ArtifactReference.builder().name(dto.getName()).groupId(dto.getGroupId()) + .artifactId(dto.getArtifactId()).version(dto.getVersion()).build()) + .collect(Collectors.toList()); final Map resolvedReferences = storage.resolveReferences(parsedReferences); try { ContentHandle schemaContent; @@ -79,26 +82,23 @@ protected ArtifactVersionMetaDataDto createOrUpdateArtifact(String subject, Stri if (!doesArtifactExist(subject, groupId)) { TypedContent typedSchemaContent = TypedContent.create(schemaContent, contentType); - rulesService.applyRules(groupId, subject, artifactType, typedSchemaContent, RuleApplicationType.CREATE, artifactReferences, resolvedReferences); + rulesService.applyRules(groupId, subject, artifactType, typedSchemaContent, + RuleApplicationType.CREATE, artifactReferences, resolvedReferences); EditableArtifactMetaDataDto artifactMetaData = EditableArtifactMetaDataDto.builder().build(); - EditableVersionMetaDataDto firstVersionMetaData = EditableVersionMetaDataDto.builder().build(); - ContentWrapperDto firstVersionContent = ContentWrapperDto.builder() - .content(schemaContent) - .contentType(contentType) - .references(parsedReferences) + EditableVersionMetaDataDto firstVersionMetaData = EditableVersionMetaDataDto.builder() .build(); + ContentWrapperDto firstVersionContent = ContentWrapperDto.builder().content(schemaContent) + .contentType(contentType).references(parsedReferences).build(); res = storage.createArtifact(groupId, subject, artifactType, artifactMetaData, null, firstVersionContent, 
firstVersionMetaData, null).getValue(); } else { TypedContent typedSchemaContent = TypedContent.create(schemaContent, contentType); - rulesService.applyRules(groupId, subject, artifactType, typedSchemaContent, RuleApplicationType.UPDATE, artifactReferences, resolvedReferences); - ContentWrapperDto versionContent = ContentWrapperDto.builder() - .content(schemaContent) - .contentType(contentType) - .references(parsedReferences) - .build(); + rulesService.applyRules(groupId, subject, artifactType, typedSchemaContent, + RuleApplicationType.UPDATE, artifactReferences, resolvedReferences); + ContentWrapperDto versionContent = ContentWrapperDto.builder().content(schemaContent) + .contentType(contentType).references(parsedReferences).build(); res = storage.createArtifactVersion(groupId, subject, null, artifactType, versionContent, EditableVersionMetaDataDto.builder().build(), List.of()); } @@ -112,11 +112,13 @@ protected ArtifactVersionMetaDataDto createOrUpdateArtifact(String subject, Stri return res; } - protected ArtifactVersionMetaDataDto lookupSchema(String groupId, String subject, String schema, List schemaReferences, String schemaType, boolean normalize) { - //FIXME simplify logic + protected ArtifactVersionMetaDataDto lookupSchema(String groupId, String subject, String schema, + List schemaReferences, String schemaType, boolean normalize) { + // FIXME simplify logic try { final String type = schemaType == null ? ArtifactType.AVRO : schemaType; - final String contentType = type.equals(ArtifactType.PROTOBUF) ? ContentTypes.APPLICATION_PROTOBUF : ContentTypes.APPLICATION_JSON; + final String contentType = type.equals(ArtifactType.PROTOBUF) ? 
ContentTypes.APPLICATION_PROTOBUF + : ContentTypes.APPLICATION_JSON; TypedContent typedSchemaContent = TypedContent.create(ContentHandle.create(schema), contentType); final List artifactReferences = parseReferences(schemaReferences, groupId); ArtifactTypeUtilProvider artifactTypeProvider = factory.getArtifactTypeProvider(type); @@ -124,25 +126,30 @@ protected ArtifactVersionMetaDataDto lookupSchema(String groupId, String subject if (cconfig.canonicalHashModeEnabled.get() || normalize) { try { - amd = storage.getArtifactVersionMetaDataByContent(groupId, subject, true, typedSchemaContent, artifactReferences); + amd = storage.getArtifactVersionMetaDataByContent(groupId, subject, true, + typedSchemaContent, artifactReferences); } catch (ArtifactNotFoundException ex) { if (type.equals(ArtifactType.AVRO)) { - //When comparing using content, sometimes the references might be inlined into the content, try to dereference the existing content and compare as a fallback. See https://github.com/Apicurio/apicurio-registry/issues/3588 for more information. - //If using this method there is no matching content either, just re-throw the exception. - //This approach only works for schema types with dereference support (for now, only Avro in the ccompat API). 
- amd = storage.getArtifactVersions(groupId, subject) - .stream().filter(version -> { - StoredArtifactVersionDto artifactVersion = storage.getArtifactVersionContent(groupId, subject, version); - TypedContent typedArtifactVersion = TypedContent.create(artifactVersion.getContent(), artifactVersion.getContentType()); - Map artifactVersionReferences = storage.resolveReferences(artifactVersion.getReferences()); - String dereferencedExistingContentSha = DigestUtils.sha256Hex( - artifactTypeProvider.getContentDereferencer().dereference( - typedArtifactVersion, artifactVersionReferences - ).getContent().content() - ); - return dereferencedExistingContentSha.equals(DigestUtils.sha256Hex(schema)); - }) - .findAny() + // When comparing using content, sometimes the references might be inlined into the + // content, try to dereference the existing content and compare as a fallback. See + // https://github.com/Apicurio/apicurio-registry/issues/3588 for more information. + // If using this method there is no matching content either, just re-throw the + // exception. + // This approach only works for schema types with dereference support (for now, only + // Avro in the ccompat API). 
+ amd = storage.getArtifactVersions(groupId, subject).stream().filter(version -> { + StoredArtifactVersionDto artifactVersion = storage + .getArtifactVersionContent(groupId, subject, version); + TypedContent typedArtifactVersion = TypedContent + .create(artifactVersion.getContent(), artifactVersion.getContentType()); + Map artifactVersionReferences = storage + .resolveReferences(artifactVersion.getReferences()); + String dereferencedExistingContentSha = DigestUtils + .sha256Hex(artifactTypeProvider.getContentDereferencer() + .dereference(typedArtifactVersion, artifactVersionReferences) + .getContent().content()); + return dereferencedExistingContentSha.equals(DigestUtils.sha256Hex(schema)); + }).findAny() .map(version -> storage.getArtifactVersionMetaData(groupId, subject, version)) .orElseThrow(() -> ex); } else { @@ -151,7 +158,8 @@ protected ArtifactVersionMetaDataDto lookupSchema(String groupId, String subject } } else { - amd = storage.getArtifactVersionMetaDataByContent(groupId, subject, false, typedSchemaContent, artifactReferences); + amd = storage.getArtifactVersionMetaDataByContent(groupId, subject, false, typedSchemaContent, + artifactReferences); } return amd; @@ -163,7 +171,8 @@ protected ArtifactVersionMetaDataDto lookupSchema(String groupId, String subject protected Map resolveReferences(List references) { Map resolvedReferences = Collections.emptyMap(); if (references != null && !references.isEmpty()) { - //Transform the given references into dtos and set the contentId, this will also detect if any of the passed references does not exist. + // Transform the given references into dtos and set the contentId, this will also detect if any of + // the passed references does not exist. 
final List referencesAsDtos = references.stream().map(schemaReference -> { final ArtifactReferenceDto artifactReferenceDto = new ArtifactReferenceDto(); artifactReferenceDto.setArtifactId(schemaReference.getSubject()); @@ -176,7 +185,7 @@ protected Map resolveReferences(List refe resolvedReferences = storage.resolveReferences(referencesAsDtos); if (references.size() > resolvedReferences.size()) { - //There are unresolvable references, which is not allowed. + // There are unresolvable references, which is not allowed. throw new UnprocessableEntityException("Unresolved reference"); } } @@ -191,7 +200,8 @@ protected boolean isArtifactActive(String subject, String groupId) { protected String getLatestArtifactVersionForSubject(String subject, String groupId) { try { - GAV latestGAV = storage.getBranchTip(new GA(groupId, subject), BranchId.LATEST, RetrievalBehavior.SKIP_DISABLED_LATEST); + GAV latestGAV = storage.getBranchTip(new GA(groupId, subject), BranchId.LATEST, + RetrievalBehavior.SKIP_DISABLED_LATEST); return latestGAV.getRawVersionId(); } catch (ArtifactNotFoundException ex) { throw new VersionNotFoundException(groupId, subject, "latest"); @@ -200,7 +210,7 @@ protected String getLatestArtifactVersionForSubject(String subject, String group protected boolean shouldFilterState(boolean deleted, VersionState state) { if (deleted) { - //if deleted is enabled, just return all states + // if deleted is enabled, just return all states return true; } else { return state.equals(VersionState.ENABLED); @@ -236,13 +246,17 @@ protected boolean doesGlobalRuleExist(RuleType type) { } } - //Parse references and resolve the contentId. This will fail with ArtifactNotFound if a reference cannot be found. + // Parse references and resolve the contentId. This will fail with ArtifactNotFound if a reference cannot + // be found. 
protected List parseReferences(List references, String groupId) { if (references != null) { return references.stream().map(schemaReference -> { - // Try to get the artifact version. This will fail if not found with ArtifactNotFound or VersionNotFound - storage.getArtifactVersionMetaData(groupId, schemaReference.getSubject(), String.valueOf(schemaReference.getVersion())); - return new ArtifactReferenceDto(groupId, schemaReference.getSubject(), String.valueOf(schemaReference.getVersion()), schemaReference.getName()); + // Try to get the artifact version. This will fail if not found with ArtifactNotFound or + // VersionNotFound + storage.getArtifactVersionMetaData(groupId, schemaReference.getSubject(), + String.valueOf(schemaReference.getVersion())); + return new ArtifactReferenceDto(groupId, schemaReference.getSubject(), + String.valueOf(schemaReference.getVersion()), schemaReference.getName()); }).collect(Collectors.toList()); } else { return Collections.emptyList(); @@ -250,20 +264,21 @@ protected List parseReferences(List refer } protected boolean isCcompatManagedType(String artifactType) { - return artifactType.equals(ArtifactType.AVRO) || artifactType.equals(ArtifactType.PROTOBUF) || artifactType.equals(ArtifactType.JSON); + return artifactType.equals(ArtifactType.AVRO) || artifactType.equals(ArtifactType.PROTOBUF) + || artifactType.equals(ArtifactType.JSON); } /** - * Given a version string: - * - if it's a non-negative integer, use that; - * - if it's a string "latest", find out and use the subject's (artifact's) latest version; - * - if it's -1, do the same as "latest", even though this behavior is undocumented. - * See https://github.com/Apicurio/apicurio-registry/issues/2851 - * - otherwise throw an IllegalArgumentException. - * On success, call the "then" function with the parsed version (MUST NOT be null) and return it's result. - * Optionally provide an "else" function that will receive the exception that would be otherwise thrown. 
+ * Given a version string: - if it's a non-negative integer, use that; - if it's a string "latest", + * find out and use the subject's (artifact's) latest version; - if it's -1, do the same as + * "latest", even though this behavior is undocumented. See + * https://github.com/Apicurio/apicurio-registry/issues/2851 - otherwise throw an + * IllegalArgumentException. On success, call the "then" function with the parsed version (MUST NOT be + * null) and return it's result. Optionally provide an "else" function that will receive the exception + * that would be otherwise thrown. */ - protected T parseVersionString(String subject, String versionString, String groupId, Function then) { + protected T parseVersionString(String subject, String versionString, String groupId, + Function then) { String version; if ("latest".equals(versionString)) { version = getLatestArtifactVersionForSubject(subject, groupId); @@ -284,4 +299,3 @@ protected T parseVersionString(String subject, String versionString, String return then.apply(version); } } - diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ApiConverter.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ApiConverter.java index a081705be7..b2578f9ca0 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ApiConverter.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ApiConverter.java @@ -1,8 +1,5 @@ package io.apicurio.registry.ccompat.rest.v7.impl; -import jakarta.inject.Inject; -import jakarta.inject.Singleton; - import io.apicurio.registry.ccompat.dto.Schema; import io.apicurio.registry.ccompat.dto.SchemaInfo; import io.apicurio.registry.ccompat.dto.SchemaReference; @@ -10,6 +7,8 @@ import io.apicurio.registry.content.ContentHandle; import io.apicurio.registry.storage.dto.ArtifactReferenceDto; import io.apicurio.registry.storage.dto.StoredArtifactVersionDto; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; import java.util.List; import 
java.util.stream.Collectors; @@ -33,17 +32,17 @@ public Schema convert(String subject, StoredArtifactVersionDto storedArtifact) { public Schema convert(String subject, StoredArtifactVersionDto storedArtifact, String artifactType) { return new Schema( - convertUnsigned(cconfig.legacyIdModeEnabled.get() ? storedArtifact.getGlobalId() : storedArtifact.getContentId()), - subject, - convertUnsigned(storedArtifact.getVersionOrder()), - storedArtifact.getContent().content(), - artifactType, - storedArtifact.getReferences().stream().map(this::convert).collect(Collectors.toList()) - ); + convertUnsigned(cconfig.legacyIdModeEnabled.get() ? storedArtifact.getGlobalId() + : storedArtifact.getContentId()), + subject, convertUnsigned(storedArtifact.getVersionOrder()), + storedArtifact.getContent().content(), artifactType, + storedArtifact.getReferences().stream().map(this::convert).collect(Collectors.toList())); } - public SchemaInfo convert(ContentHandle content, String artifactType, List references) { - return new SchemaInfo(content.content(), artifactType, references.stream().map(this::convert).collect(Collectors.toList())); + public SchemaInfo convert(ContentHandle content, String artifactType, + List references) { + return new SchemaInfo(content.content(), artifactType, + references.stream().map(this::convert).collect(Collectors.toList())); } public SubjectVersion convert(String artifactId, Number version) { @@ -51,6 +50,7 @@ public SubjectVersion convert(String artifactId, Number version) { } public SchemaReference convert(ArtifactReferenceDto reference) { - return new SchemaReference(reference.getName(), reference.getArtifactId(), Integer.parseInt(reference.getVersion())); + return new SchemaReference(reference.getName(), reference.getArtifactId(), + Integer.parseInt(reference.getVersion())); } } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/CCompatConfig.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/CCompatConfig.java index 
a3d4dadfbb..e693e783e0 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/CCompatConfig.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/CCompatConfig.java @@ -1,18 +1,16 @@ package io.apicurio.registry.ccompat.rest.v7.impl; -import java.util.function.Supplier; - +import io.apicurio.common.apps.config.Dynamic; +import io.apicurio.common.apps.config.Info; import jakarta.inject.Singleton; - import org.eclipse.microprofile.config.inject.ConfigProperty; -import io.apicurio.common.apps.config.Dynamic; -import io.apicurio.common.apps.config.Info; +import java.util.function.Supplier; @Singleton public class CCompatConfig { - @Dynamic(label = "Legacy ID mode (compatibility API)", description = "When selected, the Schema Registry compatibility API uses global ID instead of content ID for artifact identifiers.") + @Dynamic(label = "Legacy ID mode (compatibility API)", description = "When selected, the Schema Registry compatibility API uses global ID instead of content ID for artifact identifiers.") @ConfigProperty(name = "apicurio.ccompat.legacy-id-mode.enabled", defaultValue = "false") @Info(category = "ccompat", description = "Legacy ID mode (compatibility API)", availableSince = "2.0.2.Final") Supplier legacyIdModeEnabled; @@ -22,7 +20,7 @@ public class CCompatConfig { @Info(category = "ccompat", description = "Canonical hash mode (compatibility API)", availableSince = "2.3.0.Final") Supplier canonicalHashModeEnabled; - @Dynamic(label = "Maximum number of Subjects returned (compatibility API)", description = "Determines the maximum number of Subjects that will be returned by the ccompat API (for the '/subjects' endpoint).") + @Dynamic(label = "Maximum number of Subjects returned (compatibility API)", description = "Determines the maximum number of Subjects that will be returned by the ccompat API (for the '/subjects' endpoint).") @ConfigProperty(name = "apicurio.ccompat.max-subjects", defaultValue = "1000") @Info(category = 
"ccompat", description = "Maximum number of Subjects returned (compatibility API)", availableSince = "2.4.2.Final") Supplier maxSubjects; diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/CompatibilityResourceImpl.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/CompatibilityResourceImpl.java index 570da62249..88a313c1c7 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/CompatibilityResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/CompatibilityResourceImpl.java @@ -22,24 +22,27 @@ import java.util.Collections; import java.util.List; -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class CompatibilityResourceImpl extends AbstractResource implements CompatibilityResource { @Override @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Write) - public CompatibilityCheckResponse testCompatibilityBySubjectName(String subject, SchemaContent request, Boolean verbose, String groupId) throws Exception { + public CompatibilityCheckResponse testCompatibilityBySubjectName(String subject, SchemaContent request, + Boolean verbose, String groupId) throws Exception { final boolean fverbose = verbose == null ? Boolean.FALSE : verbose; try { final List versions = storage.getArtifactVersions(groupId, subject); for (String version : versions) { - final ArtifactVersionMetaDataDto artifactVersionMetaData = storage.getArtifactVersionMetaData(groupId, subject, version); + final ArtifactVersionMetaDataDto artifactVersionMetaData = storage + .getArtifactVersionMetaData(groupId, subject, version); // Assume the content type of the SchemaContent is the same as the previous version. 
String contentType = ContentTypes.APPLICATION_JSON; if (artifactVersionMetaData.getArtifactType().equals(ArtifactType.PROTOBUF)) { contentType = ContentTypes.APPLICATION_PROTOBUF; } - TypedContent typedContent = TypedContent.create(ContentHandle.create(request.getSchema()), contentType); + TypedContent typedContent = TypedContent.create(ContentHandle.create(request.getSchema()), + contentType); rulesService.applyRules(groupId, subject, version, artifactVersionMetaData.getArtifactType(), typedContent, Collections.emptyList(), Collections.emptyMap()); } @@ -57,20 +60,23 @@ public CompatibilityCheckResponse testCompatibilityBySubjectName(String subject, @Override @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Write) - public CompatibilityCheckResponse testCompatibilityByVersion(String subject, String versionString, SchemaContent request, Boolean verbose, String groupId) throws Exception { + public CompatibilityCheckResponse testCompatibilityByVersion(String subject, String versionString, + SchemaContent request, Boolean verbose, String groupId) throws Exception { final boolean fverbose = verbose == null ? Boolean.FALSE : verbose; return parseVersionString(subject, versionString, groupId, v -> { try { - final ArtifactVersionMetaDataDto artifact = storage.getArtifactVersionMetaData(groupId, subject, v); + final ArtifactVersionMetaDataDto artifact = storage.getArtifactVersionMetaData(groupId, + subject, v); // Assume the content type of the SchemaContent is correct based on the artifact type. 
String contentType = ContentTypes.APPLICATION_JSON; if (artifact.getArtifactType().equals(ArtifactType.PROTOBUF)) { contentType = ContentTypes.APPLICATION_PROTOBUF; } - TypedContent typedContent = TypedContent.create(ContentHandle.create(request.getSchema()), contentType); - rulesService.applyRules(groupId, subject, v, artifact.getArtifactType(), - typedContent, Collections.emptyList(), Collections.emptyMap()); + TypedContent typedContent = TypedContent.create(ContentHandle.create(request.getSchema()), + contentType); + rulesService.applyRules(groupId, subject, v, artifact.getArtifactType(), typedContent, + Collections.emptyList(), Collections.emptyMap()); return CompatibilityCheckResponse.IS_COMPATIBLE; } catch (RuleViolationException ex) { if (fverbose) { diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ConfigResourceImpl.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ConfigResourceImpl.java index 42df2d5e66..96e376d6c9 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ConfigResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ConfigResourceImpl.java @@ -22,7 +22,7 @@ import java.util.Optional; import java.util.function.Supplier; -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class ConfigResourceImpl extends AbstractResource implements ConfigResource { @@ -30,19 +30,15 @@ private CompatibilityLevelParamDto getCompatibilityLevel(Supplier supply try { // We're assuming the configuration == compatibility level // TODO make it more explicit - return new CompatibilityLevelParamDto(Optional.of( - CompatibilityLevel.valueOf( - supplyLevel.get() - ) - ).get().name()); + return new CompatibilityLevelParamDto( + Optional.of(CompatibilityLevel.valueOf(supplyLevel.get())).get().name()); } catch (RuleNotFoundException ex) { return new 
CompatibilityLevelParamDto(CompatibilityLevelDto.Level.NONE.name()); } } private void updateCompatibilityLevel(CompatibilityLevelDto.Level level, - Runnable1Ex updater, - RunnableEx deleter) throws X { + Runnable1Ex updater, RunnableEx deleter) throws X { if (level == CompatibilityLevelDto.Level.NONE) { // delete the rule deleter.run(); @@ -53,8 +49,10 @@ private void updateCompatibilityLevel(CompatibilityLevelDt } catch (IllegalArgumentException ex) { throw new IllegalArgumentException("Illegal compatibility level: " + levelString); } - updater.run(RuleConfigurationDto.builder() - .configuration(levelString).build()); // TODO config should take CompatibilityLevel as param + updater.run(RuleConfigurationDto.builder().configuration(levelString).build()); // TODO config + // should take + // CompatibilityLevel + // as param } } @@ -65,55 +63,53 @@ public CompatibilityLevelParamDto getGlobalCompatibilityLevel() { } @Override - @Audited(extractParameters = {"0", AuditingConstants.KEY_RULE}) + @Audited(extractParameters = { "0", AuditingConstants.KEY_RULE }) @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public CompatibilityLevelDto updateGlobalCompatibilityLevel(CompatibilityLevelDto request) { - updateCompatibilityLevel(request.getCompatibility(), - dto -> { - if (!doesGlobalRuleExist(RuleType.COMPATIBILITY)) { - storage.createGlobalRule(RuleType.COMPATIBILITY, dto); - } else { - storage.updateGlobalRule(RuleType.COMPATIBILITY, dto); - } - }, - () -> storage.deleteGlobalRule(RuleType.COMPATIBILITY)); + updateCompatibilityLevel(request.getCompatibility(), dto -> { + if (!doesGlobalRuleExist(RuleType.COMPATIBILITY)) { + storage.createGlobalRule(RuleType.COMPATIBILITY, dto); + } else { + storage.updateGlobalRule(RuleType.COMPATIBILITY, dto); + } + }, () -> storage.deleteGlobalRule(RuleType.COMPATIBILITY)); return request; } @Override - @Audited(extractParameters = {"0", AuditingConstants.KEY_ARTIFACT_ID, "1", AuditingConstants.KEY_RULE}) + 
@Audited(extractParameters = { "0", AuditingConstants.KEY_ARTIFACT_ID, "1", AuditingConstants.KEY_RULE }) @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Write) - public CompatibilityLevelDto updateSubjectCompatibilityLevel(String subject, CompatibilityLevelDto request, String groupId) { - updateCompatibilityLevel(request.getCompatibility(), - dto -> { - if (!doesArtifactRuleExist(subject, RuleType.COMPATIBILITY, groupId)) { - storage.createArtifactRule(groupId, subject, RuleType.COMPATIBILITY, dto); - } else { - storage.updateArtifactRule(groupId, subject, RuleType.COMPATIBILITY, dto); - } - }, - () -> { - try { - storage.deleteArtifactRule(groupId, subject, RuleType.COMPATIBILITY); - } catch (RuleNotFoundException e) { - //Ignore, fail only when the artifact is not found - } - }); + public CompatibilityLevelDto updateSubjectCompatibilityLevel(String subject, + CompatibilityLevelDto request, String groupId) { + updateCompatibilityLevel(request.getCompatibility(), dto -> { + if (!doesArtifactRuleExist(subject, RuleType.COMPATIBILITY, groupId)) { + storage.createArtifactRule(groupId, subject, RuleType.COMPATIBILITY, dto); + } else { + storage.updateArtifactRule(groupId, subject, RuleType.COMPATIBILITY, dto); + } + }, () -> { + try { + storage.deleteArtifactRule(groupId, subject, RuleType.COMPATIBILITY); + } catch (RuleNotFoundException e) { + // Ignore, fail only when the artifact is not found + } + }); return request; } @Override @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Read) public CompatibilityLevelParamDto getSubjectCompatibilityLevel(String subject, String groupId) { - return getCompatibilityLevel(() -> storage.getArtifactRule(groupId, subject, RuleType.COMPATIBILITY).getConfiguration()); + return getCompatibilityLevel( + () -> storage.getArtifactRule(groupId, subject, RuleType.COMPATIBILITY).getConfiguration()); } @Override - @Audited(extractParameters = {"0", AuditingConstants.KEY_ARTIFACT_ID}) + 
@Audited(extractParameters = { "0", AuditingConstants.KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Write) public CompatibilityLevelParamDto deleteSubjectCompatibility(String subject, String groupId) { - final CompatibilityLevelParamDto compatibilityLevel = getCompatibilityLevel(() -> - storage.getArtifactRule(groupId, subject, RuleType.COMPATIBILITY).getConfiguration()); + final CompatibilityLevelParamDto compatibilityLevel = getCompatibilityLevel( + () -> storage.getArtifactRule(groupId, subject, RuleType.COMPATIBILITY).getConfiguration()); if (!CompatibilityLevel.NONE.name().equals(compatibilityLevel.getCompatibilityLevel())) { storage.deleteArtifactRule(groupId, subject, RuleType.COMPATIBILITY); } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ContextResourceImpl.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ContextResourceImpl.java index fd18d2c7c3..8afcf53a89 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ContextResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ContextResourceImpl.java @@ -4,11 +4,11 @@ import io.apicurio.registry.ccompat.rest.v7.ContextResource; import io.apicurio.registry.metrics.health.liveness.ResponseErrorLivenessCheck; import io.apicurio.registry.metrics.health.readiness.ResponseTimeoutReadinessCheck; - import jakarta.interceptor.Interceptors; + import java.util.List; -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class ContextResourceImpl extends AbstractResource implements ContextResource { diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ExporterResourceImpl.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ExporterResourceImpl.java index 99aaee44bb..faed330f4a 100644 --- 
a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ExporterResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ExporterResourceImpl.java @@ -7,12 +7,12 @@ import io.apicurio.registry.ccompat.rest.v7.ExporterResource; import io.apicurio.registry.metrics.health.liveness.ResponseErrorLivenessCheck; import io.apicurio.registry.metrics.health.readiness.ResponseTimeoutReadinessCheck; - import jakarta.interceptor.Interceptors; + import java.util.List; import java.util.Map; -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class ExporterResourceImpl extends AbstractResource implements ExporterResource { diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ModeResourceImpl.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ModeResourceImpl.java index 8888b51f6e..785386da3b 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ModeResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/ModeResourceImpl.java @@ -6,10 +6,9 @@ import io.apicurio.registry.ccompat.rest.v7.ModeResource; import io.apicurio.registry.metrics.health.liveness.ResponseErrorLivenessCheck; import io.apicurio.registry.metrics.health.readiness.ResponseTimeoutReadinessCheck; - import jakarta.interceptor.Interceptors; -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class ModeResourceImpl extends AbstractResource implements ModeResource { diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SchemasResourceImpl.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SchemasResourceImpl.java index e0f18fd49c..0fac97e67b 100644 --- 
a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SchemasResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SchemasResourceImpl.java @@ -25,7 +25,7 @@ import java.util.List; import java.util.stream.Collectors; -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class SchemasResourceImpl extends AbstractResource implements SchemasResource { @@ -63,12 +63,13 @@ public List getSubjectVersions(int id, Boolean fdeleted) { boolean deleted = fdeleted != null && fdeleted; if (cconfig.legacyIdModeEnabled.get()) { ArtifactVersionMetaDataDto metaData = storage.getArtifactVersionMetaData((long) id); - return Collections.singletonList(converter.convert(metaData.getArtifactId(), metaData.getVersionOrder())); + return Collections + .singletonList(converter.convert(metaData.getArtifactId(), metaData.getVersionOrder())); } - return storage.getArtifactVersionsByContentId(id) - .stream() + return storage.getArtifactVersionsByContentId(id).stream() .filter(versionMetaData -> deleted || versionMetaData.getState() != VersionState.DISABLED) - .map(versionMetaData -> converter.convert(versionMetaData.getArtifactId(), versionMetaData.getVersionOrder())) + .map(versionMetaData -> converter.convert(versionMetaData.getArtifactId(), + versionMetaData.getVersionOrder())) .collect(Collectors.toList()); } } diff --git a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SubjectVersionsResourceImpl.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SubjectVersionsResourceImpl.java index 2411241619..ac690f5e9a 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SubjectVersionsResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SubjectVersionsResourceImpl.java @@ -15,6 +15,7 @@ import 
io.apicurio.registry.ccompat.rest.v7.SubjectVersionsResource; import io.apicurio.registry.content.ContentHandle; import io.apicurio.registry.content.TypedContent; +import io.apicurio.registry.content.util.ContentTypeUtil; import io.apicurio.registry.metrics.health.liveness.ResponseErrorLivenessCheck; import io.apicurio.registry.metrics.health.readiness.ResponseTimeoutReadinessCheck; import io.apicurio.registry.storage.dto.ArtifactVersionMetaDataDto; @@ -25,7 +26,6 @@ import io.apicurio.registry.storage.error.VersionNotFoundException; import io.apicurio.registry.types.VersionState; import io.apicurio.registry.util.ArtifactTypeUtil; -import io.apicurio.registry.content.util.ContentTypeUtil; import io.apicurio.registry.utils.VersionUtil; import jakarta.inject.Inject; import jakarta.interceptor.Interceptors; @@ -40,7 +40,7 @@ import static io.apicurio.registry.storage.RegistryStorage.RetrievalBehavior.DEFAULT; import static io.apicurio.registry.storage.RegistryStorage.RetrievalBehavior.SKIP_DISABLED_LATEST; -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class SubjectVersionsResourceImpl extends AbstractResource implements SubjectVersionsResource { @@ -53,9 +53,12 @@ public List listVersions(String subject, String groupId, Boolean delete final boolean fdeleted = deleted == null ? 
Boolean.FALSE : deleted; List rval; if (fdeleted) { - rval = storage.getArtifactVersions(groupId, subject, DEFAULT).stream().map(VersionUtil::toLong).map(converter::convertUnsigned).sorted().collect(Collectors.toList()); + rval = storage.getArtifactVersions(groupId, subject, DEFAULT).stream().map(VersionUtil::toLong) + .map(converter::convertUnsigned).sorted().collect(Collectors.toList()); } else { - rval = storage.getArtifactVersions(groupId, subject, SKIP_DISABLED_LATEST).stream().map(VersionUtil::toLong).map(converter::convertUnsigned).sorted().collect(Collectors.toList()); + rval = storage.getArtifactVersions(groupId, subject, SKIP_DISABLED_LATEST).stream() + .map(VersionUtil::toLong).map(converter::convertUnsigned).sorted() + .collect(Collectors.toList()); } if (rval.isEmpty()) { throw new ArtifactNotFoundException(groupId, subject); @@ -64,13 +67,14 @@ public List listVersions(String subject, String groupId, Boolean delete } @Override - @Audited(extractParameters = {"0", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Write) - public SchemaId register(String subject, SchemaInfo request, Boolean normalize, String groupId) throws Exception { + public SchemaId register(String subject, SchemaInfo request, Boolean normalize, String groupId) + throws Exception { final boolean fnormalize = normalize == null ? Boolean.FALSE : normalize; // Check to see if this content is already registered - return the global ID of that content - // if it exists. If not, then register the new content. + // if it exists. If not, then register the new content. 
long sid = -1; boolean idFound = false; if (null == request) { @@ -80,7 +84,8 @@ public SchemaId register(String subject, SchemaInfo request, Boolean normalize, final Map resolvedReferences = resolveReferences(request.getReferences()); try { - ArtifactVersionMetaDataDto dto = lookupSchema(groupId, subject, request.getSchema(), request.getReferences(), request.getSchemaType(), fnormalize); + ArtifactVersionMetaDataDto dto = lookupSchema(groupId, subject, request.getSchema(), + request.getReferences(), request.getSchemaType(), fnormalize); if (dto.getState().equals(VersionState.DISABLED)) { throw new ArtifactNotFoundException(groupId, subject); } @@ -97,16 +102,19 @@ public SchemaId register(String subject, SchemaInfo request, Boolean normalize, TypedContent typedSchemaContent = TypedContent.create(schemaContent, contentType); // We validate the schema at creation time by inferring the type from the content - final String artifactType = ArtifactTypeUtil.determineArtifactType(typedSchemaContent, - null, resolvedReferences, factory); + final String artifactType = ArtifactTypeUtil.determineArtifactType(typedSchemaContent, null, + resolvedReferences, factory); if (request.getSchemaType() != null && !artifactType.equals(request.getSchemaType())) { - throw new UnprocessableEntityException(String.format("Given schema is not from type: %s", request.getSchemaType())); + throw new UnprocessableEntityException( + String.format("Given schema is not from type: %s", request.getSchemaType())); } - ArtifactVersionMetaDataDto artifactMeta = createOrUpdateArtifact(subject, request.getSchema(), artifactType, request.getReferences(), groupId); - sid = cconfig.legacyIdModeEnabled.get() ? artifactMeta.getGlobalId() : artifactMeta.getContentId(); + ArtifactVersionMetaDataDto artifactMeta = createOrUpdateArtifact(subject, request.getSchema(), + artifactType, request.getReferences(), groupId); + sid = cconfig.legacyIdModeEnabled.get() ? 
artifactMeta.getGlobalId() + : artifactMeta.getContentId(); } catch (InvalidArtifactTypeException ex) { - //If no artifact type can be inferred, throw invalid schema ex + // If no artifact type can be inferred, throw invalid schema ex throw new UnprocessableEntityException(ex.getMessage()); } } @@ -117,27 +125,34 @@ public SchemaId register(String subject, SchemaInfo request, Boolean normalize, @Override @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Read) - public Schema getSchemaByVersion(String subject, String version, String groupId, Boolean deleted) throws Exception { + public Schema getSchemaByVersion(String subject, String version, String groupId, Boolean deleted) + throws Exception { final boolean fdeleted = deleted == null ? Boolean.FALSE : deleted; return getSchema(groupId, subject, version, fdeleted); } @Override - @Audited(extractParameters = {"0", KEY_ARTIFACT_ID, "1", KEY_VERSION}) + @Audited(extractParameters = { "0", KEY_ARTIFACT_ID, "1", KEY_VERSION }) @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Write) - public int deleteSchemaVersion(String subject, String versionString, Boolean permanent, String groupId) throws Exception { + public int deleteSchemaVersion(String subject, String versionString, Boolean permanent, String groupId) + throws Exception { try { if (doesArtifactExist(subject, groupId)) { final boolean fpermanent = permanent == null ? 
Boolean.FALSE : permanent; return VersionUtil.toInteger(parseVersionString(subject, versionString, groupId, version -> { - List globalIdsReferencingSchema = storage.getGlobalIdsReferencingArtifactVersion(groupId, subject, version); - ArtifactVersionMetaDataDto avmd = storage.getArtifactVersionMetaData(groupId, subject, version); - if (globalIdsReferencingSchema.isEmpty() || areAllSchemasDisabled(globalIdsReferencingSchema)) { - return processDeleteVersion(subject, versionString, groupId, version, fpermanent, avmd); + List globalIdsReferencingSchema = storage + .getGlobalIdsReferencingArtifactVersion(groupId, subject, version); + ArtifactVersionMetaDataDto avmd = storage.getArtifactVersionMetaData(groupId, subject, + version); + if (globalIdsReferencingSchema.isEmpty() + || areAllSchemasDisabled(globalIdsReferencingSchema)) { + return processDeleteVersion(subject, versionString, groupId, version, fpermanent, + avmd); } else { - //There are other schemas referencing this one, it cannot be deleted. - throw new ReferenceExistsException(String.format("There are subjects referencing %s", subject)); + // There are other schemas referencing this one, it cannot be deleted. 
+ throw new ReferenceExistsException( + String.format("There are subjects referencing %s", subject)); } })); @@ -152,8 +167,10 @@ public int deleteSchemaVersion(String subject, String versionString, Boolean per private String processDeleteVersion(String subject, String versionString, String groupId, String version, boolean fpermanent, ArtifactVersionMetaDataDto avmd) { if (fpermanent) { - if (avmd.getState().equals(VersionState.ENABLED) || avmd.getState().equals(VersionState.DEPRECATED)) { - throw new SchemaNotSoftDeletedException(String.format("Subject %s version %s must be soft deleted first", subject, versionString)); + if (avmd.getState().equals(VersionState.ENABLED) + || avmd.getState().equals(VersionState.DEPRECATED)) { + throw new SchemaNotSoftDeletedException(String + .format("Subject %s version %s must be soft deleted first", subject, versionString)); } else if (avmd.getState().equals(VersionState.DISABLED)) { storage.deleteArtifactVersion(groupId, subject, version); } @@ -162,8 +179,7 @@ private String processDeleteVersion(String subject, String versionString, String throw new SchemaSoftDeletedException("Schema is already soft deleted"); } else { EditableVersionMetaDataDto emd = EditableVersionMetaDataDto.builder() - .state(VersionState.DISABLED) - .build(); + .state(VersionState.DISABLED).build(); storage.updateArtifactVersionMetaData(groupId, subject, version, emd); } } @@ -172,27 +188,33 @@ private String processDeleteVersion(String subject, String versionString, String @Override @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Read) - public String getSchemaOnly(String subject, String version, String groupId, Boolean deleted) throws Exception { + public String getSchemaOnly(String subject, String version, String groupId, Boolean deleted) + throws Exception { final boolean fdeleted = deleted == null ? 
Boolean.FALSE : deleted; return getSchema(groupId, subject, version, fdeleted).getSchema(); } @Override @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Read) - public List getSchemasReferencedBy(String subject, String versionString, String groupId) throws Exception { + public List getSchemasReferencedBy(String subject, String versionString, String groupId) + throws Exception { if (cconfig.legacyIdModeEnabled.get()) { - return parseVersionString(subject, versionString, groupId, version -> storage.getGlobalIdsReferencingArtifactVersion(groupId, subject, version)); + return parseVersionString(subject, versionString, groupId, + version -> storage.getGlobalIdsReferencingArtifactVersion(groupId, subject, version)); } - return parseVersionString(subject, versionString, groupId, version -> storage.getContentIdsReferencingArtifactVersion(groupId, subject, version)); + return parseVersionString(subject, versionString, groupId, + version -> storage.getContentIdsReferencingArtifactVersion(groupId, subject, version)); } protected Schema getSchema(String groupId, String subject, String versionString, boolean deleted) { if (doesArtifactExist(subject, groupId) && isArtifactActive(subject, groupId)) { return parseVersionString(subject, versionString, groupId, version -> { - ArtifactVersionMetaDataDto amd = storage.getArtifactVersionMetaData(groupId, subject, version); + ArtifactVersionMetaDataDto amd = storage.getArtifactVersionMetaData(groupId, subject, + version); if (amd.getState() != VersionState.DISABLED || deleted) { - StoredArtifactVersionDto storedArtifact = storage.getArtifactVersionContent(groupId, subject, amd.getVersion()); + StoredArtifactVersionDto storedArtifact = storage.getArtifactVersionContent(groupId, + subject, amd.getVersion()); return converter.convert(subject, storedArtifact, amd.getArtifactType()); } else { throw new VersionNotFoundException(groupId, subject, version); diff --git 
a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SubjectsResourceImpl.java b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SubjectsResourceImpl.java index 6d3cf3885f..15916d8540 100644 --- a/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SubjectsResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/ccompat/rest/v7/impl/SubjectsResourceImpl.java @@ -1,12 +1,5 @@ package io.apicurio.registry.ccompat.rest.v7.impl; -import static io.apicurio.common.apps.logging.audit.AuditingConstants.KEY_ARTIFACT_ID; - -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - import io.apicurio.common.apps.logging.Logged; import io.apicurio.common.apps.logging.audit.Audited; import io.apicurio.registry.auth.Authorized; @@ -35,89 +28,108 @@ import io.apicurio.registry.utils.VersionUtil; import jakarta.interceptor.Interceptors; -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import static io.apicurio.common.apps.logging.audit.AuditingConstants.KEY_ARTIFACT_ID; + +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class SubjectsResourceImpl extends AbstractResource implements SubjectsResource { @Override @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) public List listSubjects(String subjectPrefix, Boolean deleted, String groupId) { - //Since contexts are not supported, subjectPrefix is not used + // Since contexts are not supported, subjectPrefix is not used final boolean fdeleted = deleted == null ? 
Boolean.FALSE : deleted; Set filters = new HashSet<>(Set.of(SearchFilter.ofGroupId(groupId))); if (!fdeleted) { filters.add(SearchFilter.ofState(VersionState.DISABLED).negated()); } - ArtifactSearchResultsDto searchResults = storage.searchArtifacts(filters, - OrderBy.createdOn, OrderDirection.asc, 0, cconfig.maxSubjects.get()); + ArtifactSearchResultsDto searchResults = storage.searchArtifacts(filters, OrderBy.createdOn, + OrderDirection.asc, 0, cconfig.maxSubjects.get()); return searchResults.getArtifacts().stream() - .filter(searchedArtifactDto -> isCcompatManagedType(searchedArtifactDto.getArtifactType()) /* && shouldFilterState(fdeleted, searchedArtifactDto.getState())*/) - .map(SearchedArtifactDto::getArtifactId) - .collect(Collectors.toList()); + .filter(searchedArtifactDto -> isCcompatManagedType(searchedArtifactDto + .getArtifactType()) /* + * && shouldFilterState(fdeleted, searchedArtifactDto.getState()) + */) + .map(SearchedArtifactDto::getArtifactId).collect(Collectors.toList()); } @Override @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Read) - public Schema findSchemaByContent(String subject, SchemaInfo request, Boolean normalize, String groupId, Boolean deleted) throws Exception { + public Schema findSchemaByContent(String subject, SchemaInfo request, Boolean normalize, String groupId, + Boolean deleted) throws Exception { if (doesArtifactExist(subject, groupId)) { final boolean fnormalize = normalize == null ? Boolean.FALSE : normalize; final boolean fdeleted = deleted == null ? 
Boolean.FALSE : deleted; try { ArtifactVersionMetaDataDto amd; - amd = lookupSchema(groupId, subject, request.getSchema(), request.getReferences(), request.getSchemaType(), fnormalize); + amd = lookupSchema(groupId, subject, request.getSchema(), request.getReferences(), + request.getSchemaType(), fnormalize); if (amd.getState() != VersionState.DISABLED || fdeleted) { - StoredArtifactVersionDto storedArtifact = storage.getArtifactVersionContent(groupId, subject, amd.getVersion()); + StoredArtifactVersionDto storedArtifact = storage.getArtifactVersionContent(groupId, + subject, amd.getVersion()); return converter.convert(subject, storedArtifact); } else { - throw new SchemaNotFoundException(String.format("The given schema does not match any schema under the subject %s", subject)); + throw new SchemaNotFoundException(String.format( + "The given schema does not match any schema under the subject %s", subject)); } } catch (ArtifactNotFoundException anf) { - throw new SchemaNotFoundException(String.format("The given schema does not match any schema under the subject %s", subject)); + throw new SchemaNotFoundException(String + .format("The given schema does not match any schema under the subject %s", subject)); } } else { - //If the artifact does not exist there is no need for looking up the schema, just fail. + // If the artifact does not exist there is no need for looking up the schema, just fail. throw new ArtifactNotFoundException(groupId, subject); } } @Override - @Audited(extractParameters = {"0", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.ArtifactOnly, level = AuthorizedLevel.Write) public List deleteSubject(String subject, Boolean permanent, String groupId) throws Exception { // This will throw an exception if the artifact does not exist. storage.getArtifactMetaData(groupId, subject); - + final boolean fpermanent = permanent == null ? 
Boolean.FALSE : permanent; if (fpermanent) { return deleteSubjectPermanent(groupId, subject); } else if (isArtifactActive(subject, groupId)) { return deleteSubjectVersions(groupId, subject); } else { - //The artifact exist, it's in DISABLED state but the delete request is set to not permanent, throw ex. - throw new SubjectSoftDeletedException(String.format("Subject %s is in soft deleted state.", subject)); + // The artifact exist, it's in DISABLED state but the delete request is set to not permanent, + // throw ex. + throw new SubjectSoftDeletedException( + String.format("Subject %s is in soft deleted state.", subject)); } } private List deleteSubjectPermanent(String groupId, String subject) { if (isArtifactActive(subject, groupId)) { - throw new SubjectNotSoftDeletedException(String.format("Subject %s must be soft deleted first", subject)); + throw new SubjectNotSoftDeletedException( + String.format("Subject %s must be soft deleted first", subject)); } else { - return storage.deleteArtifact(groupId, subject).stream().map(VersionUtil::toInteger).map(converter::convertUnsigned).collect(Collectors.toList()); + return storage.deleteArtifact(groupId, subject).stream().map(VersionUtil::toInteger) + .map(converter::convertUnsigned).collect(Collectors.toList()); } } - //Deleting artifact versions means updating all the versions status to DISABLED. + // Deleting artifact versions means updating all the versions status to DISABLED. 
private List deleteSubjectVersions(String groupId, String subject) { List deletedVersions = storage.getArtifactVersions(groupId, subject); try { - EditableVersionMetaDataDto dto = EditableVersionMetaDataDto.builder() - .state(VersionState.DISABLED) + EditableVersionMetaDataDto dto = EditableVersionMetaDataDto.builder().state(VersionState.DISABLED) .build(); - deletedVersions.forEach(version -> storage.updateArtifactVersionMetaData(groupId, subject, version, dto)); + deletedVersions.forEach( + version -> storage.updateArtifactVersionMetaData(groupId, subject, version, dto)); } catch (InvalidArtifactStateException | InvalidVersionStateException ignored) { log.warn("Invalid artifact state transition", ignored); } - return deletedVersions.stream().map(VersionUtil::toLong).map(converter::convertUnsigned).sorted().collect(Collectors.toList()); + return deletedVersions.stream().map(VersionUtil::toLong).map(converter::convertUnsigned).sorted() + .collect(Collectors.toList()); } } diff --git a/app/src/main/java/io/apicurio/registry/config/RegistryConfigStorageAccessor.java b/app/src/main/java/io/apicurio/registry/config/RegistryConfigStorageAccessor.java index a4de071e95..520202ac17 100644 --- a/app/src/main/java/io/apicurio/registry/config/RegistryConfigStorageAccessor.java +++ b/app/src/main/java/io/apicurio/registry/config/RegistryConfigStorageAccessor.java @@ -1,12 +1,11 @@ package io.apicurio.registry.config; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; - import io.apicurio.common.apps.config.DynamicConfigStorage; import io.apicurio.common.apps.config.DynamicConfigStorageAccessor; import io.apicurio.registry.storage.RegistryStorage; import io.apicurio.registry.types.Current; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; @ApplicationScoped public class RegistryConfigStorageAccessor implements DynamicConfigStorageAccessor { diff --git 
a/app/src/main/java/io/apicurio/registry/config/RegistryStorageConfigCache.java b/app/src/main/java/io/apicurio/registry/config/RegistryStorageConfigCache.java index 3a77e76d54..df5baefabe 100644 --- a/app/src/main/java/io/apicurio/registry/config/RegistryStorageConfigCache.java +++ b/app/src/main/java/io/apicurio/registry/config/RegistryStorageConfigCache.java @@ -20,7 +20,8 @@ import static io.quarkus.scheduler.Scheduled.ConcurrentExecution.SKIP; @ApplicationScoped -public class RegistryStorageConfigCache extends RegistryStorageDecoratorBase implements RegistryStorageDecorator { +public class RegistryStorageConfigCache extends RegistryStorageDecoratorBase + implements RegistryStorageDecorator { private static final DynamicConfigPropertyDto NULL_DTO = new DynamicConfigPropertyDto(); @@ -50,7 +51,6 @@ public int order() { return RegistryStorageDecoratorOrderConstants.CONFIG_CACHE_DECORATOR; } - /** * @see io.apicurio.registry.storage.decorator.RegistryStorageDecorator#setConfigProperty(io.apicurio.common.apps.config.DynamicConfigPropertyDto) */ diff --git a/app/src/main/java/io/apicurio/registry/content/ContentHandleMessageBodyWriter.java b/app/src/main/java/io/apicurio/registry/content/ContentHandleMessageBodyWriter.java index de15cf3ad7..dccdfbcb72 100644 --- a/app/src/main/java/io/apicurio/registry/content/ContentHandleMessageBodyWriter.java +++ b/app/src/main/java/io/apicurio/registry/content/ContentHandleMessageBodyWriter.java @@ -1,12 +1,6 @@ package io.apicurio.registry.content; import io.apicurio.registry.utils.IoUtil; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.lang.annotation.Annotation; -import java.lang.reflect.Type; import jakarta.ws.rs.Produces; import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.MediaType; @@ -14,24 +8,25 @@ import jakarta.ws.rs.ext.MessageBodyWriter; import jakarta.ws.rs.ext.Provider; +import java.io.IOException; +import java.io.InputStream; +import 
java.io.OutputStream; +import java.lang.annotation.Annotation; +import java.lang.reflect.Type; + @Provider @Produces(MediaType.WILDCARD) public class ContentHandleMessageBodyWriter implements MessageBodyWriter { @Override - public boolean isWriteable(Class type, Type genericType, Annotation[] annotations, MediaType mediaType) { + public boolean isWriteable(Class type, Type genericType, Annotation[] annotations, + MediaType mediaType) { return ContentHandle.class.isAssignableFrom(type); } @Override - public void writeTo( - ContentHandle content, - Class type, - Type genericType, - Annotation[] annotations, - MediaType mediaType, - MultivaluedMap httpHeaders, - OutputStream entityStream - ) throws IOException, WebApplicationException { + public void writeTo(ContentHandle content, Class type, Type genericType, Annotation[] annotations, + MediaType mediaType, MultivaluedMap httpHeaders, OutputStream entityStream) + throws IOException, WebApplicationException { try (InputStream stream = content.stream()) { IoUtil.copy(stream, entityStream); } diff --git a/app/src/main/java/io/apicurio/registry/downloads/DownloadReaper.java b/app/src/main/java/io/apicurio/registry/downloads/DownloadReaper.java index 466ce407ed..98d609029a 100644 --- a/app/src/main/java/io/apicurio/registry/downloads/DownloadReaper.java +++ b/app/src/main/java/io/apicurio/registry/downloads/DownloadReaper.java @@ -13,7 +13,6 @@ /** * Periodically cleanup data of downloads marked as deleted. 
- * */ @ApplicationScoped public class DownloadReaper { @@ -31,8 +30,8 @@ public class DownloadReaper { @Scheduled(delay = 2, concurrentExecution = SKIP, every = "{apicurio.downloads.reaper.every}") void run() { try { - if(storage.isReady()) { - if(!storage.isReadOnly()) { + if (storage.isReady()) { + if (!storage.isReadOnly()) { log.debug("Running download reaper job at {}", Instant.now()); reap(); } else { @@ -41,8 +40,7 @@ void run() { } else { log.debug("Skipping download reaper job because the storage is not ready."); } - } - catch (Exception ex) { + } catch (Exception ex) { log.error("Exception thrown when running download reaper job", ex); } } diff --git a/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsConfiguration.java b/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsConfiguration.java index decac9be83..462fb129c4 100644 --- a/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsConfiguration.java +++ b/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsConfiguration.java @@ -6,7 +6,6 @@ /** * NOTE: Follow the naming conventions from {@link io.apicurio.registry.rest.v3.beans.Limits} - * */ @Getter @Setter diff --git a/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsConfigurationProducer.java b/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsConfigurationProducer.java index 12f5ce6b5d..3ee504237d 100644 --- a/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsConfigurationProducer.java +++ b/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsConfigurationProducer.java @@ -12,7 +12,7 @@ public class RegistryLimitsConfigurationProducer { @Inject Logger logger; - //All limits to -1 , which means by default all limits are disabled + // All limits to -1 , which means by default all limits are disabled @Inject @ConfigProperty(defaultValue = "-1", name = "apicurio.limits.config.max-total-schemas") @@ -33,7 +33,7 @@ public class RegistryLimitsConfigurationProducer { @Info(category = 
"limits", description = "Max versions per artifacts", availableSince = "2.1.0.Final") Long defaultMaxVersionsPerArtifact; - //TODO content size + // TODO content size @Inject @ConfigProperty(defaultValue = "-1", name = "apicurio.limits.config.max-artifact-properties") @Info(category = "limits", description = "Max artifact properties", availableSince = "2.1.0.Final") @@ -70,7 +70,6 @@ public class RegistryLimitsConfigurationProducer { @Info(category = "limits", description = "Max artifact requests per second", availableSince = "2.2.3.Final") Long defaultMaxRequestsPerSecond; - private boolean isConfigured = true; private RegistryLimitsConfiguration defaultLimitsConfiguration; diff --git a/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsService.java b/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsService.java index 81515a8f5b..343d5b8a09 100644 --- a/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsService.java +++ b/app/src/main/java/io/apicurio/registry/limits/RegistryLimitsService.java @@ -16,12 +16,11 @@ /** * Component that provides the logic to enforce the limits in the usage of the registry - * */ @ApplicationScoped public class RegistryLimitsService { - //FIXME improve error messages + // FIXME improve error messages private static final String MAX_TOTAL_SCHEMAS_EXCEEDED_MSG = "Maximum number of artifact versions exceeded"; private static final String MAX_SCHEMA_SIZE_EXCEEDED_MSG = "Maximum size of artifact version exceeded"; private static final String MAX_ARTIFACTS_EXCEEDED_MSG = "Maximum number of artifacts exceeded"; @@ -44,7 +43,7 @@ public class RegistryLimitsService { private LimitsCheckResult checkTotalSchemas() { if (isLimitDisabled(RegistryLimitsConfiguration::getMaxTotalSchemasCount)) { - //limits check disabled + // limits check disabled return LimitsCheckResult.ok(); } @@ -53,12 +52,14 @@ private LimitsCheckResult checkTotalSchemas() { if (currentTotalSchemas < 
registryLimitsConfiguration.getMaxTotalSchemasCount()) { return LimitsCheckResult.ok(); } else { - log.debug("Limit reached, current total schemas {} , max total schemas {}", currentTotalSchemas, registryLimitsConfiguration.getMaxTotalSchemasCount()); + log.debug("Limit reached, current total schemas {} , max total schemas {}", currentTotalSchemas, + registryLimitsConfiguration.getMaxTotalSchemasCount()); return LimitsCheckResult.disallowed(MAX_TOTAL_SCHEMAS_EXCEEDED_MSG); } } - public LimitsCheckResult canCreateArtifact(EditableArtifactMetaDataDto meta, ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData) { + public LimitsCheckResult canCreateArtifact(EditableArtifactMetaDataDto meta, + ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData) { LimitsCheckResult mr = checkMetaData(meta); if (!mr.isAllowed()) { @@ -85,7 +86,7 @@ public LimitsCheckResult canCreateArtifact(EditableArtifactMetaDataDto meta, Con } if (isLimitDisabled(RegistryLimitsConfiguration::getMaxArtifactsCount)) { - //limits check disabled + // limits check disabled return LimitsCheckResult.ok(); } @@ -94,7 +95,8 @@ public LimitsCheckResult canCreateArtifact(EditableArtifactMetaDataDto meta, Con if (currentArtifacts < registryLimitsConfiguration.getMaxArtifactsCount()) { return LimitsCheckResult.ok(); } else { - log.debug("Limit reached, current artifacts {} , max artifacts allowed {}", currentArtifacts, registryLimitsConfiguration.getMaxArtifactsCount()); + log.debug("Limit reached, current artifacts {} , max artifacts allowed {}", currentArtifacts, + registryLimitsConfiguration.getMaxArtifactsCount()); return LimitsCheckResult.disallowed(MAX_ARTIFACTS_EXCEEDED_MSG); } } @@ -108,12 +110,14 @@ private LimitsCheckResult checkSchemaSize(ContentHandle content) { if (size <= registryLimitsConfiguration.getMaxSchemaSizeBytes()) { return LimitsCheckResult.ok(); } else { - log.debug("Limit reached, schema size is {} , max schema size is {}", size, 
registryLimitsConfiguration.getMaxSchemaSizeBytes()); + log.debug("Limit reached, schema size is {} , max schema size is {}", size, + registryLimitsConfiguration.getMaxSchemaSizeBytes()); return LimitsCheckResult.disallowed(MAX_SCHEMA_SIZE_EXCEEDED_MSG); } } - public LimitsCheckResult canCreateArtifactVersion(String groupId, String artifactId, EditableVersionMetaDataDto meta, ContentHandle content) { + public LimitsCheckResult canCreateArtifactVersion(String groupId, String artifactId, + EditableVersionMetaDataDto meta, ContentHandle content) { LimitsCheckResult mr = checkMetaData(meta); if (!mr.isAllowed()) { @@ -131,16 +135,20 @@ public LimitsCheckResult canCreateArtifactVersion(String groupId, String artifac } if (isLimitDisabled(RegistryLimitsConfiguration::getMaxVersionsPerArtifactCount)) { - //limits check disabled + // limits check disabled return LimitsCheckResult.ok(); } - long currentArtifactVersions = storageMetricsStore.getOrInitializeArtifactVersionsCounter(groupId, artifactId); + long currentArtifactVersions = storageMetricsStore.getOrInitializeArtifactVersionsCounter(groupId, + artifactId); if (currentArtifactVersions < registryLimitsConfiguration.getMaxVersionsPerArtifactCount()) { return LimitsCheckResult.ok(); } else { - log.debug("Limit reached, current versions per artifact for artifact {}/{} {} , max versions per artifacts allowed {}", groupId, artifactId, currentArtifactVersions, registryLimitsConfiguration.getMaxVersionsPerArtifactCount()); + log.debug( + "Limit reached, current versions per artifact for artifact {}/{} {} , max versions per artifacts allowed {}", + groupId, artifactId, currentArtifactVersions, + registryLimitsConfiguration.getMaxVersionsPerArtifactCount()); return LimitsCheckResult.disallowed(MAX_VERSIONS_PER_ARTIFACT_EXCEEDED_MSG); } } @@ -200,7 +208,7 @@ public LimitsCheckResult checkMetaData(EditableVersionMetaDataDto meta) { } protected void checkName(String name, List errorMessages) { - //name is limited at db level to 
512 chars + // name is limited at db level to 512 chars if (name != null && isLimitEnabled(RegistryLimitsConfiguration::getMaxArtifactNameLengthChars)) { if (name.length() > registryLimitsConfiguration.getMaxArtifactNameLengthChars()) { errorMessages.add(MAX_NAME_LENGTH_EXCEEDED_MSG); @@ -209,8 +217,9 @@ protected void checkName(String name, List errorMessages) { } protected void checkDescription(String description, List errorMessages) { - //description is limited at db level to 1024 chars - if (description != null && isLimitEnabled(RegistryLimitsConfiguration::getMaxArtifactDescriptionLengthChars)) { + // description is limited at db level to 1024 chars + if (description != null + && isLimitEnabled(RegistryLimitsConfiguration::getMaxArtifactDescriptionLengthChars)) { if (description.length() > registryLimitsConfiguration.getMaxArtifactDescriptionLengthChars()) { errorMessages.add(MAX_DESC_LENGTH_EXCEEDED_MSG); @@ -224,23 +233,24 @@ protected void checkDescription(String description, List errorMessages) */ protected void checkLabels(Map labels, List errorMessages) { if (labels != null) { - if (isLimitEnabled(RegistryLimitsConfiguration::getMaxArtifactPropertiesCount) && - labels.size() > registryLimitsConfiguration.getMaxArtifactPropertiesCount()) { + if (isLimitEnabled(RegistryLimitsConfiguration::getMaxArtifactPropertiesCount) + && labels.size() > registryLimitsConfiguration.getMaxArtifactPropertiesCount()) { errorMessages.add(MAX_LABELS_EXCEEDED_MSG); - } else if (isLimitEnabled(RegistryLimitsConfiguration::getMaxPropertyKeySizeBytes) || - isLimitEnabled(RegistryLimitsConfiguration::getMaxPropertyValueSizeBytes)){ + } else if (isLimitEnabled(RegistryLimitsConfiguration::getMaxPropertyKeySizeBytes) + || isLimitEnabled(RegistryLimitsConfiguration::getMaxPropertyValueSizeBytes)) { labels.entrySet().forEach(e -> { - if (isLimitEnabled(RegistryLimitsConfiguration::getMaxPropertyKeySizeBytes) && - e.getKey().length() > 
registryLimitsConfiguration.getMaxPropertyKeySizeBytes()) { + if (isLimitEnabled(RegistryLimitsConfiguration::getMaxPropertyKeySizeBytes) && e.getKey() + .length() > registryLimitsConfiguration.getMaxPropertyKeySizeBytes()) { errorMessages.add(MAX_LABEL_KEY_SIZE_EXCEEDED_MSG); } - if (isLimitEnabled(RegistryLimitsConfiguration::getMaxPropertyValueSizeBytes) && - e.getValue().length() > registryLimitsConfiguration.getMaxPropertyValueSizeBytes()) { + if (isLimitEnabled(RegistryLimitsConfiguration::getMaxPropertyValueSizeBytes) + && e.getValue().length() > registryLimitsConfiguration + .getMaxPropertyValueSizeBytes()) { errorMessages.add(MAX_LABEL_VALUE_SIZE_EXCEEDED_MSG); } }); diff --git a/app/src/main/java/io/apicurio/registry/limits/RegistryStorageLimitsEnforcer.java b/app/src/main/java/io/apicurio/registry/limits/RegistryStorageLimitsEnforcer.java index 0e5fedc8f2..deca93336d 100644 --- a/app/src/main/java/io/apicurio/registry/limits/RegistryStorageLimitsEnforcer.java +++ b/app/src/main/java/io/apicurio/registry/limits/RegistryStorageLimitsEnforcer.java @@ -22,13 +22,14 @@ import java.util.function.Supplier; /** - * Decorator of {@link RegistryStorage} that applies limits enforcement, with this is possible to limit how many artifacts can be created in registry... - * All of that is abstracted with the LimitsService and the LimitsConfigurationService - * + * Decorator of {@link RegistryStorage} that applies limits enforcement, with this is possible to limit how + * many artifacts can be created in registry... All of that is abstracted with the LimitsService and the + * LimitsConfigurationService */ @ApplicationScoped // TODO Importing is not covered under limits! 
-public class RegistryStorageLimitsEnforcer extends RegistryStorageDecoratorBase implements RegistryStorageDecorator { +public class RegistryStorageLimitsEnforcer extends RegistryStorageDecoratorBase + implements RegistryStorageDecorator { @Inject ThreadContext threadContext; @@ -55,54 +56,62 @@ public int order() { return RegistryStorageDecoratorOrderConstants.LIMITS_ENFORCER_DECORATOR; } - @Override - public Pair createArtifact(String groupId, String artifactId, - String artifactType, EditableArtifactMetaDataDto artifactMetaData, String version, ContentWrapperDto versionContent, - EditableVersionMetaDataDto versionMetaData, List versionBranches) throws RegistryStorageException { - Pair rval = withLimitsCheck(() -> limitsService.canCreateArtifact(artifactMetaData, versionContent, versionMetaData)) - .execute(() -> super.createArtifact(groupId, artifactId, artifactType, artifactMetaData, version, versionContent, versionMetaData, versionBranches)); + public Pair createArtifact(String groupId, + String artifactId, String artifactType, EditableArtifactMetaDataDto artifactMetaData, + String version, ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, + List versionBranches) throws RegistryStorageException { + Pair rval = withLimitsCheck( + () -> limitsService.canCreateArtifact(artifactMetaData, versionContent, versionMetaData)) + .execute(() -> super.createArtifact(groupId, artifactId, artifactType, artifactMetaData, + version, versionContent, versionMetaData, versionBranches)); limitsService.artifactCreated(); return rval; } @Override - public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, String artifactType, - ContentWrapperDto content, EditableVersionMetaDataDto metaData, List branches) throws RegistryStorageException { - ArtifactVersionMetaDataDto dto = withLimitsCheck(() -> limitsService.canCreateArtifactVersion(groupId, artifactId, null, content.getContent())) - .execute(() -> 
super.createArtifactVersion(groupId, artifactId, version, artifactType, content, metaData, branches)); + public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, + String artifactType, ContentWrapperDto content, EditableVersionMetaDataDto metaData, + List branches) throws RegistryStorageException { + ArtifactVersionMetaDataDto dto = withLimitsCheck( + () -> limitsService.canCreateArtifactVersion(groupId, artifactId, null, content.getContent())) + .execute(() -> super.createArtifactVersion(groupId, artifactId, version, artifactType, + content, metaData, branches)); limitsService.artifactVersionCreated(groupId, artifactId); return dto; } /** - * @see io.apicurio.registry.storage.decorator.RegistryStorageDecorator#updateArtifactMetaData(java.lang.String, java.lang.String, io.apicurio.registry.storage.dto.EditableArtifactMetaDataDto) + * @see io.apicurio.registry.storage.decorator.RegistryStorageDecorator#updateArtifactMetaData(java.lang.String, + * java.lang.String, io.apicurio.registry.storage.dto.EditableArtifactMetaDataDto) */ @Override - public void updateArtifactMetaData(String groupId, String artifactId, EditableArtifactMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException { - withLimitsCheck(() -> limitsService.checkMetaData(metaData)) - .execute(() -> { - super.updateArtifactMetaData(groupId, artifactId, metaData); - return null; - }); + public void updateArtifactMetaData(String groupId, String artifactId, + EditableArtifactMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException { + withLimitsCheck(() -> limitsService.checkMetaData(metaData)).execute(() -> { + super.updateArtifactMetaData(groupId, artifactId, metaData); + return null; + }); } /** - * @see io.apicurio.registry.storage.decorator.RegistryStorageDecoratorBase#updateArtifactVersionMetaData(java.lang.String, java.lang.String, java.lang.String, 
io.apicurio.registry.storage.dto.EditableVersionMetaDataDto) + * @see io.apicurio.registry.storage.decorator.RegistryStorageDecoratorBase#updateArtifactVersionMetaData(java.lang.String, + * java.lang.String, java.lang.String, io.apicurio.registry.storage.dto.EditableVersionMetaDataDto) */ @Override - public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, EditableVersionMetaDataDto metaData) + public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, + EditableVersionMetaDataDto metaData) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException { - withLimitsCheck(() -> limitsService.checkMetaData(metaData)) - .execute(() -> { - super.updateArtifactVersionMetaData(groupId, artifactId, version, metaData); - return null; - }); + withLimitsCheck(() -> limitsService.checkMetaData(metaData)).execute(() -> { + super.updateArtifactVersionMetaData(groupId, artifactId, version, metaData); + return null; + }); } /** - * @see io.apicurio.registry.storage.decorator.RegistryStorageDecorator#deleteArtifact(java.lang.String, java.lang.String) + * @see io.apicurio.registry.storage.decorator.RegistryStorageDecorator#deleteArtifact(java.lang.String, + * java.lang.String) */ @Override public List deleteArtifact(String groupId, String artifactId) @@ -122,7 +131,8 @@ public void deleteArtifacts(String groupId) throws RegistryStorageException { } /** - * @see io.apicurio.registry.storage.decorator.RegistryStorageDecorator#deleteArtifactVersion(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.storage.decorator.RegistryStorageDecorator#deleteArtifactVersion(java.lang.String, + * java.lang.String, java.lang.String) */ @Override public void deleteArtifactVersion(String groupId, String artifactId, String version) @@ -131,18 +141,17 @@ public void deleteArtifactVersion(String groupId, String artifactId, String vers limitsService.artifactVersionDeleted(groupId, 
artifactId); } - /** - * Notice the "threadContext.withContextCapture" because of using CompletionStage it's possible that certain operations may be executed in different threads. - * We need context propagation to move the ThreadLocale context - * from one thread to another, that's why we use withContextCapture + * Notice the "threadContext.withContextCapture" because of using CompletionStage it's possible that + * certain operations may be executed in different threads. We need context propagation to move the + * ThreadLocale context from one thread to another, that's why we use withContextCapture * * @param checker * @return */ public LimitedActionExecutor withLimitsCheck(LimitsChecker checker) { return new LimitedActionExecutor() { - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({ "unchecked", "rawtypes" }) @Override public T execute(LimitedAction action) { LimitsCheckResult r = checker.get(); @@ -159,17 +168,14 @@ public T execute(LimitedAction action) { }; } - @FunctionalInterface private interface LimitsChecker extends Supplier { } - @FunctionalInterface private interface LimitedAction extends Supplier { } - @FunctionalInterface private interface LimitedActionExecutor { diff --git a/app/src/main/java/io/apicurio/registry/metrics/CustomMetricsConfiguration.java b/app/src/main/java/io/apicurio/registry/metrics/CustomMetricsConfiguration.java index 2dcd956f7a..716d8ff67d 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/CustomMetricsConfiguration.java +++ b/app/src/main/java/io/apicurio/registry/metrics/CustomMetricsConfiguration.java @@ -3,7 +3,6 @@ import io.micrometer.core.instrument.Meter; import io.micrometer.core.instrument.config.MeterFilter; import io.micrometer.core.instrument.distribution.DistributionStatisticConfig; - import jakarta.enterprise.inject.Produces; import jakarta.inject.Singleton; @@ -13,16 +12,15 @@ public class CustomMetricsConfiguration { @Produces @Singleton public MeterFilter enableHistogram() { - double 
factor = 1000000000; //to convert slos to seconds + double factor = 1000000000; // to convert slos to seconds return new MeterFilter() { @Override public DistributionStatisticConfig configure(Meter.Id id, DistributionStatisticConfig config) { - if(id.getName().startsWith(MetricsConstants.REST_REQUESTS)) { - return DistributionStatisticConfig.builder() - .percentiles(0.5, 0.95, 0.99, 0.9995) - .serviceLevelObjectives(0.1 * factor, 0.25 * factor, 0.5 * factor, 1.0 * factor, 2.0 * factor, 5.0 * factor, 10.0 * factor) - .build() - .merge(config); + if (id.getName().startsWith(MetricsConstants.REST_REQUESTS)) { + return DistributionStatisticConfig.builder().percentiles(0.5, 0.95, 0.99, 0.9995) + .serviceLevelObjectives(0.1 * factor, 0.25 * factor, 0.5 * factor, 1.0 * factor, + 2.0 * factor, 5.0 * factor, 10.0 * factor) + .build().merge(config); } return config; } diff --git a/app/src/main/java/io/apicurio/registry/metrics/MetricsConstants.java b/app/src/main/java/io/apicurio/registry/metrics/MetricsConstants.java index 261be8e591..666e2e3789 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/MetricsConstants.java +++ b/app/src/main/java/io/apicurio/registry/metrics/MetricsConstants.java @@ -3,10 +3,8 @@ /** * Metrics naming constants. *

- * See: - * - https://micrometer.io/docs/concepts#_naming_meters - * - https://prometheus.io/docs/practices/naming/ (Micrometer abstracts some naming aspects.) - * + * See: - https://micrometer.io/docs/concepts#_naming_meters - https://prometheus.io/docs/practices/naming/ + * (Micrometer abstracts some naming aspects.) */ public interface MetricsConstants { diff --git a/app/src/main/java/io/apicurio/registry/metrics/RestMetricsResponseFilter.java b/app/src/main/java/io/apicurio/registry/metrics/RestMetricsResponseFilter.java index fcc7379849..a58a794543 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/RestMetricsResponseFilter.java +++ b/app/src/main/java/io/apicurio/registry/metrics/RestMetricsResponseFilter.java @@ -3,7 +3,6 @@ import io.micrometer.core.instrument.Counter; import io.micrometer.core.instrument.MeterRegistry; import io.micrometer.core.instrument.Timer; - import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.inject.Default; import jakarta.inject.Inject; @@ -15,6 +14,7 @@ import jakarta.ws.rs.container.ResourceInfo; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.ext.Provider; + import java.io.IOException; import java.util.regex.Pattern; @@ -27,9 +27,7 @@ import static io.apicurio.registry.metrics.MetricsConstants.REST_REQUESTS_TAG_STATUS_CODE_FAMILY; /** - * Filters REST API requests and responses to report metrics - * about them. - * + * Filters REST API requests and responses to report metrics about them. */ @Provider @Default @@ -44,7 +42,8 @@ public class RestMetricsResponseFilter implements ContainerRequestFilter, Contai @Context private ResourceInfo resourceInfo; - // I couldn't figure out an easy way to use an annotation that can be applied on the whole REST resource class, + // I couldn't figure out an easy way to use an annotation that can be applied on the whole REST resource + // class, // instead of on each method (or jakarta.ws.rs.core.Application). 
// See https://docs.oracle.com/javaee/7/api/javax/ws/rs/NameBinding.html static final Pattern ENABLED_PATTERN = Pattern.compile("/apis/.*"); @@ -66,9 +65,7 @@ public void filter(ContainerRequestContext requestContext, ContainerResponseCont return; } - Timer timer = Timer - .builder(REST_REQUESTS) - .description(REST_REQUESTS_DESCRIPTION) + Timer timer = Timer.builder(REST_REQUESTS).description(REST_REQUESTS_DESCRIPTION) .tag(REST_REQUESTS_TAG_PATH, this.getPath()) .tag(REST_REQUESTS_TAG_METHOD, requestContext.getMethod()) .tag(REST_REQUESTS_TAG_STATUS_CODE_FAMILY, this.getStatusGroup(responseContext.getStatus())) @@ -77,13 +74,11 @@ public void filter(ContainerRequestContext requestContext, ContainerResponseCont Timer.Sample sample = (Timer.Sample) requestContext.getProperty(TIMER_SAMPLE_CONTEXT_PROPERTY_NAME); sample.stop(timer); - Counter.builder(REST_REQUESTS_COUNTER) - .description(REST_REQUESTS_COUNTER_DESCRIPTION) + Counter.builder(REST_REQUESTS_COUNTER).description(REST_REQUESTS_COUNTER_DESCRIPTION) .tag(REST_REQUESTS_TAG_PATH, this.getPath()) .tag(REST_REQUESTS_TAG_METHOD, requestContext.getMethod()) .tag(REST_REQUESTS_TAG_STATUS_CODE_FAMILY, this.getStatusGroup(responseContext.getStatus())) - .register(registry) - .increment(); + .register(registry).increment(); } private String getStatusGroup(int statusCode) { diff --git a/app/src/main/java/io/apicurio/registry/metrics/StorageMetricsApply.java b/app/src/main/java/io/apicurio/registry/metrics/StorageMetricsApply.java index ee9f1de9dd..0b8f27fe9c 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/StorageMetricsApply.java +++ b/app/src/main/java/io/apicurio/registry/metrics/StorageMetricsApply.java @@ -1,6 +1,7 @@ package io.apicurio.registry.metrics; import jakarta.interceptor.InterceptorBinding; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -8,6 +9,6 @@ @InterceptorBinding @Retention(RetentionPolicy.RUNTIME) 
-@Target({ElementType.TYPE, ElementType.METHOD}) +@Target({ ElementType.TYPE, ElementType.METHOD }) public @interface StorageMetricsApply { } diff --git a/app/src/main/java/io/apicurio/registry/metrics/StorageMetricsInterceptor.java b/app/src/main/java/io/apicurio/registry/metrics/StorageMetricsInterceptor.java index b515b3531e..a1546379f0 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/StorageMetricsInterceptor.java +++ b/app/src/main/java/io/apicurio/registry/metrics/StorageMetricsInterceptor.java @@ -2,12 +2,12 @@ import io.micrometer.core.instrument.MeterRegistry; import io.micrometer.core.instrument.Timer; -import org.eclipse.microprofile.context.ThreadContext; - import jakarta.inject.Inject; import jakarta.interceptor.AroundInvoke; import jakarta.interceptor.Interceptor; import jakarta.interceptor.InvocationContext; +import org.eclipse.microprofile.context.ThreadContext; + import java.lang.reflect.Method; import java.util.concurrent.CompletionStage; @@ -18,7 +18,6 @@ /** * Fail readiness check if the duration of processing a artifactStore operation is too high. 
- * */ @Interceptor @StorageMetricsApply @@ -51,8 +50,8 @@ public Object intercept(InvocationContext context) throws Exception { if (result instanceof CompletionStage) { CompletionStage r = (CompletionStage) result; - threadContext.withContextCapture(r).whenComplete((ok, ex) -> - this.record(sample, context.getMethod(), ex == null)); // TODO + threadContext.withContextCapture(r) + .whenComplete((ok, ex) -> this.record(sample, context.getMethod(), ex == null)); // TODO return r; } @@ -61,12 +60,9 @@ public Object intercept(InvocationContext context) throws Exception { } private void record(Timer.Sample sample, Method method, boolean success) { - Timer timer = Timer - .builder(STORAGE_METHOD_CALL) - .description(STORAGE_METHOD_CALL_DESCRIPTION) - .tag(STORAGE_METHOD_CALL_TAG_METHOD, getMethodString(method)) - .tag(STORAGE_METHOD_CALL_TAG_SUCCESS, String.valueOf(success)) - .register(registry); + Timer timer = Timer.builder(STORAGE_METHOD_CALL).description(STORAGE_METHOD_CALL_DESCRIPTION) + .tag(STORAGE_METHOD_CALL_TAG_METHOD, getMethodString(method)) + .tag(STORAGE_METHOD_CALL_TAG_SUCCESS, String.valueOf(success)).register(registry); sample.stop(timer); } diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/AbstractErrorCounterHealthCheck.java b/app/src/main/java/io/apicurio/registry/metrics/health/AbstractErrorCounterHealthCheck.java index 24edc8d294..857981d707 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/AbstractErrorCounterHealthCheck.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/AbstractErrorCounterHealthCheck.java @@ -6,7 +6,6 @@ /** * Abstract class containing common logic for health checks based on an error counter. 
- * */ public abstract class AbstractErrorCounterHealthCheck { @@ -18,19 +17,22 @@ public abstract class AbstractErrorCounterHealthCheck { private Duration counterResetWindowDuration; private Integer configErrorThreshold; - protected void init(Integer configErrorThreshold, Integer configCounterResetWindowDurationSec, Integer configStatusResetWindowDurationSec) { + protected void init(Integer configErrorThreshold, Integer configCounterResetWindowDurationSec, + Integer configStatusResetWindowDurationSec) { if (configErrorThreshold == null || configErrorThreshold < 0) { - throw new IllegalArgumentException("Illegal configuration value of " + - "'registry.metrics.[...].errorThreshold': '" + configErrorThreshold + "'"); + throw new IllegalArgumentException("Illegal configuration value of " + + "'registry.metrics.[...].errorThreshold': '" + configErrorThreshold + "'"); } this.configErrorThreshold = configErrorThreshold; if (configCounterResetWindowDurationSec == null || configCounterResetWindowDurationSec < 1) { - throw new IllegalArgumentException("Illegal configuration value of " + - "'registry.metrics.[...].counterResetWindowDurationSec': '" + configCounterResetWindowDurationSec + "'"); + throw new IllegalArgumentException("Illegal configuration value of " + + "'registry.metrics.[...].counterResetWindowDurationSec': '" + + configCounterResetWindowDurationSec + "'"); } if (configStatusResetWindowDurationSec == null) { - throw new IllegalArgumentException("Illegal configuration value of " + - "'registry.metrics.[...].statusResetWindowDurationSec': '" + configCounterResetWindowDurationSec + "'"); + throw new IllegalArgumentException("Illegal configuration value of " + + "'registry.metrics.[...].statusResetWindowDurationSec': '" + + configCounterResetWindowDurationSec + "'"); } counterResetWindowDuration = Duration.ofSeconds(configCounterResetWindowDurationSec); nextCounterReset = Instant.now().plus(counterResetWindowDuration); @@ -43,7 +45,8 @@ protected synchronized 
void suspectSuper() { nextCounterReset = Instant.now().plus(counterResetWindowDuration); if (++errorCounter > configErrorThreshold) { up = false; - statusResetWindowDuration.ifPresent(duration -> nextStatusReset = Optional.of(Instant.now().plus(duration))); + statusResetWindowDuration + .ifPresent(duration -> nextStatusReset = Optional.of(Instant.now().plus(duration))); } } @@ -52,7 +55,8 @@ protected synchronized void callSuper() { nextStatusReset = Optional.empty(); up = true; // Next 'if' will reset the error count } - if (up && nextCounterReset != null && Instant.now().isAfter(nextCounterReset)) { // Do not reset the count if not up + if (up && nextCounterReset != null && Instant.now().isAfter(nextCounterReset)) { // Do not reset the + // count if not up nextCounterReset = null; errorCounter = 0; } diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/LivenessCheck.java b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/LivenessCheck.java index 62229406fe..83abe55171 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/LivenessCheck.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/LivenessCheck.java @@ -2,7 +2,6 @@ /** * Common interface for a liveness check. 
- * */ public interface LivenessCheck { diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/LivenessUtil.java b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/LivenessUtil.java index 7f4d12a37b..d459ede5f9 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/LivenessUtil.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/LivenessUtil.java @@ -1,17 +1,15 @@ package io.apicurio.registry.metrics.health.liveness; -import java.util.List; -import java.util.Optional; -import java.util.Set; - +import io.apicurio.common.apps.config.Info; +import io.apicurio.registry.services.http.RegistryExceptionMapperService; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; - import org.eclipse.microprofile.config.inject.ConfigProperty; import org.slf4j.Logger; -import io.apicurio.common.apps.config.Info; -import io.apicurio.registry.services.http.RegistryExceptionMapperService; +import java.util.List; +import java.util.Optional; +import java.util.Set; @ApplicationScoped public class LivenessUtil { diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessApply.java b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessApply.java index 28375ee54b..7283899f52 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessApply.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessApply.java @@ -1,14 +1,14 @@ package io.apicurio.registry.metrics.health.liveness; +import jakarta.interceptor.InterceptorBinding; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import jakarta.interceptor.InterceptorBinding; - @InterceptorBinding @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.TYPE, 
ElementType.METHOD}) +@Target({ ElementType.TYPE, ElementType.METHOD }) public @interface PersistenceExceptionLivenessApply { } diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessCheck.java b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessCheck.java index da993e48a6..1c71c7ea0c 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessCheck.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessCheck.java @@ -14,19 +14,18 @@ /** * Fail liveness check if the number of exceptions thrown by artifactStore is too high. - * */ @ApplicationScoped @Liveness @Default -public class PersistenceExceptionLivenessCheck extends AbstractErrorCounterHealthCheck implements HealthCheck, LivenessCheck { +public class PersistenceExceptionLivenessCheck extends AbstractErrorCounterHealthCheck + implements HealthCheck, LivenessCheck { @Inject Logger log; /** - * Maximum number of exceptions raised by artifactStore implementation, - * as captured by this interceptor, + * Maximum number of exceptions raised by artifactStore implementation, as captured by this interceptor, * before the liveness check fails. */ @ConfigProperty(name = "apicurio.metrics.PersistenceExceptionLivenessCheck.errorThreshold", defaultValue = "1") @@ -34,17 +33,16 @@ public class PersistenceExceptionLivenessCheck extends AbstractErrorCounterHealt Integer configErrorThreshold; /** - * The counter is reset after some time without errors. - * i.e. to fail the check after 2 errors in a minute, set the threshold to 1 and this configuration option - * to 60. - * TODO report the absolute count as a metric? + * The counter is reset after some time without errors. i.e. to fail the check after 2 errors in a minute, + * set the threshold to 1 and this configuration option to 60. TODO report the absolute count as a metric? 
*/ @ConfigProperty(name = "apicurio.metrics.PersistenceExceptionLivenessCheck.counterResetWindowDuration.seconds", defaultValue = "60") @Info(category = "health", description = "Counter reset window duration of persistence liveness check", availableSince = "1.0.2.Final") Integer configCounterResetWindowDurationSec; /** - * If set to a positive value, reset the liveness status after this time window passes without any further errors. + * If set to a positive value, reset the liveness status after this time window passes without any further + * errors. */ @ConfigProperty(name = "apicurio.metrics.PersistenceExceptionLivenessCheck.statusResetWindowDuration.seconds", defaultValue = "300") @Info(category = "health", description = "Status reset window duration of persistence liveness check", availableSince = "1.0.2.Final") @@ -62,11 +60,8 @@ void init() { @Override public synchronized HealthCheckResponse call() { callSuper(); - return HealthCheckResponse.builder() - .name("PersistenceExceptionLivenessCheck") - .withData("errorCount", errorCounter) - .status(up) - .build(); + return HealthCheckResponse.builder().name("PersistenceExceptionLivenessCheck") + .withData("errorCount", errorCounter).status(up).build(); } @Override @@ -76,18 +71,22 @@ public void suspect(String reason) { } super.suspectSuper(); if (disableLogging != Boolean.TRUE) { - log.info("After this event, the error counter is {} (out of the maximum {} allowed).", errorCounter, configErrorThreshold); + log.info("After this event, the error counter is {} (out of the maximum {} allowed).", + errorCounter, configErrorThreshold); } } @Override public void suspectWithException(Throwable reason) { if (disableLogging != Boolean.TRUE) { - log.warn("Liveness problem suspected in PersistenceExceptionLivenessCheck because of an exception: ", reason); + log.warn( + "Liveness problem suspected in PersistenceExceptionLivenessCheck because of an exception: ", + reason); } super.suspectSuper(); if (disableLogging != 
Boolean.TRUE) { - log.info("After this event, the error counter is {} (out of the maximum {} allowed).", errorCounter, configErrorThreshold); + log.info("After this event, the error counter is {} (out of the maximum {} allowed).", + errorCounter, configErrorThreshold); } } } diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessInterceptor.java b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessInterceptor.java index 42a2642f0c..eaf2ef9583 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessInterceptor.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/PersistenceExceptionLivenessInterceptor.java @@ -1,7 +1,6 @@ package io.apicurio.registry.metrics.health.liveness; import jakarta.annotation.Priority; - import jakarta.inject.Inject; import jakarta.interceptor.AroundInvoke; import jakarta.interceptor.Interceptor; @@ -9,7 +8,6 @@ /** * Fail liveness check if the number of exceptions thrown by artifactStore is too high. 
- * */ @Interceptor @Priority(Interceptor.Priority.APPLICATION) diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/ResponseErrorLivenessCheck.java b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/ResponseErrorLivenessCheck.java index ecbae6ad6b..2ddac03d55 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/ResponseErrorLivenessCheck.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/ResponseErrorLivenessCheck.java @@ -15,32 +15,31 @@ @ApplicationScoped @Liveness @Default -public class ResponseErrorLivenessCheck extends AbstractErrorCounterHealthCheck implements HealthCheck, LivenessCheck { +public class ResponseErrorLivenessCheck extends AbstractErrorCounterHealthCheck + implements HealthCheck, LivenessCheck { @Inject Logger log; /** - * Maximum number of HTTP 5xx errors returned to the user - * as captured by {@link io.apicurio.registry.rest.RegistryExceptionMapper} - * before the liveness check fails. + * Maximum number of HTTP 5xx errors returned to the user as captured by + * {@link io.apicurio.registry.rest.RegistryExceptionMapper} before the liveness check fails. */ @ConfigProperty(name = "apicurio.metrics.ResponseErrorLivenessCheck.errorThreshold", defaultValue = "1") @Info(category = "health", description = "Error threshold of response liveness check", availableSince = "1.0.2.Final") Integer configErrorThreshold; /** - * The counter is reset after some time without errors. - * i.e. to fail the check after 2 errors in a minute, set the threshold to 1 and this configuration option - * to 60. - * TODO report the absolute count as a metric? + * The counter is reset after some time without errors. i.e. to fail the check after 2 errors in a minute, + * set the threshold to 1 and this configuration option to 60. TODO report the absolute count as a metric? 
*/ @ConfigProperty(name = "apicurio.metrics.ResponseErrorLivenessCheck.counterResetWindowDuration.seconds", defaultValue = "60") @Info(category = "health", description = "Counter reset window duration of response liveness check", availableSince = "1.0.2.Final") Integer configCounterResetWindowDurationSec; /** - * If set to a positive value, reset the liveness status after this time window passes without any further errors. + * If set to a positive value, reset the liveness status after this time window passes without any further + * errors. */ @ConfigProperty(name = "apicurio.metrics.ResponseErrorLivenessCheck.statusResetWindowDuration.seconds", defaultValue = "300") @Info(category = "health", description = "Status reset window duration of response liveness check", availableSince = "1.0.2.Final") @@ -58,11 +57,8 @@ void init() { @Override public synchronized HealthCheckResponse call() { callSuper(); - return HealthCheckResponse.builder() - .name("ResponseErrorLivenessCheck") - .withData("errorCount", errorCounter) - .status(up) - .build(); + return HealthCheckResponse.builder().name("ResponseErrorLivenessCheck") + .withData("errorCount", errorCounter).status(up).build(); } @Override @@ -72,18 +68,21 @@ public void suspect(String reason) { } super.suspectSuper(); if (disableLogging != Boolean.TRUE) { - log.info("After this event, the error counter is {} (out of the maximum {} allowed).", errorCounter, configErrorThreshold); + log.info("After this event, the error counter is {} (out of the maximum {} allowed).", + errorCounter, configErrorThreshold); } } @Override public void suspectWithException(Throwable reason) { if (disableLogging != Boolean.TRUE) { - log.warn("Liveness problem suspected in ResponseErrorLivenessCheck because of an exception: ", reason); + log.warn("Liveness problem suspected in ResponseErrorLivenessCheck because of an exception: ", + reason); } super.suspectSuper(); if (disableLogging != Boolean.TRUE) { - log.info("After this event, the error 
counter is {} (out of the maximum {} allowed).", errorCounter, configErrorThreshold); + log.info("After this event, the error counter is {} (out of the maximum {} allowed).", + errorCounter, configErrorThreshold); } } } diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/StorageLivenessCheck.java b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/StorageLivenessCheck.java index 34734e9b10..8c4dfe6f99 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/liveness/StorageLivenessCheck.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/liveness/StorageLivenessCheck.java @@ -2,13 +2,12 @@ import io.apicurio.registry.storage.RegistryStorage; import io.apicurio.registry.types.Current; -import org.eclipse.microprofile.health.HealthCheck; -import org.eclipse.microprofile.health.HealthCheckResponse; -import org.eclipse.microprofile.health.Liveness; - import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.inject.Default; import jakarta.inject.Inject; +import org.eclipse.microprofile.health.HealthCheck; +import org.eclipse.microprofile.health.HealthCheckResponse; +import org.eclipse.microprofile.health.Liveness; @ApplicationScoped @Liveness @@ -21,9 +20,6 @@ public class StorageLivenessCheck implements HealthCheck { @Override public synchronized HealthCheckResponse call() { - return HealthCheckResponse.builder() - .name("StorageLivenessCheck") - .status(storage.isAlive()) - .build(); + return HealthCheckResponse.builder().name("StorageLivenessCheck").status(storage.isAlive()).build(); } } diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceSimpleReadinessCheck.java b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceSimpleReadinessCheck.java index c7636c29f5..14c7aa99e1 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceSimpleReadinessCheck.java +++ 
b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceSimpleReadinessCheck.java @@ -3,15 +3,14 @@ import io.apicurio.registry.metrics.health.liveness.PersistenceExceptionLivenessCheck; import io.apicurio.registry.storage.RegistryStorage; import io.apicurio.registry.types.Current; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.enterprise.inject.Default; +import jakarta.inject.Inject; import org.eclipse.microprofile.health.HealthCheck; import org.eclipse.microprofile.health.HealthCheckResponse; import org.eclipse.microprofile.health.Readiness; import org.slf4j.Logger; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.enterprise.inject.Default; -import jakarta.inject.Inject; - @ApplicationScoped @Readiness @Default @@ -25,8 +24,7 @@ public class PersistenceSimpleReadinessCheck implements HealthCheck { RegistryStorage storage; /** - * An exception should also be caught by - * {@link PersistenceExceptionLivenessCheck} + * An exception should also be caught by {@link PersistenceExceptionLivenessCheck} */ private boolean test() { try { @@ -39,9 +37,6 @@ private boolean test() { @Override public synchronized HealthCheckResponse call() { - return HealthCheckResponse.builder() - .name("PersistenceSimpleReadinessCheck") - .status(test()) - .build(); + return HealthCheckResponse.builder().name("PersistenceSimpleReadinessCheck").status(test()).build(); } } diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessApply.java b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessApply.java index 4ea3a11e43..a93dcf8970 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessApply.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessApply.java @@ -1,14 +1,14 @@ package io.apicurio.registry.metrics.health.readiness; +import 
jakarta.interceptor.InterceptorBinding; + import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import jakarta.interceptor.InterceptorBinding; - @InterceptorBinding @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.TYPE, ElementType.METHOD}) +@Target({ ElementType.TYPE, ElementType.METHOD }) public @interface PersistenceTimeoutReadinessApply { } diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessCheck.java b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessCheck.java index 5c0c9005ea..1f0ce8e8c6 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessCheck.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessCheck.java @@ -16,7 +16,6 @@ /** * Fail readiness check if the duration of processing a artifactStore operation is too high. - * */ @ApplicationScoped @Liveness @@ -27,25 +26,23 @@ public class PersistenceTimeoutReadinessCheck extends AbstractErrorCounterHealth Logger log; /** - * Maximum number of timeouts as captured by this interceptor, - * before the readiness check fails. + * Maximum number of timeouts as captured by this interceptor, before the readiness check fails. */ @ConfigProperty(name = "apicurio.metrics.PersistenceTimeoutReadinessCheck.errorThreshold", defaultValue = "5") @Info(category = "health", description = "Error threshold of persistence readiness check", availableSince = "1.0.2.Final") Integer configErrorThreshold; /** - * The counter is reset after some time without errors. - * i.e. to fail the check after 2 errors in a minute, set the threshold to 1 and this configuration option - * to 60. - * TODO report the absolute count as a metric? + * The counter is reset after some time without errors. i.e. 
to fail the check after 2 errors in a minute, + * set the threshold to 1 and this configuration option to 60. TODO report the absolute count as a metric? */ @ConfigProperty(name = "apicurio.metrics.PersistenceTimeoutReadinessCheck.counterResetWindowDuration.seconds", defaultValue = "60") @Info(category = "health", description = "Counter reset window duration of persistence readiness check", availableSince = "1.0.2.Final") Integer configCounterResetWindowDurationSec; /** - * If set to a positive value, reset the readiness status after this time window passes without any further errors. + * If set to a positive value, reset the readiness status after this time window passes without any + * further errors. */ @ConfigProperty(name = "apicurio.metrics.PersistenceTimeoutReadinessCheck.statusResetWindowDuration.seconds", defaultValue = "300") @Info(category = "health", description = "Status reset window duration of persistence readiness check", availableSince = "1.0.2.Final") @@ -69,11 +66,8 @@ void init() { @Override public synchronized HealthCheckResponse call() { callSuper(); - return HealthCheckResponse.builder() - .name("PersistenceTimeoutReadinessCheck") - .withData("errorCount", errorCounter) - .status(up) - .build(); + return HealthCheckResponse.builder().name("PersistenceTimeoutReadinessCheck") + .withData("errorCount", errorCounter).status(up).build(); } public Duration getTimeoutSec() { diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessInterceptor.java b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessInterceptor.java index a9920d489a..21f5c9f58a 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessInterceptor.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/PersistenceTimeoutReadinessInterceptor.java @@ -1,18 +1,16 @@ package io.apicurio.registry.metrics.health.readiness; -import 
java.time.Instant; - +import jakarta.annotation.Priority; import jakarta.inject.Inject; import jakarta.interceptor.AroundInvoke; import jakarta.interceptor.Interceptor; import jakarta.interceptor.InvocationContext; - -import jakarta.annotation.Priority; import org.slf4j.Logger; +import java.time.Instant; + /** * Fail readiness check if the duration of processing a artifactStore operation is too high. - * */ @Interceptor @Priority(Interceptor.Priority.APPLICATION) diff --git a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/ResponseTimeoutReadinessCheck.java b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/ResponseTimeoutReadinessCheck.java index cd3ea60895..0a511ffefc 100644 --- a/app/src/main/java/io/apicurio/registry/metrics/health/readiness/ResponseTimeoutReadinessCheck.java +++ b/app/src/main/java/io/apicurio/registry/metrics/health/readiness/ResponseTimeoutReadinessCheck.java @@ -35,33 +35,32 @@ public class ResponseTimeoutReadinessCheck extends AbstractErrorCounterHealthChe Logger log; /** - * Maximum number of requests taking more than {@link ResponseTimeoutReadinessCheck#configTimeoutSec} seconds, - * before the readiness check fails. + * Maximum number of requests taking more than {@link ResponseTimeoutReadinessCheck#configTimeoutSec} + * seconds, before the readiness check fails. */ @ConfigProperty(name = "apicurio.metrics.ResponseTimeoutReadinessCheck.errorThreshold", defaultValue = "1") @Info(category = "health", description = "Error threshold of response readiness check", availableSince = "1.0.2.Final") Instance configErrorThreshold; /** - * The counter is reset after some time without errors. - * i.e. to fail the check after 2 errors in a minute, set the threshold to 1 and this configuration option - * to 60. - * TODO report the absolute count as a metric? + * The counter is reset after some time without errors. i.e. 
to fail the check after 2 errors in a minute, + * set the threshold to 1 and this configuration option to 60. TODO report the absolute count as a metric? */ @ConfigProperty(name = "apicurio.metrics.ResponseTimeoutReadinessCheck.counterResetWindowDuration.seconds", defaultValue = "60") @Info(category = "health", description = "Counter reset window duration of response readiness check", availableSince = "1.0.2.Final") Instance configCounterResetWindowDurationSec; /** - * If set to a positive value, reset the readiness status after this time window passes without any further errors. + * If set to a positive value, reset the readiness status after this time window passes without any + * further errors. */ @ConfigProperty(name = "apicurio.metrics.ResponseTimeoutReadinessCheck.statusResetWindowDuration.seconds", defaultValue = "300") @Info(category = "health", description = "Status reset window duration of response readiness check", availableSince = "1.0.2.Final") Instance configStatusResetWindowDurationSec; /** - * Set the request duration in seconds, after which it's considered an error. - * TODO This may be expected on some endpoints. Add a way to ignore those. + * Set the request duration in seconds, after which it's considered an error. TODO This may be expected on + * some endpoints. Add a way to ignore those. 
*/ @ConfigProperty(name = "apicurio.metrics.ResponseTimeoutReadinessCheck.timeout.seconds", defaultValue = "10") @Info(category = "health", description = "Timeout of response readiness check", availableSince = "1.0.2.Final") @@ -71,7 +70,8 @@ public class ResponseTimeoutReadinessCheck extends AbstractErrorCounterHealthChe @PostConstruct void init() { - init(configErrorThreshold.get(), configCounterResetWindowDurationSec.get(), configStatusResetWindowDurationSec.get()); + init(configErrorThreshold.get(), configCounterResetWindowDurationSec.get(), + configStatusResetWindowDurationSec.get()); timeoutSec = Duration.ofSeconds(configTimeoutSec.get()); } @@ -100,10 +100,7 @@ public void filter(ContainerRequestContext requestContext, ContainerResponseCont @Override public synchronized HealthCheckResponse call() { callSuper(); - return HealthCheckResponse.builder() - .name("ResponseTimeoutReadinessCheck") - .withData("errorCount", errorCounter) - .status(up) - .build(); + return HealthCheckResponse.builder().name("ResponseTimeoutReadinessCheck") + .withData("errorCount", errorCounter).status(up).build(); } } diff --git a/app/src/main/java/io/apicurio/registry/rest/AuthenticationFailedExceptionMapper.java b/app/src/main/java/io/apicurio/registry/rest/AuthenticationFailedExceptionMapper.java index 0afc7cf74c..1effb9f912 100644 --- a/app/src/main/java/io/apicurio/registry/rest/AuthenticationFailedExceptionMapper.java +++ b/app/src/main/java/io/apicurio/registry/rest/AuthenticationFailedExceptionMapper.java @@ -1,7 +1,6 @@ package io.apicurio.registry.rest; import io.quarkus.security.AuthenticationFailedException; - import jakarta.ws.rs.core.Response; import jakarta.ws.rs.ext.ExceptionMapper; diff --git a/app/src/main/java/io/apicurio/registry/rest/HeadersHack.java b/app/src/main/java/io/apicurio/registry/rest/HeadersHack.java index b223377bd5..162a02fe8c 100644 --- a/app/src/main/java/io/apicurio/registry/rest/HeadersHack.java +++ 
b/app/src/main/java/io/apicurio/registry/rest/HeadersHack.java @@ -1,22 +1,16 @@ package io.apicurio.registry.rest; -import java.util.function.Supplier; - import io.apicurio.registry.types.VersionState; import jakarta.ws.rs.core.Response; +import java.util.function.Supplier; + /** * Remove once Quarkus issue #9887 is fixed! - * */ public class HeadersHack { - public static void checkIfDeprecated( - Supplier stateSupplier, - String groupId, - String artifactId, - Object version, - Response.ResponseBuilder builder - ) { + public static void checkIfDeprecated(Supplier stateSupplier, String groupId, + String artifactId, Object version, Response.ResponseBuilder builder) { if (stateSupplier.get() == VersionState.DEPRECATED) { builder.header(Headers.DEPRECATED, true); builder.header(Headers.GROUP_ID, groupId); diff --git a/app/src/main/java/io/apicurio/registry/rest/MissingRequiredParameterException.java b/app/src/main/java/io/apicurio/registry/rest/MissingRequiredParameterException.java index fb21b1dc8a..fbea0172dc 100644 --- a/app/src/main/java/io/apicurio/registry/rest/MissingRequiredParameterException.java +++ b/app/src/main/java/io/apicurio/registry/rest/MissingRequiredParameterException.java @@ -5,9 +5,9 @@ public class MissingRequiredParameterException extends RegistryException { private static final long serialVersionUID = 3318387244830092754L; - + private final String parameter; - + /** * Constructor. 
*/ @@ -15,7 +15,7 @@ public MissingRequiredParameterException(String parameter) { super("Request is missing a required parameter: " + parameter); this.parameter = parameter; } - + /** * @return the parameter */ diff --git a/app/src/main/java/io/apicurio/registry/rest/ParametersConflictException.java b/app/src/main/java/io/apicurio/registry/rest/ParametersConflictException.java index 67d8ffca42..5c2071728d 100644 --- a/app/src/main/java/io/apicurio/registry/rest/ParametersConflictException.java +++ b/app/src/main/java/io/apicurio/registry/rest/ParametersConflictException.java @@ -10,7 +10,7 @@ public class ParametersConflictException extends ConflictException { public ParametersConflictException(String parameter1, String parameter2) { super("Conflict: '" + parameter1 + "' and '" + parameter2 + "' are mutually exclusive."); - this.parameters = new String[]{parameter1, parameter2}; + this.parameters = new String[] { parameter1, parameter2 }; } public ParametersConflictException(String... parameters) { diff --git a/app/src/main/java/io/apicurio/registry/rest/RegistryApplicationServletFilter.java b/app/src/main/java/io/apicurio/registry/rest/RegistryApplicationServletFilter.java index 280bddecdf..9e644339af 100644 --- a/app/src/main/java/io/apicurio/registry/rest/RegistryApplicationServletFilter.java +++ b/app/src/main/java/io/apicurio/registry/rest/RegistryApplicationServletFilter.java @@ -17,10 +17,10 @@ /** * This Servlet Filter combines various functionalities that can be configured using config properties: - * Disable APIs: it's possible to provide a list of regular expressions to disable API paths. - * The list of regular expressions will be applied to all incoming requests, if any of them match the request will get a 404 response. - * Note: this is implemented in a servlet to be able to disable the web UI (/ui), because the web is served with Servlets - * + * Disable APIs: it's possible to provide a list of regular expressions to disable API paths. 
The list of + * regular expressions will be applied to all incoming requests, if any of them match the request will get a + * 404 response. Note: this is implemented in a servlet to be able to disable the web UI (/ui), because the + * web is served with Servlets */ @ApplicationScoped public class RegistryApplicationServletFilter implements Filter { @@ -37,10 +37,12 @@ public class RegistryApplicationServletFilter implements Filter { RegistryExceptionMapperService exceptionMapper; /** - * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse, jakarta.servlet.FilterChain) + * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse, + * jakarta.servlet.FilterChain) */ @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { + public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) + throws IOException, ServletException { HttpServletRequest req = (HttpServletRequest) request; String requestURI = req.getRequestURI(); @@ -52,7 +54,7 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha HttpServletResponse httpResponse = (HttpServletResponse) response; httpResponse.reset(); httpResponse.setStatus(HttpServletResponse.SC_NOT_FOUND); - //important to return, to stop the filters chain + // important to return, to stop the filters chain return; } } diff --git a/app/src/main/java/io/apicurio/registry/rest/RegistryExceptionMapper.java b/app/src/main/java/io/apicurio/registry/rest/RegistryExceptionMapper.java index 67f8139d32..b6ef041cfe 100644 --- a/app/src/main/java/io/apicurio/registry/rest/RegistryExceptionMapper.java +++ b/app/src/main/java/io/apicurio/registry/rest/RegistryExceptionMapper.java @@ -25,7 +25,6 @@ /** * TODO use v1 beans when appropriate (when handling REST API v1 calls) - * */ @ApplicationScoped @Provider @@ -82,15 +81,12 
@@ public Response toResponse(Throwable t) { error.setName(null); error.setErrorCode(CONFLUENT_CODE_MAP.getOrDefault(t.getClass(), 0)); } - return builder.type(MediaType.APPLICATION_JSON) - .entity(error) - .build(); + return builder.type(MediaType.APPLICATION_JSON).entity(error).build(); } /** - * Returns true if the endpoint that caused the error is a "ccompat" endpoint. If so - * we need to simplify the error we return. The apicurio error structure has at least - * one additional property. + * Returns true if the endpoint that caused the error is a "ccompat" endpoint. If so we need to simplify + * the error we return. The apicurio error structure has at least one additional property. */ private boolean isCompatEndpoint() { if (this.request != null) { diff --git a/app/src/main/java/io/apicurio/registry/rest/RestConfig.java b/app/src/main/java/io/apicurio/registry/rest/RestConfig.java index 5d04d275aa..c4698027e1 100644 --- a/app/src/main/java/io/apicurio/registry/rest/RestConfig.java +++ b/app/src/main/java/io/apicurio/registry/rest/RestConfig.java @@ -23,9 +23,13 @@ public class RestConfig { @Info(category = "rest", description = "Enables artifact version deletion", availableSince = "2.4.2-SNAPSHOT") Supplier artifactVersionDeletionEnabled; - public int getDownloadMaxSize() { return this.downloadMaxSize; } + public int getDownloadMaxSize() { + return this.downloadMaxSize; + } - public boolean getDownloadSkipSSLValidation() { return this.downloadSkipSSLValidation; } + public boolean getDownloadSkipSSLValidation() { + return this.downloadSkipSSLValidation; + } public boolean isArtifactVersionDeletionEnabled() { return artifactVersionDeletionEnabled.get(); diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/AbstractResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v2/AbstractResourceImpl.java index 1c13a0f6e3..fd405a9fb6 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/AbstractResourceImpl.java +++ 
b/app/src/main/java/io/apicurio/registry/rest/v2/AbstractResourceImpl.java @@ -25,7 +25,7 @@ import java.util.Map; public abstract class AbstractResourceImpl { - + @Inject Logger log; @@ -44,8 +44,9 @@ public abstract class AbstractResourceImpl { String apiBaseHref; /** - * Handle the content references based on the value of "dereference" - this can mean - * we need to fully dereference the content. + * Handle the content references based on the value of "dereference" - this can mean we need to fully + * dereference the content. + * * @param dereference * @param content */ @@ -53,18 +54,19 @@ protected TypedContent handleContentReferences(boolean dereference, String artif TypedContent content, List references) { // Dereference or rewrite references if (!references.isEmpty() && dereference) { - ArtifactTypeUtilProvider artifactTypeProvider = factory.getArtifactTypeProvider(artifactType); - ContentDereferencer contentDereferencer = artifactTypeProvider.getContentDereferencer(); - Map resolvedReferences = storage.resolveReferences(references); - content = contentDereferencer.dereference(content, resolvedReferences); + ArtifactTypeUtilProvider artifactTypeProvider = factory.getArtifactTypeProvider(artifactType); + ContentDereferencer contentDereferencer = artifactTypeProvider.getContentDereferencer(); + Map resolvedReferences = storage.resolveReferences(references); + content = contentDereferencer.dereference(content, resolvedReferences); } return content; } /** - * Convert the list of references into a list of REST API URLs that point to the content. This means - * that we generate a REST API URL from the GAV (groupId, artifactId, version) information found in - * each reference. + * Convert the list of references into a list of REST API URLs that point to the content. This means that + * we generate a REST API URL from the GAV (groupId, artifactId, version) information found in each + * reference. 
+ * * @param references */ protected Map resolveReferenceUrls(List references) { @@ -83,8 +85,9 @@ protected Map resolveReferenceUrls(List re } /** - * Convert a single artifact reference to a REST API URL. This means that we generate a REST API URL - * from the GAV (groupId, artifactId, version) information found in the reference. + * Convert a single artifact reference to a REST API URL. This means that we generate a REST API URL from + * the GAV (groupId, artifactId, version) information found in the reference. + * * @param reference */ protected String resolveReferenceUrl(ArtifactReferenceDto reference) { @@ -102,7 +105,7 @@ protected String resolveReferenceUrl(ArtifactReferenceDto reference) { this.log.error("Error trying to determine the baseHref of the REST API.", e); return null; } - + if (baseHref == null) { this.log.warn("Failed to determine baseHref for the REST API."); return null; diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/AdminResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v2/AdminResourceImpl.java index 32dc990320..be126c7004 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/AdminResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v2/AdminResourceImpl.java @@ -56,7 +56,7 @@ import static io.apicurio.registry.utils.DtoUtil.registryAuthPropertyToApp; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class AdminResourceImpl implements AdminResource { @@ -100,17 +100,13 @@ private static final void requireParameter(String parameterName, Object paramete * @see io.apicurio.registry.rest.v2.AdminResource#listArtifactTypes() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Read) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) public List listArtifactTypes() { - return factory - 
.getAllArtifactTypes() - .stream() - .map(t -> { - ArtifactTypeInfo ati = new ArtifactTypeInfo(); - ati.setName(t); - return ati; - }) - .collect(Collectors.toList()); + return factory.getAllArtifactTypes().stream().map(t -> { + ArtifactTypeInfo ati = new ArtifactTypeInfo(); + ati.setName(t); + return ati; + }).collect(Collectors.toList()); } @@ -118,21 +114,19 @@ public List listArtifactTypes() { * @see io.apicurio.registry.rest.v2.AdminResource#listGlobalRules() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Read) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) public List listGlobalRules() { List rules = storage.getGlobalRules(); List defaultRules = rulesProperties.getFilteredDefaultGlobalRules(rules); - return Stream.concat(rules.stream(), defaultRules.stream()) - .sorted() - .collect(Collectors.toList()); + return Stream.concat(rules.stream(), defaultRules.stream()).sorted().collect(Collectors.toList()); } /** * @see io.apicurio.registry.rest.v2.AdminResource#createGlobalRule(io.apicurio.registry.rest.v2.beans.Rule) */ @Override - @Audited(extractParameters = {"0", KEY_RULE}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_RULE }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public void createGlobalRule(Rule data) { RuleType type = data.getType(); requireParameter("type", type); @@ -151,7 +145,7 @@ public void createGlobalRule(Rule data) { */ @Override @Audited - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public void deleteAllGlobalRules() { storage.deleteGlobalRules(); } @@ -160,7 +154,7 @@ public void deleteAllGlobalRules() { * @see io.apicurio.registry.rest.v2.AdminResource#getGlobalRuleConfig(io.apicurio.registry.types.RuleType) */ @Override - @Authorized(style=AuthorizedStyle.None, 
level=AuthorizedLevel.Read) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) public Rule getGlobalRuleConfig(RuleType rule) { RuleConfigurationDto dto; try { @@ -179,11 +173,12 @@ public Rule getGlobalRuleConfig(RuleType rule) { } /** - * @see io.apicurio.registry.rest.v2.AdminResource#updateGlobalRuleConfig(io.apicurio.registry.types.RuleType, io.apicurio.registry.rest.v2.beans.Rule) + * @see io.apicurio.registry.rest.v2.AdminResource#updateGlobalRuleConfig(io.apicurio.registry.types.RuleType, + * io.apicurio.registry.rest.v2.beans.Rule) */ @Override - @Audited(extractParameters = {"0", KEY_RULE_TYPE, "1", KEY_RULE}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_RULE_TYPE, "1", KEY_RULE }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public Rule updateGlobalRuleConfig(RuleType rule, Rule data) { RuleConfigurationDto configDto = new RuleConfigurationDto(); configDto.setConfiguration(data.getConfig()); @@ -208,8 +203,8 @@ public Rule updateGlobalRuleConfig(RuleType rule, Rule data) { * @see io.apicurio.registry.rest.v2.AdminResource#deleteGlobalRule(io.apicurio.registry.types.RuleType) */ @Override - @Audited(extractParameters = {"0", KEY_RULE_TYPE}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_RULE_TYPE }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public void deleteGlobalRule(RuleType rule) { try { storage.deleteGlobalRule(rule); @@ -230,8 +225,9 @@ public void deleteGlobalRule(RuleType rule) { */ @Override @Audited - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) - public void importData(Boolean xRegistryPreserveGlobalId, Boolean xRegistryPreserveContentId, InputStream data) { + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) + public void importData(Boolean xRegistryPreserveGlobalId, Boolean 
xRegistryPreserveContentId, + InputStream data) { final ZipInputStream zip = new ZipInputStream(data, StandardCharsets.UTF_8); final EntityReader reader = new EntityReader(zip); EntityInputStream stream = new EntityInputStream() { @@ -250,20 +246,22 @@ public void close() throws IOException { zip.close(); } }; - this.storage.importData(stream, isNullOrTrue(xRegistryPreserveGlobalId), isNullOrTrue(xRegistryPreserveContentId)); + this.storage.importData(stream, isNullOrTrue(xRegistryPreserveGlobalId), + isNullOrTrue(xRegistryPreserveContentId)); } /** * @see io.apicurio.registry.rest.v2.AdminResource#exportData(java.lang.Boolean) */ @Override - @Audited(extractParameters = {"0", KEY_FOR_BROWSER}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_FOR_BROWSER }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public Response exportData(Boolean forBrowser) { String acceptHeader = request.getHeader("Accept"); if (Boolean.TRUE.equals(forBrowser) || MediaType.APPLICATION_JSON.equals(acceptHeader)) { long expires = System.currentTimeMillis() + (downloadHrefTtl.get() * 1000); - DownloadContextDto downloadCtx = DownloadContextDto.builder().type(DownloadContextType.EXPORT).expires(expires).build(); + DownloadContextDto downloadCtx = DownloadContextDto.builder().type(DownloadContextType.EXPORT) + .expires(expires).build(); String downloadId = storage.createDownload(downloadCtx); String downloadHref = createDownloadHref(downloadId); DownloadRef downloadRef = new DownloadRef(); @@ -279,8 +277,8 @@ public Response exportData(Boolean forBrowser) { * @see io.apicurio.registry.rest.v2.AdminResource#createRoleMapping(io.apicurio.registry.rest.v2.beans.RoleMapping) */ @Override - @Audited(extractParameters = {"0", KEY_ROLE_MAPPING}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_ROLE_MAPPING }) + @Authorized(style = 
AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public void createRoleMapping(RoleMapping data) { storage.createRoleMapping(data.getPrincipalId(), data.getRole().name(), data.getPrincipalName()); @@ -290,7 +288,7 @@ public void createRoleMapping(RoleMapping data) { * @see io.apicurio.registry.rest.v2.AdminResource#listRoleMappings() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public List listRoleMappings() { List mappings = storage.getRoleMappings(); @@ -303,7 +301,7 @@ public List listRoleMappings() { * @see io.apicurio.registry.rest.v2.AdminResource#getRoleMapping(java.lang.String) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public RoleMapping getRoleMapping(String principalId) { RoleMappingDto dto = storage.getRoleMapping(principalId); @@ -311,11 +309,12 @@ public RoleMapping getRoleMapping(String principalId) { } /** - * @see io.apicurio.registry.rest.v2.AdminResource#updateRoleMapping (java.lang.String, io.apicurio.registry.rest.v2.beans.Role) + * @see io.apicurio.registry.rest.v2.AdminResource#updateRoleMapping (java.lang.String, + * io.apicurio.registry.rest.v2.beans.Role) */ @Override - @Audited(extractParameters = {"0", KEY_PRINCIPAL_ID, "1", KEY_UPDATE_ROLE}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_PRINCIPAL_ID, "1", KEY_UPDATE_ROLE }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public void updateRoleMapping(String principalId, UpdateRole data) { requireParameter("principalId", principalId); @@ -327,19 +326,18 @@ public void updateRoleMapping(String principalId, UpdateRole data) { * @see 
io.apicurio.registry.rest.v2.AdminResource#deleteRoleMapping(java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_PRINCIPAL_ID}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_PRINCIPAL_ID }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public void deleteRoleMapping(String principalId) { storage.deleteRoleMapping(principalId); } - /** * @see io.apicurio.registry.rest.v2.AdminResource#listConfigProperties() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public List listConfigProperties() { // Query the DB for the set of configured properties. List props = storage.getConfigProperties(); @@ -348,11 +346,14 @@ public List listConfigProperties() { Map propsI = new HashMap<>(); props.forEach(dto -> propsI.put(dto.getName(), dto)); - // Return value is the set of all dynamic config properties, with either configured or default values (depending + // Return value is the set of all dynamic config properties, with either configured or default values + // (depending // on whether the value is actually configured and stored in the DB or not). return dynamicPropertyIndex.getAcceptedPropertyNames().stream() .sorted((pname1, pname2) -> pname1.compareTo(pname2)) - .map(pname -> propsI.containsKey(pname) ? dtoToConfigurationProperty(dynamicPropertyIndex.getProperty(pname), propsI.get(pname)) : defToConfigurationProperty(dynamicPropertyIndex.getProperty(pname))) + .map(pname -> propsI.containsKey(pname) + ? 
dtoToConfigurationProperty(dynamicPropertyIndex.getProperty(pname), propsI.get(pname)) + : defToConfigurationProperty(dynamicPropertyIndex.getProperty(pname))) .collect(Collectors.toList()); } @@ -360,7 +361,7 @@ public List listConfigProperties() { * @see io.apicurio.registry.rest.v2.AdminResource#getConfigProperty(java.lang.String) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public ConfigurationProperty getConfigProperty(String propertyName) { // Ensure that the property is a valid dynamic config property. DynamicConfigPropertyDef def = resolveConfigProperty(propertyName); @@ -374,10 +375,11 @@ public ConfigurationProperty getConfigProperty(String propertyName) { } /** - * @see io.apicurio.registry.rest.v2.AdminResource#updateConfigProperty(java.lang.String, io.apicurio.registry.rest.v2.beans.UpdateConfigurationProperty) + * @see io.apicurio.registry.rest.v2.AdminResource#updateConfigProperty(java.lang.String, + * io.apicurio.registry.rest.v2.beans.UpdateConfigurationProperty) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public void updateConfigProperty(String propertyName, UpdateConfigurationProperty data) { DynamicConfigPropertyDef propertyDef = resolveConfigProperty(propertyName); validateConfigPropertyValue(propertyDef, data.getValue()); @@ -392,8 +394,8 @@ public void updateConfigProperty(String propertyName, UpdateConfigurationPropert * @see io.apicurio.registry.rest.v2.AdminResource#resetConfigProperty(java.lang.String) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) - @Audited(extractParameters = {"0", KEY_NAME}) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_NAME }) public void resetConfigProperty(String propertyName) { // 
Check if the config property exists. resolveConfigProperty(propertyName); @@ -409,7 +411,6 @@ private static RoleMapping dtoToRoleMapping(RoleMappingDto dto) { return mapping; } - private static boolean isNullOrTrue(Boolean value) { return value == null || value; } @@ -418,7 +419,8 @@ private String createDownloadHref(String downloadId) { return "/apis/registry/v2/downloads/" + downloadId; } - private static ConfigurationProperty dtoToConfigurationProperty(DynamicConfigPropertyDef def, DynamicConfigPropertyDto dto) { + private static ConfigurationProperty dtoToConfigurationProperty(DynamicConfigPropertyDef def, + DynamicConfigPropertyDto dto) { ConfigurationProperty rval = new ConfigurationProperty(); rval.setName(def.getName()); rval.setValue(dto.getValue()); @@ -429,7 +431,8 @@ private static ConfigurationProperty dtoToConfigurationProperty(DynamicConfigPro } private ConfigurationProperty defToConfigurationProperty(DynamicConfigPropertyDef def) { - String propertyValue = config.getOptionalValue(def.getName(), String.class).orElse(def.getDefaultValue()); + String propertyValue = config.getOptionalValue(def.getName(), String.class) + .orElse(def.getDefaultValue()); ConfigurationProperty rval = new ConfigurationProperty(); rval.setName(appAuthPropertyToRegistry(def.getName())); @@ -441,8 +444,9 @@ private ConfigurationProperty defToConfigurationProperty(DynamicConfigPropertyDe } /** - * Lookup the dynamic configuration property being set. Ensure that it exists (throws - * a {@link NotFoundException} if it does not. + * Lookup the dynamic configuration property being set. Ensure that it exists (throws a + * {@link NotFoundException} if it does not. 
+ * * @param propertyName the name of the dynamic property * @return the dynamic config property definition */ @@ -452,7 +456,7 @@ private DynamicConfigPropertyDef resolveConfigProperty(String propertyName) { if (property == null) { propertyName = registryAuthPropertyToApp(propertyName); } - //If registry property cannot be found, try with app property + // If registry property cannot be found, try with app property property = dynamicPropertyIndex.getProperty(propertyName); if (property == null) { @@ -466,14 +470,16 @@ private DynamicConfigPropertyDef resolveConfigProperty(String propertyName) { } /** - * Ensure that the value being set on the given property is value for the property type. - * For example, this should fail + * Ensure that the value being set on the given property is value for the property type. For example, this + * should fail + * * @param propertyDef the dynamic config property definition * @param value the config property value */ private void validateConfigPropertyValue(DynamicConfigPropertyDef propertyDef, String value) { if (!propertyDef.isValidValue(value)) { - throw new InvalidPropertyValueException("Invalid dynamic configuration property value for: " + propertyDef.getName()); + throw new InvalidPropertyValueException( + "Invalid dynamic configuration property value for: " + propertyDef.getName()); } } diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/DownloadsResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v2/DownloadsResourceImpl.java index 0c32a49533..4e1139a8ea 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/DownloadsResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v2/DownloadsResourceImpl.java @@ -22,7 +22,7 @@ import jakarta.ws.rs.core.Response; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged 
@Path("/apis/registry/v2/downloads") public class DownloadsResourceImpl { @@ -34,7 +34,7 @@ public class DownloadsResourceImpl { @Inject DataExporter exporter; - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) @GET @Path("{downloadId}") @Produces("*/*") @@ -50,15 +50,15 @@ public Response download(@PathParam("downloadId") String downloadId) { } /** - * A duplicate version of the above that will allow a filename to be added - * for download purposes. So e.g. /apis/registry/v2/downloads/ABCD-1234 can - * be aliased as /apis/registry/v2/downloads/ABCD-1234/export.zip and work - * the same way. But when saving from a browser, the filename should be - * useful. + * A duplicate version of the above that will allow a filename to be added for download purposes. So e.g. + * /apis/registry/v2/downloads/ABCD-1234 can be aliased as + * /apis/registry/v2/downloads/ABCD-1234/export.zip and work the same way. But when saving from a browser, + * the filename should be useful. + * * @param downloadId * @return */ - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) @GET @Path("{downloadId}/{fileName}") @Produces("*/*") diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/GroupsResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v2/GroupsResourceImpl.java index 68b0efec23..3d7469f7cb 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/GroupsResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v2/GroupsResourceImpl.java @@ -126,10 +126,9 @@ /** * Implements the {@link GroupsResource} JAX-RS interface. 
- * */ @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class GroupsResourceImpl extends AbstractResourceImpl implements GroupsResource { @@ -151,12 +150,13 @@ public class GroupsResourceImpl extends AbstractResourceImpl implements GroupsRe @Inject CommonResourceOperations common; - + @Inject io.apicurio.registry.rest.v3.GroupsResourceImpl v3; /** - * @see io.apicurio.registry.rest.v2.GroupsResource#getLatestArtifact(java.lang.String, java.lang.String, Boolean) + * @see io.apicurio.registry.rest.v2.GroupsResource#getLatestArtifact(java.lang.String, java.lang.String, + * Boolean) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) @@ -169,14 +169,21 @@ public Response getLatestArtifact(String groupId, String artifactId, Boolean der } try { - GAV latestGAV = storage.getBranchTip(new GA(groupId, artifactId), BranchId.LATEST, RetrievalBehavior.SKIP_DISABLED_LATEST); - ArtifactVersionMetaDataDto metaData = storage.getArtifactVersionMetaData(latestGAV.getRawGroupIdWithNull(), latestGAV.getRawArtifactId(), latestGAV.getRawVersionId()); - StoredArtifactVersionDto artifact = storage.getArtifactVersionContent(defaultGroupIdToNull(groupId), artifactId, latestGAV.getRawVersionId()); - - TypedContent contentToReturn = TypedContent.create(artifact.getContent(), artifact.getContentType()); - contentToReturn = handleContentReferences(dereference, metaData.getArtifactType(), contentToReturn, artifact.getReferences()); - - Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), contentToReturn.getContentType()); + GAV latestGAV = storage.getBranchTip(new GA(groupId, artifactId), BranchId.LATEST, + RetrievalBehavior.SKIP_DISABLED_LATEST); + ArtifactVersionMetaDataDto metaData = storage.getArtifactVersionMetaData( + latestGAV.getRawGroupIdWithNull(), 
latestGAV.getRawArtifactId(), + latestGAV.getRawVersionId()); + StoredArtifactVersionDto artifact = storage.getArtifactVersionContent( + defaultGroupIdToNull(groupId), artifactId, latestGAV.getRawVersionId()); + + TypedContent contentToReturn = TypedContent.create(artifact.getContent(), + artifact.getContentType()); + contentToReturn = handleContentReferences(dereference, metaData.getArtifactType(), + contentToReturn, artifact.getReferences()); + + Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), + contentToReturn.getContentType()); checkIfDeprecated(metaData::getState, groupId, artifactId, metaData.getVersion(), builder); return builder.build(); } catch (VersionNotFoundException e) { @@ -185,54 +192,67 @@ public Response getLatestArtifact(String groupId, String artifactId, Boolean der } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifact(String, String, String, String, String, String, String, InputStream) + * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifact(String, String, String, String, String, + * String, String, InputStream) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_NAME, "4", KEY_NAME_ENCODED, "5", KEY_DESCRIPTION, "6", KEY_DESCRIPTION_ENCODED}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_NAME, + "4", KEY_NAME_ENCODED, "5", KEY_DESCRIPTION, "6", KEY_DESCRIPTION_ENCODED }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public ArtifactMetaData updateArtifact(String groupId, String artifactId, String xRegistryVersion, - String xRegistryName, String xRegistryNameEncoded, String xRegistryDescription, - String xRegistryDescriptionEncoded, InputStream data) { - return this.updateArtifactWithRefs(groupId, artifactId, xRegistryVersion, xRegistryName, xRegistryNameEncoded, xRegistryDescription, xRegistryDescriptionEncoded, data, 
Collections.emptyList()); + String xRegistryName, String xRegistryNameEncoded, String xRegistryDescription, + String xRegistryDescriptionEncoded, InputStream data) { + return this.updateArtifactWithRefs(groupId, artifactId, xRegistryVersion, xRegistryName, + xRegistryNameEncoded, xRegistryDescription, xRegistryDescriptionEncoded, data, + Collections.emptyList()); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifact(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.ArtifactContent) + * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifact(java.lang.String, java.lang.String, + * java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, + * io.apicurio.registry.rest.v2.beans.ArtifactContent) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_NAME, "4", KEY_NAME_ENCODED, "5", KEY_DESCRIPTION, "6", KEY_DESCRIPTION_ENCODED}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_NAME, + "4", KEY_NAME_ENCODED, "5", KEY_DESCRIPTION, "6", KEY_DESCRIPTION_ENCODED }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public ArtifactMetaData updateArtifact(String groupId, String artifactId, String xRegistryVersion, - String xRegistryName, String xRegistryNameEncoded, String xRegistryDescription, - String xRegistryDescriptionEncoded, ArtifactContent data) { + String xRegistryName, String xRegistryNameEncoded, String xRegistryDescription, + String xRegistryDescriptionEncoded, ArtifactContent data) { requireParameter("content", data.getContent()); - return this.updateArtifactWithRefs(groupId, artifactId, xRegistryVersion, xRegistryName, xRegistryNameEncoded, xRegistryDescription, xRegistryDescriptionEncoded, IoUtil.toStream(data.getContent()), data.getReferences()); + 
return this.updateArtifactWithRefs(groupId, artifactId, xRegistryVersion, xRegistryName, + xRegistryNameEncoded, xRegistryDescription, xRegistryDescriptionEncoded, + IoUtil.toStream(data.getContent()), data.getReferences()); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactVersionReferences(java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.types.ReferenceType) + * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactVersionReferences(java.lang.String, + * java.lang.String, java.lang.String, io.apicurio.registry.types.ReferenceType) */ @Override public List getArtifactVersionReferences(String groupId, String artifactId, String version, ReferenceType refType) { if (refType == null || refType == ReferenceType.OUTBOUND) { - return storage.getArtifactVersionContent(defaultGroupIdToNull(groupId), artifactId, version).getReferences().stream() - .map(V2ApiUtil::referenceDtoToReference) + return storage.getArtifactVersionContent(defaultGroupIdToNull(groupId), artifactId, version) + .getReferences().stream().map(V2ApiUtil::referenceDtoToReference) .collect(Collectors.toList()); } else { - return storage.getInboundArtifactReferences(defaultGroupIdToNull(groupId), artifactId, version).stream() - .map(V2ApiUtil::referenceDtoToReference) - .collect(Collectors.toList()); + return storage.getInboundArtifactReferences(defaultGroupIdToNull(groupId), artifactId, version) + .stream().map(V2ApiUtil::referenceDtoToReference).collect(Collectors.toList()); } } - private ArtifactMetaData updateArtifactWithRefs(String groupId, String artifactId, String xRegistryVersion, String xRegistryName, String xRegistryNameEncoded, String xRegistryDescription, String xRegistryDescriptionEncoded, InputStream data, List references) { + private ArtifactMetaData updateArtifactWithRefs(String groupId, String artifactId, + String xRegistryVersion, String xRegistryName, String xRegistryNameEncoded, + String xRegistryDescription, String 
xRegistryDescriptionEncoded, InputStream data, + List references) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); maxOneOf("X-Registry-Name", xRegistryName, "X-Registry-Name-Encoded", xRegistryNameEncoded); - maxOneOf("X-Registry-Description", xRegistryDescription, "X-Registry-Description-Encoded", xRegistryDescriptionEncoded); + maxOneOf("X-Registry-Description", xRegistryDescription, "X-Registry-Description-Encoded", + xRegistryDescriptionEncoded); String artifactName = getOneOf(xRegistryName, decode(xRegistryNameEncoded)); String artifactDescription = getOneOf(xRegistryDescription, decode(xRegistryDescriptionEncoded)); @@ -241,14 +261,15 @@ private ArtifactMetaData updateArtifactWithRefs(String groupId, String artifactI if (content.bytes().length == 0) { throw new BadRequestException(EMPTY_CONTENT_ERROR_MESSAGE); } - return updateArtifactInternal(groupId, artifactId, xRegistryVersion, artifactName, artifactDescription, content, getContentType(), references); + return updateArtifactInternal(groupId, artifactId, xRegistryVersion, artifactName, + artifactDescription, content, getContentType(), references); } /** * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifact(java.lang.String, java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteArtifact(String groupId, String artifactId) { requireParameter("groupId", groupId); @@ -258,7 +279,8 @@ public void deleteArtifact(String groupId, String artifactId) { } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactMetaData(java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactMetaData(java.lang.String, + * java.lang.String) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, 
level = AuthorizedLevel.Read) @@ -267,10 +289,13 @@ public ArtifactMetaData getArtifactMetaData(String groupId, String artifactId) { requireParameter("artifactId", artifactId); ArtifactMetaDataDto dto = storage.getArtifactMetaData(defaultGroupIdToNull(groupId), artifactId); - GAV latestGAV = storage.getBranchTip(new GA(groupId, artifactId), BranchId.LATEST, RetrievalBehavior.SKIP_DISABLED_LATEST); - ArtifactVersionMetaDataDto vdto = storage.getArtifactVersionMetaData(latestGAV.getRawGroupIdWithNull(), latestGAV.getRawArtifactId(), latestGAV.getRawVersionId()); - - ArtifactMetaData amd = V2ApiUtil.dtoToMetaData(defaultGroupIdToNull(groupId), artifactId, dto.getArtifactType(), dto); + GAV latestGAV = storage.getBranchTip(new GA(groupId, artifactId), BranchId.LATEST, + RetrievalBehavior.SKIP_DISABLED_LATEST); + ArtifactVersionMetaDataDto vdto = storage.getArtifactVersionMetaData( + latestGAV.getRawGroupIdWithNull(), latestGAV.getRawArtifactId(), latestGAV.getRawVersionId()); + + ArtifactMetaData amd = V2ApiUtil.dtoToMetaData(defaultGroupIdToNull(groupId), artifactId, + dto.getArtifactType(), dto); amd.setContentId(vdto.getContentId()); amd.setGlobalId(vdto.getGlobalId()); amd.setVersion(vdto.getVersion()); @@ -285,18 +310,18 @@ public ArtifactMetaData getArtifactMetaData(String groupId, String artifactId) { } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactMetaData(java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.EditableMetaData) + * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactMetaData(java.lang.String, + * java.lang.String, io.apicurio.registry.rest.v2.beans.EditableMetaData) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_EDITABLE_METADATA}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_EDITABLE_METADATA }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void 
updateArtifactMetaData(String groupId, String artifactId, EditableMetaData data) { - GAV latestGAV = storage.getBranchTip(new GA(groupId, artifactId), BranchId.LATEST, RetrievalBehavior.DEFAULT); - storage.updateArtifactVersionMetaData(groupId, artifactId, latestGAV.getRawVersionId(), EditableVersionMetaDataDto.builder() - .name(data.getName()) - .description(data.getDescription()) - .labels(V2ApiUtil.toV3Labels(data.getLabels(), data.getProperties())) - .build()); + GAV latestGAV = storage.getBranchTip(new GA(groupId, artifactId), BranchId.LATEST, + RetrievalBehavior.DEFAULT); + storage.updateArtifactVersionMetaData(groupId, artifactId, latestGAV.getRawVersionId(), + EditableVersionMetaDataDto.builder().name(data.getName()).description(data.getDescription()) + .labels(V2ApiUtil.toV3Labels(data.getLabels(), data.getProperties())).build()); } @Override @@ -312,7 +337,7 @@ public ArtifactOwner getArtifactOwner(String groupId, String artifactId) { } @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_OWNER}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_OWNER }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.AdminOrOwner) public void updateArtifactOwner(String groupId, String artifactId, ArtifactOwner data) { requireParameter("groupId", groupId); @@ -323,8 +348,7 @@ public void updateArtifactOwner(String groupId, String artifactId, ArtifactOwner throw new MissingRequiredParameterException("Missing required owner"); } - EditableArtifactMetaDataDto emd = EditableArtifactMetaDataDto.builder() - .owner(data.getOwner()) + EditableArtifactMetaDataDto emd = EditableArtifactMetaDataDto.builder().owner(data.getOwner()) .build(); storage.updateArtifactMetaData(defaultGroupIdToNull(groupId), artifactId, emd); } @@ -344,7 +368,8 @@ public void deleteGroupById(String groupId) { @Override @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) - public 
GroupSearchResults listGroups(BigInteger limit, BigInteger offset, SortOrder order, SortBy orderby) { + public GroupSearchResults listGroups(BigInteger limit, BigInteger offset, SortOrder order, + SortBy orderby) { if (orderby == null) { orderby = SortBy.name; } @@ -356,21 +381,21 @@ public GroupSearchResults listGroups(BigInteger limit, BigInteger offset, SortOr } final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc + : OrderDirection.desc; Set filters = Collections.emptySet(); - GroupSearchResultsDto resultsDto = storage.searchGroups(filters, oBy, oDir, offset.intValue(), limit.intValue()); + GroupSearchResultsDto resultsDto = storage.searchGroups(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V2ApiUtil.dtoToSearchResults(resultsDto); } @Override @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Write) public GroupMetaData createGroup(CreateGroupMetaData data) { - GroupMetaDataDto.GroupMetaDataDtoBuilder group = GroupMetaDataDto.builder() - .groupId(data.getId()) - .description(data.getDescription()) - .labels(data.getProperties()); + GroupMetaDataDto.GroupMetaDataDtoBuilder group = GroupMetaDataDto.builder().groupId(data.getId()) + .description(data.getDescription()).labels(data.getProperties()); String user = securityIdentity.getPrincipal().getName(); group.owner(user).createdOn(new Date().getTime()); @@ -382,20 +407,26 @@ public GroupMetaData createGroup(CreateGroupMetaData data) { @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) - public VersionMetaData getArtifactVersionMetaDataByContent(String groupId, String artifactId, Boolean canonical, ArtifactContent artifactContent) { - return getArtifactVersionMetaDataByContent(groupId, artifactId, canonical, 
IoUtil.toStream(artifactContent.getContent()), artifactContent.getReferences()); + public VersionMetaData getArtifactVersionMetaDataByContent(String groupId, String artifactId, + Boolean canonical, ArtifactContent artifactContent) { + return getArtifactVersionMetaDataByContent(groupId, artifactId, canonical, + IoUtil.toStream(artifactContent.getContent()), artifactContent.getReferences()); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactVersionMetaDataByContent(java.lang.String, java.lang.String, java.lang.Boolean, java.io.InputStream) + * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactVersionMetaDataByContent(java.lang.String, + * java.lang.String, java.lang.Boolean, java.io.InputStream) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) - public VersionMetaData getArtifactVersionMetaDataByContent(String groupId, String artifactId, Boolean canonical, InputStream data) { - return getArtifactVersionMetaDataByContent(groupId, artifactId, canonical, data, Collections.emptyList()); + public VersionMetaData getArtifactVersionMetaDataByContent(String groupId, String artifactId, + Boolean canonical, InputStream data) { + return getArtifactVersionMetaDataByContent(groupId, artifactId, canonical, data, + Collections.emptyList()); } - private VersionMetaData getArtifactVersionMetaDataByContent(String groupId, String artifactId, Boolean canonical, InputStream data, List artifactReferences) { + private VersionMetaData getArtifactVersionMetaDataByContent(String groupId, String artifactId, + Boolean canonical, InputStream data, List artifactReferences) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); @@ -416,9 +447,10 @@ private VersionMetaData getArtifactVersionMetaDataByContent(String groupId, Stri final List artifactReferenceDtos = toReferenceDtos(artifactReferences); TypedContent typedContent = TypedContent.create(content, contentType); - 
ArtifactVersionMetaDataDto dto = storage.getArtifactVersionMetaDataByContent(defaultGroupIdToNull(groupId), - artifactId, canonical, typedContent, artifactReferenceDtos); - return V2ApiUtil.dtoToVersionMetaData(defaultGroupIdToNull(groupId), artifactId, dto.getArtifactType(), dto); + ArtifactVersionMetaDataDto dto = storage.getArtifactVersionMetaDataByContent( + defaultGroupIdToNull(groupId), artifactId, canonical, typedContent, artifactReferenceDtos); + return V2ApiUtil.dtoToVersionMetaData(defaultGroupIdToNull(groupId), artifactId, + dto.getArtifactType(), dto); } /** @@ -434,10 +466,11 @@ public List listArtifactRules(String groupId, String artifactId) { } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#createArtifactRule(java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.Rule) + * @see io.apicurio.registry.rest.v2.GroupsResource#createArtifactRule(java.lang.String, java.lang.String, + * io.apicurio.registry.rest.v2.beans.Rule) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void createArtifactRule(String groupId, String artifactId, Rule data) { requireParameter("groupId", groupId); @@ -461,10 +494,11 @@ public void createArtifactRule(String groupId, String artifactId, Rule data) { } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactRules(java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactRules(java.lang.String, + * java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteArtifactRules(String groupId, 
String artifactId) { requireParameter("groupId", groupId); @@ -474,7 +508,8 @@ public void deleteArtifactRules(String groupId, String artifactId) { } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactRuleConfig(java.lang.String, java.lang.String, io.apicurio.registry.types.RuleType) + * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactRuleConfig(java.lang.String, + * java.lang.String, io.apicurio.registry.types.RuleType) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) @@ -491,10 +526,12 @@ public Rule getArtifactRuleConfig(String groupId, String artifactId, RuleType ru } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactRuleConfig(java.lang.String, java.lang.String, io.apicurio.registry.types.RuleType, io.apicurio.registry.rest.v2.beans.Rule) + * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactRuleConfig(java.lang.String, + * java.lang.String, io.apicurio.registry.types.RuleType, io.apicurio.registry.rest.v2.beans.Rule) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE_TYPE, "3", KEY_RULE}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE_TYPE, "3", + KEY_RULE }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public Rule updateArtifactRuleConfig(String groupId, String artifactId, RuleType rule, Rule data) { requireParameter("groupId", groupId); @@ -510,10 +547,11 @@ public Rule updateArtifactRuleConfig(String groupId, String artifactId, RuleType } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactRule(java.lang.String, java.lang.String, io.apicurio.registry.types.RuleType) + * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactRule(java.lang.String, java.lang.String, + * io.apicurio.registry.types.RuleType) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", 
KEY_RULE_TYPE}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE_TYPE }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) { requireParameter("groupId", groupId); @@ -524,24 +562,26 @@ public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactState(java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.UpdateState) + * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactState(java.lang.String, + * java.lang.String, io.apicurio.registry.rest.v2.beans.UpdateState) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_UPDATE_STATE}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_UPDATE_STATE }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void updateArtifactState(String groupId, String artifactId, UpdateState data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("body.state", data.getState()); - - // Possible race condition here. Worst case should be that the update fails with a reasonable message. + + // Possible race condition here. Worst case should be that the update fails with a reasonable message. 
GAV latestGAV = storage.getBranchTip(new GA(defaultGroupIdToNull(groupId), artifactId), BranchId.LATEST, RetrievalBehavior.DEFAULT); updateArtifactVersionState(groupId, artifactId, latestGAV.getRawVersionId(), data); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#testUpdateArtifact(java.lang.String, java.lang.String, java.io.InputStream) + * @see io.apicurio.registry.rest.v2.GroupsResource#testUpdateArtifact(java.lang.String, java.lang.String, + * java.io.InputStream) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) @@ -562,7 +602,10 @@ public void testUpdateArtifact(String groupId, String artifactId, InputStream da String artifactType = lookupArtifactType(groupId, artifactId); TypedContent typedContent = TypedContent.create(content, ct); rulesService.applyRules(defaultGroupIdToNull(groupId), artifactId, artifactType, typedContent, - RuleApplicationType.UPDATE, Collections.emptyList(), Collections.emptyMap()); //TODO:references not supported for testing update + RuleApplicationType.UPDATE, Collections.emptyList(), Collections.emptyMap()); // TODO:references + // not supported + // for testing + // update } /** @@ -570,7 +613,8 @@ public void testUpdateArtifact(String groupId, String artifactId, InputStream da */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) - public Response getArtifactVersion(String groupId, String artifactId, String version, Boolean dereference) { + public Response getArtifactVersion(String groupId, String artifactId, String version, + Boolean dereference) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("version", version); @@ -579,28 +623,34 @@ public Response getArtifactVersion(String groupId, String artifactId, String ver dereference = Boolean.FALSE; } - ArtifactVersionMetaDataDto metaData = storage.getArtifactVersionMetaData(defaultGroupIdToNull(groupId), artifactId, version); + 
ArtifactVersionMetaDataDto metaData = storage + .getArtifactVersionMetaData(defaultGroupIdToNull(groupId), artifactId, version); if (VersionState.DISABLED.equals(metaData.getState())) { throw new VersionNotFoundException(groupId, artifactId, version); } - StoredArtifactVersionDto artifact = storage.getArtifactVersionContent(defaultGroupIdToNull(groupId), artifactId, version); + StoredArtifactVersionDto artifact = storage.getArtifactVersionContent(defaultGroupIdToNull(groupId), + artifactId, version); TypedContent contentToReturn = TypedContent.create(artifact.getContent(), artifact.getContentType()); - contentToReturn = handleContentReferences(dereference, metaData.getArtifactType(), contentToReturn, artifact.getReferences()); + contentToReturn = handleContentReferences(dereference, metaData.getArtifactType(), contentToReturn, + artifact.getReferences()); - Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), contentToReturn.getContentType()); + Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), + contentToReturn.getContentType()); checkIfDeprecated(metaData::getState, groupId, artifactId, version, builder); return builder.build(); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactVersion(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactVersion(java.lang.String, + * java.lang.String, java.lang.String) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteArtifactVersion(String groupId, String artifactId, String version) { if (!restConfig.isArtifactVersionDeletionEnabled()) { - throw new NotAllowedException("Artifact version deletion operation is not enabled.", HttpMethod.GET, (String[]) null); + throw new NotAllowedException("Artifact version deletion operation is not enabled.", + HttpMethod.GET, (String[]) null); } requireParameter("groupId", groupId); @@ 
-611,7 +661,8 @@ public void deleteArtifactVersion(String groupId, String artifactId, String vers } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactVersionMetaData(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactVersionMetaData(java.lang.String, + * java.lang.String, java.lang.String) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) @@ -620,65 +671,74 @@ public VersionMetaData getArtifactVersionMetaData(String groupId, String artifac requireParameter("artifactId", artifactId); requireParameter("version", version); - ArtifactVersionMetaDataDto dto = storage.getArtifactVersionMetaData(defaultGroupIdToNull(groupId), artifactId, version); - return V2ApiUtil.dtoToVersionMetaData(defaultGroupIdToNull(groupId), artifactId, dto.getArtifactType(), dto); + ArtifactVersionMetaDataDto dto = storage.getArtifactVersionMetaData(defaultGroupIdToNull(groupId), + artifactId, version); + return V2ApiUtil.dtoToVersionMetaData(defaultGroupIdToNull(groupId), artifactId, + dto.getArtifactType(), dto); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactVersionMetaData(java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.EditableMetaData) + * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactVersionMetaData(java.lang.String, + * java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.EditableMetaData) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_EDITABLE_METADATA}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", + KEY_EDITABLE_METADATA }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, EditableMetaData data) { - 
v3.updateArtifactVersionMetaData(groupId, artifactId, version, io.apicurio.registry.rest.v3.beans.EditableVersionMetaData.builder() - .description(data.getDescription()) - .labels(V2ApiUtil.toV3Labels(data.getLabels(), data.getProperties())) - .name(data.getName()) - .build()); + public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, + EditableMetaData data) { + v3.updateArtifactVersionMetaData(groupId, artifactId, version, + io.apicurio.registry.rest.v3.beans.EditableVersionMetaData.builder() + .description(data.getDescription()) + .labels(V2ApiUtil.toV3Labels(data.getLabels(), data.getProperties())) + .name(data.getName()).build()); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactVersionMetaData(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactVersionMetaData(java.lang.String, + * java.lang.String, java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteArtifactVersionMetaData(String groupId, String artifactId, String version) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("version", version); - EditableVersionMetaDataDto vmd = EditableVersionMetaDataDto.builder() - .name("") - .description("") - .labels(Map.of()) - .build(); + EditableVersionMetaDataDto vmd = EditableVersionMetaDataDto.builder().name("").description("") + .labels(Map.of()).build(); storage.updateArtifactVersionMetaData(defaultGroupIdToNull(groupId), artifactId, version, vmd); } - + /** - * @see io.apicurio.registry.rest.v2.GroupsResource#addArtifactVersionComment(java.lang.String, java.lang.String, java.lang.String, 
io.apicurio.registry.rest.v2.beans.NewComment) + * @see io.apicurio.registry.rest.v2.GroupsResource#addArtifactVersionComment(java.lang.String, + * java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.NewComment) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public Comment addArtifactVersionComment(String groupId, String artifactId, String version, NewComment data) { + public Comment addArtifactVersionComment(String groupId, String artifactId, String version, + NewComment data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("version", version); - - CommentDto newComment = storage.createArtifactVersionComment(defaultGroupIdToNull(groupId), artifactId, version, data.getValue()); + + CommentDto newComment = storage.createArtifactVersionComment(defaultGroupIdToNull(groupId), + artifactId, version, data.getValue()); return V2ApiUtil.commentDtoToComment(newComment); } - + /** - * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactVersionComment(java.lang.String, java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactVersionComment(java.lang.String, + * java.lang.String, java.lang.String, java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", "comment_id"}) // TODO + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", + "comment_id" }) // TODO @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void deleteArtifactVersionComment(String groupId, String artifactId, String version, String commentId) { + public void 
deleteArtifactVersionComment(String groupId, String artifactId, String version, + String commentId) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("version", version); @@ -686,9 +746,10 @@ public void deleteArtifactVersionComment(String groupId, String artifactId, Stri storage.deleteArtifactVersionComment(defaultGroupIdToNull(groupId), artifactId, version, commentId); } - + /** - * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactVersionComments(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v2.GroupsResource#getArtifactVersionComments(java.lang.String, + * java.lang.String, java.lang.String) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) @@ -698,50 +759,57 @@ public List getArtifactVersionComments(String groupId, String artifactI requireParameter("version", version); return storage.getArtifactVersionComments(defaultGroupIdToNull(groupId), artifactId, version).stream() - .map(V2ApiUtil::commentDtoToComment) - .collect(Collectors.toList()); + .map(V2ApiUtil::commentDtoToComment).collect(Collectors.toList()); } - + /** - * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactVersionComment(java.lang.String, java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.NewComment) + * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactVersionComment(java.lang.String, + * java.lang.String, java.lang.String, java.lang.String, + * io.apicurio.registry.rest.v2.beans.NewComment) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", "comment_id"}) // TODO + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", + "comment_id" }) // TODO @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void updateArtifactVersionComment(String groupId, 
String artifactId, String version, String commentId, NewComment data) { + public void updateArtifactVersionComment(String groupId, String artifactId, String version, + String commentId, NewComment data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("version", version); requireParameter("commentId", commentId); requireParameter("value", data.getValue()); - storage.updateArtifactVersionComment(defaultGroupIdToNull(groupId), artifactId, version, commentId, data.getValue()); + storage.updateArtifactVersionComment(defaultGroupIdToNull(groupId), artifactId, version, commentId, + data.getValue()); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactVersionState(java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.UpdateState) + * @see io.apicurio.registry.rest.v2.GroupsResource#updateArtifactVersionState(java.lang.String, + * java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.UpdateState) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_UPDATE_STATE}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", + KEY_UPDATE_STATE }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void updateArtifactVersionState(String groupId, String artifactId, String version, UpdateState data) { + public void updateArtifactVersionState(String groupId, String artifactId, String version, + UpdateState data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("version", version); EditableVersionMetaDataDto emd = EditableVersionMetaDataDto.builder() - .state(VersionState.fromValue(data.getState().name())) - .build(); + .state(VersionState.fromValue(data.getState().name())).build(); storage.updateArtifactVersionMetaData(defaultGroupIdToNull(groupId), artifactId, version, 
emd); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#listArtifactsInGroup(String, BigInteger, BigInteger, SortOrder, SortBy) + * @see io.apicurio.registry.rest.v2.GroupsResource#listArtifactsInGroup(String, BigInteger, BigInteger, + * SortOrder, SortBy) */ @Override @Authorized(style = AuthorizedStyle.GroupOnly, level = AuthorizedLevel.Read) public ArtifactSearchResults listArtifactsInGroup(String groupId, BigInteger limit, BigInteger offset, - SortOrder order, SortBy orderby) { + SortOrder order, SortBy orderby) { requireParameter("groupId", groupId); if (orderby == null) { @@ -755,12 +823,14 @@ public ArtifactSearchResults listArtifactsInGroup(String groupId, BigInteger lim } final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc + : OrderDirection.desc; Set filters = new HashSet<>(); filters.add(SearchFilter.ofGroupId(defaultGroupIdToNull(groupId))); - ArtifactSearchResultsDto resultsDto = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), limit.intValue()); + ArtifactSearchResultsDto resultsDto = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V2ApiUtil.dtoToSearchResults(resultsDto); } @@ -768,7 +838,7 @@ public ArtifactSearchResults listArtifactsInGroup(String groupId, BigInteger lim * @see io.apicurio.registry.rest.v2.GroupsResource#deleteArtifactsInGroup(java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID }) @Authorized(style = AuthorizedStyle.GroupOnly, level = AuthorizedLevel.Write) public void deleteArtifactsInGroup(String groupId) { requireParameter("groupId", groupId); @@ -777,30 +847,41 @@ public void deleteArtifactsInGroup(String groupId) { } /** - * @see 
io.apicurio.registry.rest.v2.GroupsResource#createArtifact(String, String, String, String, IfExists, Boolean, String, String, String, String, String, String, InputStream) + * @see io.apicurio.registry.rest.v2.GroupsResource#createArtifact(String, String, String, String, + * IfExists, Boolean, String, String, String, String, String, String, InputStream) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_TYPE, "2", KEY_ARTIFACT_ID, "3", KEY_VERSION, "4", KEY_IF_EXISTS, "5", KEY_CANONICAL, "6", KEY_DESCRIPTION, "7", KEY_DESCRIPTION_ENCODED, "8", KEY_NAME, "9", KEY_NAME_ENCODED, "10", KEY_FROM_URL, "11", KEY_SHA}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_TYPE, "2", KEY_ARTIFACT_ID, "3", + KEY_VERSION, "4", KEY_IF_EXISTS, "5", KEY_CANONICAL, "6", KEY_DESCRIPTION, "7", + KEY_DESCRIPTION_ENCODED, "8", KEY_NAME, "9", KEY_NAME_ENCODED, "10", KEY_FROM_URL, "11", + KEY_SHA }) @Authorized(style = AuthorizedStyle.GroupOnly, level = AuthorizedLevel.Write) - public ArtifactMetaData createArtifact(String groupId, String xRegistryArtifactType, String xRegistryArtifactId, - String xRegistryVersion, IfExists ifExists, Boolean canonical, - String xRegistryDescription, String xRegistryDescriptionEncoded, - String xRegistryName, String xRegistryNameEncoded, - String xRegistryContentHash, String xRegistryHashAlgorithm, InputStream data) { - return this.createArtifactWithRefs(groupId, xRegistryArtifactType, xRegistryArtifactId, xRegistryVersion, ifExists, canonical, xRegistryDescription, xRegistryDescriptionEncoded, xRegistryName, xRegistryNameEncoded, xRegistryContentHash, xRegistryHashAlgorithm, data, Collections.emptyList()); + public ArtifactMetaData createArtifact(String groupId, String xRegistryArtifactType, + String xRegistryArtifactId, String xRegistryVersion, IfExists ifExists, Boolean canonical, + String xRegistryDescription, String xRegistryDescriptionEncoded, String xRegistryName, + String xRegistryNameEncoded, String 
xRegistryContentHash, String xRegistryHashAlgorithm, + InputStream data) { + return this.createArtifactWithRefs(groupId, xRegistryArtifactType, xRegistryArtifactId, + xRegistryVersion, ifExists, canonical, xRegistryDescription, xRegistryDescriptionEncoded, + xRegistryName, xRegistryNameEncoded, xRegistryContentHash, xRegistryHashAlgorithm, data, + Collections.emptyList()); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#createArtifact(String, String, String, String, IfExists, Boolean, String, String, String, String, String, String, ArtifactContent) + * @see io.apicurio.registry.rest.v2.GroupsResource#createArtifact(String, String, String, String, + * IfExists, Boolean, String, String, String, String, String, String, ArtifactContent) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_TYPE, "2", KEY_ARTIFACT_ID, "3", KEY_VERSION, "4", KEY_IF_EXISTS, "5", KEY_CANONICAL, "6", KEY_DESCRIPTION, "7", KEY_DESCRIPTION_ENCODED, "8", KEY_NAME, "9", KEY_NAME_ENCODED, "10", KEY_FROM_URL, "11", KEY_SHA}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_TYPE, "2", KEY_ARTIFACT_ID, "3", + KEY_VERSION, "4", KEY_IF_EXISTS, "5", KEY_CANONICAL, "6", KEY_DESCRIPTION, "7", + KEY_DESCRIPTION_ENCODED, "8", KEY_NAME, "9", KEY_NAME_ENCODED, "10", KEY_FROM_URL, "11", + KEY_SHA }) @Authorized(style = AuthorizedStyle.GroupOnly, level = AuthorizedLevel.Write) - public ArtifactMetaData createArtifact(String groupId, String xRegistryArtifactType, String xRegistryArtifactId, - String xRegistryVersion, IfExists ifExists, Boolean canonical, - String xRegistryDescription, String xRegistryDescriptionEncoded, - String xRegistryName, String xRegistryNameEncoded, - String xRegistryContentHash, String xRegistryHashAlgorithm, ArtifactContent data) { + public ArtifactMetaData createArtifact(String groupId, String xRegistryArtifactType, + String xRegistryArtifactId, String xRegistryVersion, IfExists ifExists, Boolean canonical, + String 
xRegistryDescription, String xRegistryDescriptionEncoded, String xRegistryName, + String xRegistryNameEncoded, String xRegistryContentHash, String xRegistryHashAlgorithm, + ArtifactContent data) { requireParameter("content", data.getContent()); Client client = null; @@ -814,7 +895,10 @@ public ArtifactMetaData createArtifact(String groupId, String xRegistryArtifactT content = IoUtil.toStream(data.getContent()); } - return this.createArtifactWithRefs(groupId, xRegistryArtifactType, xRegistryArtifactId, xRegistryVersion, ifExists, canonical, xRegistryDescription, xRegistryDescriptionEncoded, xRegistryName, xRegistryNameEncoded, xRegistryContentHash, xRegistryHashAlgorithm, content, data.getReferences()); + return this.createArtifactWithRefs(groupId, xRegistryArtifactType, xRegistryArtifactId, + xRegistryVersion, ifExists, canonical, xRegistryDescription, xRegistryDescriptionEncoded, + xRegistryName, xRegistryNameEncoded, xRegistryContentHash, xRegistryHashAlgorithm, + content, data.getReferences()); } catch (KeyManagementException kme) { throw new RuntimeException(kme); } catch (NoSuchAlgorithmException nsae) { @@ -827,8 +911,7 @@ public ArtifactMetaData createArtifact(String groupId, String xRegistryArtifactT } public enum RegistryHashAlgorithm { - SHA256, - MD5 + SHA256, MD5 } /** @@ -839,35 +922,32 @@ public enum RegistryHashAlgorithm { private InputStream fetchContentFromURL(Client client, URI url) { try { // 1. Registry issues HTTP HEAD request to the target URL. - List contentLengthHeaders = client - .target(url) - .request() - .head() - .getHeaders() + List contentLengthHeaders = client.target(url).request().head().getHeaders() .get("Content-Length"); if (contentLengthHeaders == null || contentLengthHeaders.size() < 1) { - throw new BadRequestException("Requested resource URL does not provide 'Content-Length' in the headers"); + throw new BadRequestException( + "Requested resource URL does not provide 'Content-Length' in the headers"); } // 2. 
According to HTTP specification, target server must return Content-Length header. int contentLength = Integer.parseInt(contentLengthHeaders.get(0).toString()); - // 3. Registry analyzes value of Content-Length to check if file with declared size could be processed securely. + // 3. Registry analyzes value of Content-Length to check if file with declared size could be + // processed securely. if (contentLength > restConfig.getDownloadMaxSize()) { - throw new BadRequestException("Requested resource is bigger than " + restConfig.getDownloadMaxSize() + " and cannot be downloaded."); + throw new BadRequestException("Requested resource is bigger than " + + restConfig.getDownloadMaxSize() + " and cannot be downloaded."); } if (contentLength <= 0) { throw new BadRequestException("Requested resource URL is providing 'Content-Length' <= 0."); } - // 4. Finally, registry issues HTTP GET to the target URL and fetches only amount of bytes specified by HTTP HEAD from step 1. - return new BufferedInputStream(client - .target(url) - .request() - .get() - .readEntity(InputStream.class), contentLength); + // 4. Finally, registry issues HTTP GET to the target URL and fetches only amount of bytes + // specified by HTTP HEAD from step 1. + return new BufferedInputStream(client.target(url).request().get().readEntity(InputStream.class), + contentLength); } catch (BadRequestException bre) { throw bre; } catch (Exception e) { @@ -876,7 +956,7 @@ private InputStream fetchContentFromURL(Client client, URI url) { } /** - * Creates an artifact with references. Shared by both variants of createArtifact. + * Creates an artifact with references. Shared by both variants of createArtifact. 
* * @param groupId * @param xRegistryArtifactType @@ -894,17 +974,17 @@ private InputStream fetchContentFromURL(Client client, URI url) { * @param references */ @SuppressWarnings("deprecation") - private ArtifactMetaData createArtifactWithRefs(String groupId, String xRegistryArtifactType, String xRegistryArtifactId, - String xRegistryVersion, IfExists ifExists, Boolean canonical, - String xRegistryDescription, String xRegistryDescriptionEncoded, - String xRegistryName, String xRegistryNameEncoded, - String xRegistryContentHash, String xRegistryHashAlgorithm, - InputStream data, List references) { + private ArtifactMetaData createArtifactWithRefs(String groupId, String xRegistryArtifactType, + String xRegistryArtifactId, String xRegistryVersion, IfExists ifExists, Boolean canonical, + String xRegistryDescription, String xRegistryDescriptionEncoded, String xRegistryName, + String xRegistryNameEncoded, String xRegistryContentHash, String xRegistryHashAlgorithm, + InputStream data, List references) { requireParameter("groupId", groupId); maxOneOf("X-Registry-Name", xRegistryName, "X-Registry-Name-Encoded", xRegistryNameEncoded); - maxOneOf("X-Registry-Description", xRegistryDescription, "X-Registry-Description-Encoded", xRegistryDescriptionEncoded); + maxOneOf("X-Registry-Description", xRegistryDescription, "X-Registry-Description-Encoded", + xRegistryDescriptionEncoded); String artifactName = getOneOf(xRegistryName, decode(xRegistryNameEncoded)); String artifactDescription = getOneOf(xRegistryDescription, decode(xRegistryDescriptionEncoded)); @@ -924,13 +1004,16 @@ private ArtifactMetaData createArtifactWithRefs(String groupId, String xRegistry if (xRegistryContentHash != null) { String calculatedSha = null; try { - RegistryHashAlgorithm algorithm = (xRegistryHashAlgorithm == null) ? RegistryHashAlgorithm.SHA256 : RegistryHashAlgorithm.valueOf(xRegistryHashAlgorithm); + RegistryHashAlgorithm algorithm = (xRegistryHashAlgorithm == null) + ? 
RegistryHashAlgorithm.SHA256 : RegistryHashAlgorithm.valueOf(xRegistryHashAlgorithm); switch (algorithm) { case MD5: - calculatedSha = Hashing.md5().hashString(content.content(), StandardCharsets.UTF_8).toString(); + calculatedSha = Hashing.md5().hashString(content.content(), StandardCharsets.UTF_8) + .toString(); break; case SHA256: - calculatedSha = Hashing.sha256().hashString(content.content(), StandardCharsets.UTF_8).toString(); + calculatedSha = Hashing.sha256().hashString(content.content(), StandardCharsets.UTF_8) + .toString(); break; } } catch (Exception e) { @@ -953,24 +1036,24 @@ private ArtifactMetaData createArtifactWithRefs(String groupId, String xRegistry } else if (!ArtifactIdValidator.isArtifactIdAllowed(artifactId)) { throw new InvalidArtifactIdException(ArtifactIdValidator.ARTIFACT_ID_ERROR_MESSAGE); } - if (ContentTypeUtil.isApplicationYaml(ct) || - (ContentTypeUtil.isApplicationCreateExtended(ct) && ContentTypeUtil.isParsableYaml(content))) { + if (ContentTypeUtil.isApplicationYaml(ct) || (ContentTypeUtil.isApplicationCreateExtended(ct) + && ContentTypeUtil.isParsableYaml(content))) { content = ContentTypeUtil.yamlToJson(content); ct = ContentTypes.APPLICATION_JSON; } TypedContent typedContent = TypedContent.create(content, ct); - String artifactType = ArtifactTypeUtil.determineArtifactType(typedContent, xRegistryArtifactType, factory); + String artifactType = ArtifactTypeUtil.determineArtifactType(typedContent, xRegistryArtifactType, + factory); final List referencesAsDtos = toReferenceDtos(references); - //Try to resolve the new artifact references and the nested ones (if any) + // Try to resolve the new artifact references and the nested ones (if any) final Map resolvedReferences = storage.resolveReferences(referencesAsDtos); rulesService.applyRules(defaultGroupIdToNull(groupId), artifactId, artifactType, typedContent, RuleApplicationType.CREATE, toV3Refs(references), resolvedReferences); - // Extract metadata from content, then override 
extracted values with provided values. EditableArtifactMetaDataDto metaData = extractMetaData(artifactType, content); if (artifactName != null && artifactName.trim().isEmpty()) { @@ -980,16 +1063,10 @@ private ArtifactMetaData createArtifactWithRefs(String groupId, String xRegistry metaData.setDescription(artifactDescription); } - ContentWrapperDto contentDto = ContentWrapperDto.builder() - .contentType(ct) - .content(content) - .references(referencesAsDtos) - .build(); + ContentWrapperDto contentDto = ContentWrapperDto.builder().contentType(ct).content(content) + .references(referencesAsDtos).build(); EditableVersionMetaDataDto versionMetaData = EditableVersionMetaDataDto.builder() - .name(metaData.getName()) - .description(metaData.getDescription()) - .labels(Map.of()) - .build(); + .name(metaData.getName()).description(metaData.getDescription()).labels(Map.of()).build(); Pair createResult = storage.createArtifact( defaultGroupIdToNull(groupId), artifactId, artifactType, metaData, xRegistryVersion, @@ -997,16 +1074,19 @@ private ArtifactMetaData createArtifactWithRefs(String groupId, String xRegistry return V2ApiUtil.dtoToMetaData(groupId, artifactId, artifactType, createResult.getRight()); } catch (ArtifactAlreadyExistsException ex) { - return handleIfExists(groupId, xRegistryArtifactId, xRegistryVersion, ifExists, artifactName, artifactDescription, content, ct, fcanonical, references); + return handleIfExists(groupId, xRegistryArtifactId, xRegistryVersion, ifExists, artifactName, + artifactDescription, content, ct, fcanonical, references); } } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#listArtifactVersions(String, String, BigInteger, BigInteger) + * @see io.apicurio.registry.rest.v2.GroupsResource#listArtifactVersions(String, String, BigInteger, + * BigInteger) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) - public VersionSearchResults listArtifactVersions(String groupId, String artifactId, 
BigInteger offset, BigInteger limit) { + public VersionSearchResults listArtifactVersions(String groupId, String artifactId, BigInteger offset, + BigInteger limit) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); @@ -1020,42 +1100,50 @@ public VersionSearchResults listArtifactVersions(String groupId, String artifact limit = BigInteger.valueOf(20); } - Set filters = Set.of( - SearchFilter.ofGroupId(defaultGroupIdToNull(groupId)), - SearchFilter.ofArtifactId(artifactId) - ); - VersionSearchResultsDto resultsDto = storage.searchVersions(filters, OrderBy.createdOn, OrderDirection.asc, offset.intValue(), limit.intValue()); + Set filters = Set.of(SearchFilter.ofGroupId(defaultGroupIdToNull(groupId)), + SearchFilter.ofArtifactId(artifactId)); + VersionSearchResultsDto resultsDto = storage.searchVersions(filters, OrderBy.createdOn, + OrderDirection.asc, offset.intValue(), limit.intValue()); return V2ApiUtil.dtoToSearchResults(resultsDto); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#createArtifactVersion(String, String, String, String, String, String, String, InputStream) + * @see io.apicurio.registry.rest.v2.GroupsResource#createArtifactVersion(String, String, String, String, + * String, String, String, InputStream) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_NAME, "4", KEY_DESCRIPTION, "5", KEY_DESCRIPTION_ENCODED, "6", KEY_NAME_ENCODED}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_NAME, + "4", KEY_DESCRIPTION, "5", KEY_DESCRIPTION_ENCODED, "6", KEY_NAME_ENCODED }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public VersionMetaData createArtifactVersion(String groupId, String artifactId, - String xRegistryVersion, String xRegistryName, - String xRegistryDescription, String xRegistryDescriptionEncoded, - String xRegistryNameEncoded, InputStream data) { - 
return this.createArtifactVersionWithRefs(groupId, artifactId, xRegistryVersion, xRegistryName, xRegistryDescription, xRegistryDescriptionEncoded, xRegistryNameEncoded, data, Collections.emptyList()); + public VersionMetaData createArtifactVersion(String groupId, String artifactId, String xRegistryVersion, + String xRegistryName, String xRegistryDescription, String xRegistryDescriptionEncoded, + String xRegistryNameEncoded, InputStream data) { + return this.createArtifactVersionWithRefs(groupId, artifactId, xRegistryVersion, xRegistryName, + xRegistryDescription, xRegistryDescriptionEncoded, xRegistryNameEncoded, data, + Collections.emptyList()); } /** - * @see io.apicurio.registry.rest.v2.GroupsResource#createArtifactVersion(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.rest.v2.beans.ArtifactContent) + * @see io.apicurio.registry.rest.v2.GroupsResource#createArtifactVersion(java.lang.String, + * java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, + * java.lang.String, io.apicurio.registry.rest.v2.beans.ArtifactContent) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_NAME, "4", KEY_DESCRIPTION, "5", KEY_DESCRIPTION_ENCODED, "6", KEY_NAME_ENCODED}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_NAME, + "4", KEY_DESCRIPTION, "5", KEY_DESCRIPTION_ENCODED, "6", KEY_NAME_ENCODED }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public VersionMetaData createArtifactVersion(String groupId, String artifactId, String xRegistryVersion, - String xRegistryName, String xRegistryDescription, String xRegistryDescriptionEncoded, - String xRegistryNameEncoded, ArtifactContent data) { + String xRegistryName, String xRegistryDescription, String xRegistryDescriptionEncoded, + String 
xRegistryNameEncoded, ArtifactContent data) { requireParameter("content", data.getContent()); - return this.createArtifactVersionWithRefs(groupId, artifactId, xRegistryVersion, xRegistryName, xRegistryDescription, xRegistryDescriptionEncoded, xRegistryNameEncoded, IoUtil.toStream(data.getContent()), data.getReferences()); + return this.createArtifactVersionWithRefs(groupId, artifactId, xRegistryVersion, xRegistryName, + xRegistryDescription, xRegistryDescriptionEncoded, xRegistryNameEncoded, + IoUtil.toStream(data.getContent()), data.getReferences()); } /** - * Creates an artifact version with references. Shared implementation for both variants of createArtifactVersion. + * Creates an artifact version with references. Shared implementation for both variants of + * createArtifactVersion. * * @param groupId * @param artifactId @@ -1067,13 +1155,17 @@ public VersionMetaData createArtifactVersion(String groupId, String artifactId, * @param data * @param references */ - private VersionMetaData createArtifactVersionWithRefs(String groupId, String artifactId, String xRegistryVersion, String xRegistryName, String xRegistryDescription, String xRegistryDescriptionEncoded, String xRegistryNameEncoded, InputStream data, List references) { + private VersionMetaData createArtifactVersionWithRefs(String groupId, String artifactId, + String xRegistryVersion, String xRegistryName, String xRegistryDescription, + String xRegistryDescriptionEncoded, String xRegistryNameEncoded, InputStream data, + List references) { // TODO do something with the user-provided version info requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); maxOneOf("X-Registry-Name", xRegistryName, "X-Registry-Name-Encoded", xRegistryNameEncoded); - maxOneOf("X-Registry-Description", xRegistryDescription, "X-Registry-Description-Encoded", xRegistryDescriptionEncoded); + maxOneOf("X-Registry-Description", xRegistryDescription, "X-Registry-Description-Encoded", + 
xRegistryDescriptionEncoded); String artifactName = getOneOf(xRegistryName, decode(xRegistryNameEncoded)); String artifactDescription = getOneOf(xRegistryDescription, decode(xRegistryDescriptionEncoded)); @@ -1088,7 +1180,8 @@ private VersionMetaData createArtifactVersionWithRefs(String groupId, String art ct = ContentTypes.APPLICATION_JSON; } - // Transform the given references into dtos and set the contentId, this will also detect if any of the passed references does not exist. + // Transform the given references into dtos and set the contentId, this will also detect if any of the + // passed references does not exist. final List referencesAsDtos = toReferenceDtos(references); // Try to resolve the new artifact references and the nested ones (if any) @@ -1099,14 +1192,12 @@ private VersionMetaData createArtifactVersionWithRefs(String groupId, String art rulesService.applyRules(defaultGroupIdToNull(groupId), artifactId, artifactType, typedContent, RuleApplicationType.UPDATE, toV3Refs(references), resolvedReferences); EditableVersionMetaDataDto metaData = getEditableVersionMetaData(artifactName, artifactDescription); - ContentWrapperDto contentDto = ContentWrapperDto.builder() - .content(content) - .contentType(ct) - .references(referencesAsDtos) - .build(); - ArtifactVersionMetaDataDto vmdDto = storage.createArtifactVersion(defaultGroupIdToNull(groupId), artifactId, - xRegistryVersion, artifactType, contentDto, metaData, List.of()); - return V2ApiUtil.dtoToVersionMetaData(defaultGroupIdToNull(groupId), artifactId, artifactType, vmdDto); + ContentWrapperDto contentDto = ContentWrapperDto.builder().content(content).contentType(ct) + .references(referencesAsDtos).build(); + ArtifactVersionMetaDataDto vmdDto = storage.createArtifactVersion(defaultGroupIdToNull(groupId), + artifactId, xRegistryVersion, artifactType, contentDto, metaData, List.of()); + return V2ApiUtil.dtoToVersionMetaData(defaultGroupIdToNull(groupId), artifactId, artifactType, + vmdDto); } /** @@ 
-1118,7 +1209,8 @@ private VersionMetaData createArtifactVersionWithRefs(String groupId, String art * @param version * @param builder */ - private void checkIfDeprecated(Supplier stateSupplier, String groupId, String artifactId, String version, Response.ResponseBuilder builder) { + private void checkIfDeprecated(Supplier stateSupplier, String groupId, String artifactId, + String version, Response.ResponseBuilder builder) { HeadersHack.checkIfDeprecated(stateSupplier, groupId, artifactId, version, builder); } @@ -1133,8 +1225,7 @@ private String lookupArtifactType(String groupId, String artifactId) { } /** - * Make sure this is ONLY used when request instance is active. - * e.g. in actual http request + * Make sure this is ONLY used when request instance is active. e.g. in actual http request */ private String getContentType() { return request.getContentType(); @@ -1146,7 +1237,8 @@ private static final void requireParameter(String parameterName, Object paramete } } - private static void maxOneOf(String parameterOneName, Object parameterOneValue, String parameterTwoName, Object parameterTwoValue) { + private static void maxOneOf(String parameterOneName, Object parameterOneValue, String parameterTwoName, + Object parameterTwoValue) { if (parameterOneValue != null && parameterTwoValue != null) { throw new ParametersConflictException(parameterOneName, parameterTwoName); } @@ -1163,9 +1255,9 @@ private static String decode(String encoded) { return new String(Base64.decode(encoded)); } - private ArtifactMetaData handleIfExists(String groupId, String artifactId, String version, IfExists ifExists, - String artifactName, String artifactDescription, ContentHandle content, - String contentType, boolean canonical, List references) { + private ArtifactMetaData handleIfExists(String groupId, String artifactId, String version, + IfExists ifExists, String artifactName, String artifactDescription, ContentHandle content, + String contentType, boolean canonical, List references) { 
final ArtifactMetaData artifactMetaData = getArtifactMetaData(groupId, artifactId); if (ifExists == null) { ifExists = IfExists.FAIL; @@ -1173,33 +1265,39 @@ private ArtifactMetaData handleIfExists(String groupId, String artifactId, Strin switch (ifExists) { case UPDATE: - return updateArtifactInternal(groupId, artifactId, version, artifactName, artifactDescription, content, contentType, references); + return updateArtifactInternal(groupId, artifactId, version, artifactName, artifactDescription, + content, contentType, references); case RETURN: return artifactMetaData; case RETURN_OR_UPDATE: - return handleIfExistsReturnOrUpdate(groupId, artifactId, version, artifactName, artifactDescription, content, contentType, canonical, references); + return handleIfExistsReturnOrUpdate(groupId, artifactId, version, artifactName, + artifactDescription, content, contentType, canonical, references); default: throw new ArtifactAlreadyExistsException(groupId, artifactId); } } private ArtifactMetaData handleIfExistsReturnOrUpdate(String groupId, String artifactId, String version, - String artifactName, String artifactDescription, - ContentHandle content, String contentType, boolean canonical, List references) { + String artifactName, String artifactDescription, ContentHandle content, String contentType, + boolean canonical, List references) { try { TypedContent typedContent = TypedContent.create(content, contentType); - ArtifactVersionMetaDataDto mdDto = this.storage.getArtifactVersionMetaDataByContent(defaultGroupIdToNull(groupId), - artifactId, canonical, typedContent, toReferenceDtos(references)); - ArtifactMetaData md = V2ApiUtil.dtoToMetaData(defaultGroupIdToNull(groupId), artifactId, null, mdDto); + ArtifactVersionMetaDataDto mdDto = this.storage.getArtifactVersionMetaDataByContent( + defaultGroupIdToNull(groupId), artifactId, canonical, typedContent, + toReferenceDtos(references)); + ArtifactMetaData md = V2ApiUtil.dtoToMetaData(defaultGroupIdToNull(groupId), artifactId, 
null, + mdDto); return md; } catch (ArtifactNotFoundException nfe) { // This is OK - we'll update the artifact if there is no matching content already there. } - return updateArtifactInternal(groupId, artifactId, version, artifactName, artifactDescription, content, contentType, references); + return updateArtifactInternal(groupId, artifactId, version, artifactName, artifactDescription, + content, contentType, references); } - private ArtifactMetaData updateArtifactInternal(String groupId, String artifactId, String version, String name, String description, - ContentHandle content, String contentType, List references) { + private ArtifactMetaData updateArtifactInternal(String groupId, String artifactId, String version, + String name, String description, ContentHandle content, String contentType, + List references) { if (ContentTypeUtil.isApplicationYaml(contentType)) { content = ContentTypeUtil.yamlToJson(content); @@ -1208,7 +1306,8 @@ private ArtifactMetaData updateArtifactInternal(String groupId, String artifactI String artifactType = lookupArtifactType(groupId, artifactId); - //Transform the given references into dtos and set the contentId, this will also detect if any of the passed references does not exist. + // Transform the given references into dtos and set the contentId, this will also detect if any of the + // passed references does not exist. 
final List referencesAsDtos = toReferenceDtos(references); final Map resolvedReferences = storage.resolveReferences(referencesAsDtos); @@ -1225,19 +1324,13 @@ private ArtifactMetaData updateArtifactInternal(String groupId, String artifactI if (description != null && description.trim().isEmpty()) { artifactMD.setDescription(description); } - EditableVersionMetaDataDto metaData = EditableVersionMetaDataDto.builder() - .name(artifactMD.getName()) - .description(artifactMD.getDescription()) - .labels(artifactMD.getLabels()) - .build(); + EditableVersionMetaDataDto metaData = EditableVersionMetaDataDto.builder().name(artifactMD.getName()) + .description(artifactMD.getDescription()).labels(artifactMD.getLabels()).build(); - ContentWrapperDto contentDto = ContentWrapperDto.builder() - .content(content) - .contentType(contentType) - .references(referencesAsDtos) - .build(); - ArtifactVersionMetaDataDto dto = storage.createArtifactVersion(defaultGroupIdToNull(groupId), artifactId, - version, artifactType, contentDto, metaData, List.of()); + ContentWrapperDto contentDto = ContentWrapperDto.builder().content(content).contentType(contentType) + .references(referencesAsDtos).build(); + ArtifactVersionMetaDataDto dto = storage.createArtifactVersion(defaultGroupIdToNull(groupId), + artifactId, version, artifactType, contentDto, metaData, List.of()); // Note: if the version was created, we need to update the artifact metadata as well, because // those are the semantics of the v2 API. 
:( @@ -1248,20 +1341,14 @@ private ArtifactMetaData updateArtifactInternal(String groupId, String artifactI private EditableArtifactMetaDataDto getEditableArtifactMetaData(String name, String description) { if (name != null || description != null) { - return EditableArtifactMetaDataDto.builder() - .name(name) - .description(description) - .build(); + return EditableArtifactMetaDataDto.builder().name(name).description(description).build(); } return null; } private EditableVersionMetaDataDto getEditableVersionMetaData(String name, String description) { if (name != null || description != null) { - return EditableVersionMetaDataDto.builder() - .name(name) - .description(description) - .build(); + return EditableVersionMetaDataDto.builder().name(name).description(description).build(); } return null; } @@ -1270,26 +1357,22 @@ private List toReferenceDtos(List refer if (references == null) { references = Collections.emptyList(); } - return references.stream() - .map(r -> { - r.setGroupId(defaultGroupIdToNull(r.getGroupId())); - return r; - }) // .peek(...) may be optimized away - .map(V2ApiUtil::referenceToDto) - .collect(Collectors.toList()); + return references.stream().map(r -> { + r.setGroupId(defaultGroupIdToNull(r.getGroupId())); + return r; + }) // .peek(...) 
may be optimized away + .map(V2ApiUtil::referenceToDto).collect(Collectors.toList()); } - private static List toV3Refs(List references) { + private static List toV3Refs( + List references) { return references.stream().map(ref -> toV3Ref(ref)).collect(Collectors.toList()); } private static io.apicurio.registry.rest.v3.beans.ArtifactReference toV3Ref(ArtifactReference reference) { return io.apicurio.registry.rest.v3.beans.ArtifactReference.builder() - .artifactId(reference.getArtifactId()) - .groupId(reference.getGroupId()) - .version(reference.getVersion()) - .name(reference.getName()) - .build(); + .artifactId(reference.getArtifactId()).groupId(reference.getGroupId()) + .version(reference.getVersion()).name(reference.getName()).build(); } protected EditableArtifactMetaDataDto extractMetaData(String artifactType, ContentHandle content) { @@ -1298,7 +1381,8 @@ protected EditableArtifactMetaDataDto extractMetaData(String artifactType, Conte ExtractedMetaData emd = extractor.extract(content); EditableArtifactMetaDataDto metaData; if (emd != null) { - metaData = new EditableArtifactMetaDataDto(emd.getName(), emd.getDescription(), null, emd.getLabels()); + metaData = new EditableArtifactMetaDataDto(emd.getName(), emd.getDescription(), null, + emd.getLabels()); } else { metaData = new EditableArtifactMetaDataDto(); } diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/IdsResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v2/IdsResourceImpl.java index 202d3fabed..d8d810b93e 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/IdsResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v2/IdsResourceImpl.java @@ -29,14 +29,15 @@ import java.util.stream.Collectors; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class IdsResourceImpl extends AbstractResourceImpl implements 
IdsResource { @Inject CommonResourceOperations common; - private void checkIfDeprecated(Supplier stateSupplier, String artifactId, String version, Response.ResponseBuilder builder) { + private void checkIfDeprecated(Supplier stateSupplier, String artifactId, String version, + Response.ResponseBuilder builder) { HeadersHack.checkIfDeprecated(stateSupplier, null, artifactId, version, builder); } @@ -69,9 +70,11 @@ public Response getContentByGlobalId(long globalId, Boolean dereference) { StoredArtifactVersionDto artifact = storage.getArtifactVersionContent(globalId); TypedContent contentToReturn = TypedContent.create(artifact.getContent(), artifact.getContentType()); - handleContentReferences(dereference, metaData.getArtifactType(), contentToReturn, artifact.getReferences()); + handleContentReferences(dereference, metaData.getArtifactType(), contentToReturn, + artifact.getReferences()); - Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), contentToReturn.getContentType()); + Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), + contentToReturn.getContentType()); checkIfDeprecated(metaData::getState, metaData.getArtifactId(), metaData.getVersion(), builder); return builder.build(); } @@ -101,26 +104,25 @@ public List referencesByContentHash(String contentHash) { @Override public List referencesByContentId(long contentId) { ContentWrapperDto artifact = storage.getContentById(contentId); - return artifact.getReferences().stream() - .map(V2ApiUtil::referenceDtoToReference) + return artifact.getReferences().stream().map(V2ApiUtil::referenceDtoToReference) .collect(Collectors.toList()); } /** - * @see io.apicurio.registry.rest.v2.IdsResource#referencesByGlobalId(long, io.apicurio.registry.types.ReferenceType) + * @see io.apicurio.registry.rest.v2.IdsResource#referencesByGlobalId(long, + * io.apicurio.registry.types.ReferenceType) */ @Override public List referencesByGlobalId(long globalId, ReferenceType refType) { if 
(refType == ReferenceType.OUTBOUND || refType == null) { StoredArtifactVersionDto artifact = storage.getArtifactVersionContent(globalId); - return artifact.getReferences().stream() - .map(V2ApiUtil::referenceDtoToReference) + return artifact.getReferences().stream().map(V2ApiUtil::referenceDtoToReference) .collect(Collectors.toList()); } else { ArtifactVersionMetaDataDto amd = storage.getArtifactVersionMetaData(globalId); - return storage.getInboundArtifactReferences(amd.getGroupId(), amd.getArtifactId(), amd.getVersion()).stream() - .map(V2ApiUtil::referenceDtoToReference) - .collect(Collectors.toList()); + return storage + .getInboundArtifactReferences(amd.getGroupId(), amd.getArtifactId(), amd.getVersion()) + .stream().map(V2ApiUtil::referenceDtoToReference).collect(Collectors.toList()); } } } diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/SearchResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v2/SearchResourceImpl.java index 6844839ad2..2cb48a9406 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/SearchResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v2/SearchResourceImpl.java @@ -7,6 +7,7 @@ import io.apicurio.registry.content.ContentHandle; import io.apicurio.registry.content.TypedContent; import io.apicurio.registry.content.canon.ContentCanonicalizer; +import io.apicurio.registry.content.util.ContentTypeUtil; import io.apicurio.registry.metrics.health.liveness.ResponseErrorLivenessCheck; import io.apicurio.registry.metrics.health.readiness.ResponseTimeoutReadinessCheck; import io.apicurio.registry.rest.v2.beans.ArtifactSearchResults; @@ -21,7 +22,6 @@ import io.apicurio.registry.types.Current; import io.apicurio.registry.types.provider.ArtifactTypeUtilProvider; import io.apicurio.registry.types.provider.ArtifactTypeUtilProviderFactory; -import io.apicurio.registry.content.util.ContentTypeUtil; import io.apicurio.registry.utils.StringUtil; import jakarta.enterprise.context.ApplicationScoped; import 
jakarta.inject.Inject; @@ -40,7 +40,7 @@ import java.util.Set; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class SearchResourceImpl implements SearchResource { @@ -61,14 +61,14 @@ public class SearchResourceImpl implements SearchResource { HttpServletRequest request; /** - * @see io.apicurio.registry.rest.v2.SearchResource#searchArtifacts(String, BigInteger, BigInteger, SortOrder, SortBy, List, List, String, String, Long, Long) + * @see io.apicurio.registry.rest.v2.SearchResource#searchArtifacts(String, BigInteger, BigInteger, + * SortOrder, SortBy, List, List, String, String, Long, Long) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Read) - public ArtifactSearchResults searchArtifacts(String name, BigInteger offset, BigInteger limit, SortOrder order, - SortBy orderby, List labels, List properties, String description, String group, - Long globalId, Long contentId) - { + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) + public ArtifactSearchResults searchArtifacts(String name, BigInteger offset, BigInteger limit, + SortOrder order, SortBy orderby, List labels, List properties, String description, + String group, Long globalId, Long contentId) { if (orderby == null) { orderby = SortBy.name; } @@ -80,7 +80,8 @@ public ArtifactSearchResults searchArtifacts(String name, BigInteger offset, Big } final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = order == null || order == SortOrder.asc ? 
OrderDirection.asc + : OrderDirection.desc; Set filters = new HashSet(); if (!StringUtil.isEmpty(name)) { @@ -94,27 +95,27 @@ public ArtifactSearchResults searchArtifacts(String name, BigInteger offset, Big } if (properties != null && !properties.isEmpty()) { - properties.stream() - .map(prop -> { - int delimiterIndex = prop.indexOf(":"); - String propertyKey; - String propertyValue; - if (delimiterIndex == 0) { - throw new BadRequestException("property search filter wrong formatted, missing left side of ':' delimiter"); - } - if (delimiterIndex == (prop.length() - 1)) { - throw new BadRequestException("property search filter wrong formatted, missing right side of ':' delimiter"); - } - if (delimiterIndex < 0) { - propertyKey = prop; - propertyValue = null; - } else{ - propertyKey = prop.substring(0, delimiterIndex); - propertyValue = prop.substring(delimiterIndex + 1); - } - return SearchFilter.ofLabel(propertyKey, propertyValue); - }) - .forEach(filters::add); + properties.stream().map(prop -> { + int delimiterIndex = prop.indexOf(":"); + String propertyKey; + String propertyValue; + if (delimiterIndex == 0) { + throw new BadRequestException( + "property search filter wrong formatted, missing left side of ':' delimiter"); + } + if (delimiterIndex == (prop.length() - 1)) { + throw new BadRequestException( + "property search filter wrong formatted, missing right side of ':' delimiter"); + } + if (delimiterIndex < 0) { + propertyKey = prop; + propertyValue = null; + } else { + propertyKey = prop.substring(0, delimiterIndex); + propertyValue = prop.substring(delimiterIndex + 1); + } + return SearchFilter.ofLabel(propertyKey, propertyValue); + }).forEach(filters::add); } if (globalId != null && globalId > 0) { filters.add(SearchFilter.ofGlobalId(globalId)); @@ -123,16 +124,19 @@ public ArtifactSearchResults searchArtifacts(String name, BigInteger offset, Big filters.add(SearchFilter.ofContentId(contentId)); } - ArtifactSearchResultsDto results = 
storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), limit.intValue()); + ArtifactSearchResultsDto results = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V2ApiUtil.dtoToSearchResults(results); } /** - * @see io.apicurio.registry.rest.v2.SearchResource#searchArtifactsByContent(Boolean, String, BigInteger, BigInteger, SortOrder, SortBy, InputStream) + * @see io.apicurio.registry.rest.v2.SearchResource#searchArtifactsByContent(Boolean, String, BigInteger, + * BigInteger, SortOrder, SortBy, InputStream) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Read) - public ArtifactSearchResults searchArtifactsByContent(Boolean canonical, String artifactType, BigInteger offset, BigInteger limit, SortOrder order, SortBy orderby, InputStream data) { + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) + public ArtifactSearchResults searchArtifactsByContent(Boolean canonical, String artifactType, + BigInteger offset, BigInteger limit, SortOrder order, SortBy orderby, InputStream data) { if (orderby == null) { orderby = SortBy.name; @@ -144,7 +148,8 @@ public ArtifactSearchResults searchArtifactsByContent(Boolean canonical, String limit = BigInteger.valueOf(20); } final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = order == null || order == SortOrder.asc ? 
OrderDirection.asc + : OrderDirection.desc; if (canonical == null) { canonical = Boolean.FALSE; @@ -170,13 +175,13 @@ public ArtifactSearchResults searchArtifactsByContent(Boolean canonical, String } else { throw new BadRequestException(CANONICAL_QUERY_PARAM_ERROR_MESSAGE); } - ArtifactSearchResultsDto results = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), limit.intValue()); + ArtifactSearchResultsDto results = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V2ApiUtil.dtoToSearchResults(results); } /** - * Make sure this is ONLY used when request instance is active. - * e.g. in actual http request + * Make sure this is ONLY used when request instance is active. e.g. in actual http request */ private String getContentType() { return request.getContentType(); diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/SystemResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v2/SystemResourceImpl.java index 96981cbe79..ee4c790659 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/SystemResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v2/SystemResourceImpl.java @@ -5,18 +5,17 @@ import io.apicurio.registry.auth.Authorized; import io.apicurio.registry.auth.AuthorizedLevel; import io.apicurio.registry.auth.AuthorizedStyle; +import io.apicurio.registry.limits.RegistryLimitsConfiguration; import io.apicurio.registry.metrics.health.liveness.ResponseErrorLivenessCheck; import io.apicurio.registry.metrics.health.readiness.ResponseTimeoutReadinessCheck; -import io.apicurio.registry.limits.RegistryLimitsConfiguration; import io.apicurio.registry.rest.v2.beans.Limits; import io.apicurio.registry.rest.v2.beans.SystemInfo; - import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; import jakarta.interceptor.Interceptors; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ 
ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class SystemResourceImpl implements SystemResource { @@ -30,7 +29,7 @@ public class SystemResourceImpl implements SystemResource { * @see io.apicurio.registry.rest.v2.SystemResource#getSystemInfo() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) public SystemInfo getSystemInfo() { SystemInfo info = new SystemInfo(); info.setName(system.getName()); diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/UsersResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v2/UsersResourceImpl.java index bed9da8e34..c3ce2d52c0 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/UsersResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v2/UsersResourceImpl.java @@ -1,25 +1,23 @@ package io.apicurio.registry.rest.v2; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; -import jakarta.interceptor.Interceptors; - -import org.slf4j.Logger; - +import io.apicurio.common.apps.logging.Logged; import io.apicurio.registry.auth.AdminOverride; import io.apicurio.registry.auth.AuthConfig; import io.apicurio.registry.auth.Authorized; import io.apicurio.registry.auth.AuthorizedLevel; import io.apicurio.registry.auth.AuthorizedStyle; import io.apicurio.registry.auth.RoleBasedAccessController; -import io.apicurio.common.apps.logging.Logged; import io.apicurio.registry.metrics.health.liveness.ResponseErrorLivenessCheck; import io.apicurio.registry.metrics.health.readiness.ResponseTimeoutReadinessCheck; import io.apicurio.registry.rest.v2.beans.UserInfo; import io.quarkus.security.identity.SecurityIdentity; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import jakarta.interceptor.Interceptors; +import org.slf4j.Logger; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, 
ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class UsersResourceImpl implements UsersResource { @@ -42,11 +40,13 @@ public class UsersResourceImpl implements UsersResource { * @see io.apicurio.registry.rest.v2.UsersResource#getCurrentUserInfo() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) public UserInfo getCurrentUserInfo() { UserInfo info = new UserInfo(); info.setUsername(securityIdentity.getPrincipal().getName()); - info.setDisplayName(securityIdentity.getPrincipal().getName()); // TODO need a better implementation of this, maybe use claims first_name and last_name + info.setDisplayName(securityIdentity.getPrincipal().getName()); // TODO need a better implementation + // of this, maybe use claims + // first_name and last_name if (authConfig.isRbacEnabled()) { info.setAdmin(rbac.isAdmin()); info.setDeveloper(rbac.isDeveloper()); diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/V2ApiUtil.java b/app/src/main/java/io/apicurio/registry/rest/v2/V2ApiUtil.java index d83548374a..5b9b9ba7ab 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/V2ApiUtil.java +++ b/app/src/main/java/io/apicurio/registry/rest/v2/V2ApiUtil.java @@ -44,8 +44,8 @@ private V2ApiUtil() { * @param artifactType * @param dto */ - public static ArtifactMetaData dtoToMetaData(String groupId, String artifactId, - String artifactType, ArtifactMetaDataDto dto) { + public static ArtifactMetaData dtoToMetaData(String groupId, String artifactId, String artifactType, + ArtifactMetaDataDto dto) { ArtifactMetaData metaData = new ArtifactMetaData(); metaData.setCreatedBy(dto.getOwner()); metaData.setCreatedOn(new Date(dto.getCreatedOn())); @@ -76,6 +76,7 @@ public static ArtifactMetaData dtoToMetaData(String groupId, String artifactId, /** * Converts v3 labels into v2 properties. 
+ * * @param v3Labels * @return */ @@ -96,6 +97,7 @@ public static Map toV2Properties(Map v3Labels) { /** * Converts v3 labels into v2 labels. + * * @param v3Labels */ public static List toV2Labels(Map v3Labels) { @@ -115,6 +117,7 @@ public static List toV2Labels(Map v3Labels) { /** * Converts v2 labels and properties into v3 labels. + * * @param v2Labels * @param v2Properties */ @@ -139,7 +142,7 @@ public static Map toV3Labels(List v2Labels, Map comparator(SortOrder sortOrder) { return (id1, id2) -> compare(sortOrder, id1, id2); } - public static int compare(SortOrder sortOrder, ArtifactMetaDataDto metaDataDto1, ArtifactMetaDataDto metaDataDto2) { + public static int compare(SortOrder sortOrder, ArtifactMetaDataDto metaDataDto1, + ArtifactMetaDataDto metaDataDto2) { String name1 = metaDataDto1.getName(); if (name1 == null) { name1 = metaDataDto1.getArtifactId(); @@ -247,7 +252,8 @@ public static int compare(SortOrder sortOrder, ArtifactMetaDataDto metaDataDto1, if (name2 == null) { name2 = metaDataDto2.getArtifactId(); } - return sortOrder == SortOrder.desc ? name2.compareToIgnoreCase(name1) : name1.compareToIgnoreCase(name2); + return sortOrder == SortOrder.desc ? 
name2.compareToIgnoreCase(name1) + : name1.compareToIgnoreCase(name2); } public static ArtifactSearchResults dtoToSearchResults(ArtifactSearchResultsDto dto) { @@ -339,19 +345,15 @@ public static GroupMetaData groupDtoToGroup(GroupMetaDataDto dto) { } public static Comment commentDtoToComment(CommentDto dto) { - return Comment.builder() - .commentId(dto.getCommentId()) - .createdBy(dto.getOwner()) - .createdOn(new Date(dto.getCreatedOn())) - .value(dto.getValue()) - .build(); + return Comment.builder().commentId(dto.getCommentId()).createdBy(dto.getOwner()) + .createdOn(new Date(dto.getCreatedOn())).value(dto.getValue()).build(); } public static String prettyPrintReferences(Collection references) { return references.stream() - .map(ar -> nullGroupIdToDefault(ar.getGroupId()) + ":" + ar.getArtifactId() + ":" + ar.getVersion() + "->" + ar.getName()) - .reduce((left, right) -> left + ", " + right) - .orElse(""); + .map(ar -> nullGroupIdToDefault(ar.getGroupId()) + ":" + ar.getArtifactId() + ":" + + ar.getVersion() + "->" + ar.getName()) + .reduce((left, right) -> left + ", " + right).orElse(""); } public static String defaultGroupIdToNull(String groupId) { diff --git a/app/src/main/java/io/apicurio/registry/rest/v2/shared/CommonResourceOperations.java b/app/src/main/java/io/apicurio/registry/rest/v2/shared/CommonResourceOperations.java index 91a1abc547..02ecc9ba04 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v2/shared/CommonResourceOperations.java +++ b/app/src/main/java/io/apicurio/registry/rest/v2/shared/CommonResourceOperations.java @@ -5,11 +5,11 @@ import io.apicurio.registry.storage.RegistryStorage; import io.apicurio.registry.storage.dto.ContentWrapperDto; import io.apicurio.registry.types.Current; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; import java.util.List; import java.util.stream.Collectors; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; @ApplicationScoped public 
class CommonResourceOperations { @@ -20,8 +20,7 @@ public class CommonResourceOperations { public List getReferencesByContentHash(String contentHash) { ContentWrapperDto artifact = storage.getContentByHash(contentHash); - return artifact.getReferences().stream() - .map(V2ApiUtil::referenceDtoToReference) + return artifact.getReferences().stream().map(V2ApiUtil::referenceDtoToReference) .collect(Collectors.toList()); } } diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/AbstractResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v3/AbstractResourceImpl.java index 86d0973904..629cccd375 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/AbstractResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/AbstractResourceImpl.java @@ -26,7 +26,7 @@ import java.util.Map; public abstract class AbstractResourceImpl { - + @Inject Logger log; @@ -45,11 +45,11 @@ public abstract class AbstractResourceImpl { String apiBaseHref; /** - * Handle the content references based on the value of "HandleReferencesType" - this can either mean - * we need to fully dereference the content, or we need to rewrite the references, or we do nothing. + * Handle the content references based on the value of "HandleReferencesType" - this can either mean we + * need to fully dereference the content, or we need to rewrite the references, or we do nothing. */ protected TypedContent handleContentReferences(HandleReferencesType referencesType, String artifactType, - TypedContent content, List references) { + TypedContent content, List references) { // Dereference or rewrite references if (!references.isEmpty()) { if (referencesType == HandleReferencesType.DEREFERENCE) { @@ -68,9 +68,10 @@ protected TypedContent handleContentReferences(HandleReferencesType referencesTy } /** - * Convert the list of references into a list of REST API URLs that point to the content. 
This means - * that we generate a REST API URL from the GAV (groupId, artifactId, version) information found in - * each reference. + * Convert the list of references into a list of REST API URLs that point to the content. This means that + * we generate a REST API URL from the GAV (groupId, artifactId, version) information found in each + * reference. + * * @param references */ protected Map resolveReferenceUrls(List references) { @@ -89,8 +90,9 @@ protected Map resolveReferenceUrls(List re } /** - * Convert a single artifact reference to a REST API URL. This means that we generate a REST API URL - * from the GAV (groupId, artifactId, version) information found in the reference. + * Convert a single artifact reference to a REST API URL. This means that we generate a REST API URL from + * the GAV (groupId, artifactId, version) information found in the reference. + * * @param reference */ protected String resolveReferenceUrl(ArtifactReferenceDto reference) { @@ -108,7 +110,7 @@ protected String resolveReferenceUrl(ArtifactReferenceDto reference) { this.log.error("Error trying to determine the baseHref of the REST API.", e); return null; } - + if (baseHref == null) { this.log.warn("Failed to determine baseHref for the REST API."); return null; diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/AdminResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v3/AdminResourceImpl.java index ff6c0154e8..f7b1b418c5 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/AdminResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/AdminResourceImpl.java @@ -76,7 +76,7 @@ import static io.apicurio.registry.utils.DtoUtil.registryAuthPropertyToApp; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class AdminResourceImpl implements AdminResource { @@ -120,22 +120,18 @@ private static 
void requireParameter(String parameterName, Object parameterValue * @see io.apicurio.registry.rest.v3.AdminResource#listArtifactTypes() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Read) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) public List listArtifactTypes() { - return factory - .getAllArtifactTypes() - .stream() - .map(t -> { - ArtifactTypeInfo ati = new ArtifactTypeInfo(); - ati.setName(t); - return ati; - }) - .collect(Collectors.toList()); + return factory.getAllArtifactTypes().stream().map(t -> { + ArtifactTypeInfo ati = new ArtifactTypeInfo(); + ati.setName(t); + return ati; + }).collect(Collectors.toList()); } @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public SnapshotMetaData triggerSnapshot() { storage.triggerSnapshotCreation(); return SnapshotMetaData.builder().build(); @@ -145,21 +141,19 @@ public SnapshotMetaData triggerSnapshot() { * @see io.apicurio.registry.rest.v3.AdminResource#listGlobalRules() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Read) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) public List listGlobalRules() { List rules = storage.getGlobalRules(); List defaultRules = rulesProperties.getFilteredDefaultGlobalRules(rules); - return Stream.concat(rules.stream(), defaultRules.stream()) - .sorted() - .collect(Collectors.toList()); + return Stream.concat(rules.stream(), defaultRules.stream()).sorted().collect(Collectors.toList()); } /** * @see io.apicurio.registry.rest.v3.AdminResource#createGlobalRule(CreateRule) */ @Override - @Audited(extractParameters = {"0", KEY_RULE}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_RULE }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public void createGlobalRule(CreateRule 
data) { RuleType ruleType = data.getRuleType(); requireParameter("ruleType", ruleType); @@ -178,7 +172,7 @@ public void createGlobalRule(CreateRule data) { */ @Override @Audited - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public void deleteAllGlobalRules() { storage.deleteGlobalRules(); } @@ -187,7 +181,7 @@ public void deleteAllGlobalRules() { * @see io.apicurio.registry.rest.v3.AdminResource#getGlobalRuleConfig(io.apicurio.registry.types.RuleType) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Read) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) public Rule getGlobalRuleConfig(RuleType ruleType) { RuleConfigurationDto dto; try { @@ -206,11 +200,12 @@ public Rule getGlobalRuleConfig(RuleType ruleType) { } /** - * @see io.apicurio.registry.rest.v3.AdminResource#updateGlobalRuleConfig(io.apicurio.registry.types.RuleType, io.apicurio.registry.rest.v3.beans.Rule) + * @see io.apicurio.registry.rest.v3.AdminResource#updateGlobalRuleConfig(io.apicurio.registry.types.RuleType, + * io.apicurio.registry.rest.v3.beans.Rule) */ @Override - @Audited(extractParameters = {"0", KEY_RULE_TYPE, "1", KEY_RULE}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_RULE_TYPE, "1", KEY_RULE }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public Rule updateGlobalRuleConfig(RuleType ruleType, Rule data) { RuleConfigurationDto configDto = new RuleConfigurationDto(); configDto.setConfiguration(data.getConfig()); @@ -235,8 +230,8 @@ public Rule updateGlobalRuleConfig(RuleType ruleType, Rule data) { * @see io.apicurio.registry.rest.v3.AdminResource#deleteGlobalRule(io.apicurio.registry.types.RuleType) */ @Override - @Audited(extractParameters = {"0", KEY_RULE_TYPE}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + 
@Audited(extractParameters = { "0", KEY_RULE_TYPE }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public void deleteGlobalRule(RuleType rule) { try { storage.deleteGlobalRule(rule); @@ -257,8 +252,9 @@ public void deleteGlobalRule(RuleType rule) { */ @Override @Audited - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) - public void importData(Boolean xRegistryPreserveGlobalId, Boolean xRegistryPreserveContentId, InputStream data) { + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) + public void importData(Boolean xRegistryPreserveGlobalId, Boolean xRegistryPreserveContentId, + InputStream data) { final ZipInputStream zip = new ZipInputStream(data, StandardCharsets.UTF_8); final EntityReader reader = new EntityReader(zip); EntityInputStream stream = new EntityInputStream() { @@ -277,20 +273,22 @@ public void close() throws IOException { zip.close(); } }; - this.storage.importData(stream, isNullOrTrue(xRegistryPreserveGlobalId), isNullOrTrue(xRegistryPreserveContentId)); + this.storage.importData(stream, isNullOrTrue(xRegistryPreserveGlobalId), + isNullOrTrue(xRegistryPreserveContentId)); } /** * @see io.apicurio.registry.rest.v3.AdminResource#exportData(java.lang.Boolean) */ @Override - @Audited(extractParameters = {"0", KEY_FOR_BROWSER}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_FOR_BROWSER }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public Response exportData(Boolean forBrowser) { String acceptHeader = request.getHeader("Accept"); if (Boolean.TRUE.equals(forBrowser) || MediaType.APPLICATION_JSON.equals(acceptHeader)) { long expires = System.currentTimeMillis() + (downloadHrefTtl.get() * 1000); - DownloadContextDto downloadCtx = DownloadContextDto.builder().type(DownloadContextType.EXPORT).expires(expires).build(); + DownloadContextDto downloadCtx = 
DownloadContextDto.builder().type(DownloadContextType.EXPORT) + .expires(expires).build(); String downloadId = storage.createDownload(downloadCtx); String downloadHref = createDownloadHref(downloadId); DownloadRef downloadRef = new DownloadRef(); @@ -306,18 +304,19 @@ public Response exportData(Boolean forBrowser) { * @see io.apicurio.registry.rest.v3.AdminResource#createRoleMapping(io.apicurio.registry.rest.v3.beans.RoleMapping) */ @Override - @Audited(extractParameters = {"0", KEY_ROLE_MAPPING}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_ROLE_MAPPING }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public void createRoleMapping(RoleMapping data) { storage.createRoleMapping(data.getPrincipalId(), data.getRole().name(), data.getPrincipalName()); } /** - * @see io.apicurio.registry.rest.v3.AdminResource#listRoleMappings(java.math.BigInteger, java.math.BigInteger) + * @see io.apicurio.registry.rest.v3.AdminResource#listRoleMappings(java.math.BigInteger, + * java.math.BigInteger) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public RoleMappingSearchResults listRoleMappings(BigInteger limit, BigInteger offset) { if (offset == null) { @@ -335,7 +334,7 @@ public RoleMappingSearchResults listRoleMappings(BigInteger limit, BigInteger of * @see io.apicurio.registry.rest.v3.AdminResource#getRoleMapping(java.lang.String) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public RoleMapping getRoleMapping(String principalId) { RoleMappingDto dto = storage.getRoleMapping(principalId); @@ -343,11 +342,12 @@ public RoleMapping getRoleMapping(String principalId) { } /** - * @see 
io.apicurio.registry.rest.v3.AdminResource#updateRoleMapping (java.lang.String, io.apicurio.registry.rest.v3.beans.Role) + * @see io.apicurio.registry.rest.v3.AdminResource#updateRoleMapping (java.lang.String, + * io.apicurio.registry.rest.v3.beans.Role) */ @Override - @Audited(extractParameters = {"0", KEY_PRINCIPAL_ID, "1", KEY_UPDATE_ROLE}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_PRINCIPAL_ID, "1", KEY_UPDATE_ROLE }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public void updateRoleMapping(String principalId, UpdateRole data) { requireParameter("principalId", principalId); @@ -359,19 +359,18 @@ public void updateRoleMapping(String principalId, UpdateRole data) { * @see io.apicurio.registry.rest.v3.AdminResource#deleteRoleMapping(java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_PRINCIPAL_ID}) - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_PRINCIPAL_ID }) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) @RoleBasedAccessApiOperation public void deleteRoleMapping(String principalId) { storage.deleteRoleMapping(principalId); } - /** * @see io.apicurio.registry.rest.v3.AdminResource#listConfigProperties() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public List listConfigProperties() { // Query the DB for the set of configured properties. 
List props = storage.getConfigProperties(); @@ -380,11 +379,14 @@ public List listConfigProperties() { Map propsI = new HashMap<>(); props.forEach(dto -> propsI.put(dto.getName(), dto)); - // Return value is the set of all dynamic config properties, with either configured or default values (depending + // Return value is the set of all dynamic config properties, with either configured or default values + // (depending // on whether the value is actually configured and stored in the DB or not). - return dynamicPropertyIndex.getAcceptedPropertyNames().stream() - .sorted(String::compareTo) - .map(pname -> propsI.containsKey(pname) ? V3ApiUtil.dtoToConfigurationProperty(dynamicPropertyIndex.getProperty(pname), propsI.get(pname)) : defToConfigurationProperty(dynamicPropertyIndex.getProperty(pname))) + return dynamicPropertyIndex.getAcceptedPropertyNames().stream().sorted(String::compareTo) + .map(pname -> propsI.containsKey(pname) + ? V3ApiUtil.dtoToConfigurationProperty(dynamicPropertyIndex.getProperty(pname), + propsI.get(pname)) + : defToConfigurationProperty(dynamicPropertyIndex.getProperty(pname))) .collect(Collectors.toList()); } @@ -392,7 +394,7 @@ public List listConfigProperties() { * @see io.apicurio.registry.rest.v3.AdminResource#getConfigProperty(java.lang.String) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public ConfigurationProperty getConfigProperty(String propertyName) { // Ensure that the property is a valid dynamic config property. 
DynamicConfigPropertyDef def = resolveConfigProperty(propertyName); @@ -406,10 +408,11 @@ public ConfigurationProperty getConfigProperty(String propertyName) { } /** - * @see io.apicurio.registry.rest.v3.AdminResource#updateConfigProperty(java.lang.String, io.apicurio.registry.rest.v3.beans.UpdateConfigurationProperty) + * @see io.apicurio.registry.rest.v3.AdminResource#updateConfigProperty(java.lang.String, + * io.apicurio.registry.rest.v3.beans.UpdateConfigurationProperty) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) public void updateConfigProperty(String propertyName, UpdateConfigurationProperty data) { DynamicConfigPropertyDef propertyDef = resolveConfigProperty(propertyName); validateConfigPropertyValue(propertyDef, data.getValue()); @@ -424,8 +427,8 @@ public void updateConfigProperty(String propertyName, UpdateConfigurationPropert * @see io.apicurio.registry.rest.v3.AdminResource#resetConfigProperty(java.lang.String) */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Admin) - @Audited(extractParameters = {"0", KEY_NAME}) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Admin) + @Audited(extractParameters = { "0", KEY_NAME }) public void resetConfigProperty(String propertyName) { // Check if the config property exists. 
resolveConfigProperty(propertyName); @@ -433,7 +436,6 @@ public void resetConfigProperty(String propertyName) { storage.deleteConfigProperty(propertyName); } - private static boolean isNullOrTrue(Boolean value) { return value == null || value; } @@ -443,7 +445,8 @@ private String createDownloadHref(String downloadId) { } private ConfigurationProperty defToConfigurationProperty(DynamicConfigPropertyDef def) { - String propertyValue = config.getOptionalValue(def.getName(), String.class).orElse(def.getDefaultValue()); + String propertyValue = config.getOptionalValue(def.getName(), String.class) + .orElse(def.getDefaultValue()); ConfigurationProperty rval = new ConfigurationProperty(); rval.setName(appAuthPropertyToRegistry(def.getName())); @@ -455,8 +458,9 @@ private ConfigurationProperty defToConfigurationProperty(DynamicConfigPropertyDe } /** - * Lookup the dynamic configuration property being set. Ensure that it exists (throws - * a {@link io.apicurio.registry.storage.error.NotFoundException} if it does not. + * Lookup the dynamic configuration property being set. Ensure that it exists (throws a + * {@link io.apicurio.registry.storage.error.NotFoundException} if it does not. + * * @param propertyName the name of the dynamic property * @return the dynamic config property definition */ @@ -466,7 +470,7 @@ private DynamicConfigPropertyDef resolveConfigProperty(String propertyName) { if (property == null) { propertyName = registryAuthPropertyToApp(propertyName); } - //If registry property cannot be found, try with app property + // If registry property cannot be found, try with app property property = dynamicPropertyIndex.getProperty(propertyName); if (property == null) { @@ -480,14 +484,16 @@ private DynamicConfigPropertyDef resolveConfigProperty(String propertyName) { } /** - * Ensure that the value being set on the given property is value for the property type. 
- * For example, this should fail + * Ensure that the value being set on the given property is value for the property type. For example, this + * should fail + * * @param propertyDef the dynamic config property definition * @param value the config property value */ private void validateConfigPropertyValue(DynamicConfigPropertyDef propertyDef, String value) { if (!propertyDef.isValidValue(value)) { - throw new InvalidPropertyValueException("Invalid dynamic configuration property value for: " + propertyDef.getName()); + throw new InvalidPropertyValueException( + "Invalid dynamic configuration property value for: " + propertyDef.getName()); } } diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/DownloadsResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v3/DownloadsResourceImpl.java index 490a1ef3f2..099d955ede 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/DownloadsResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/DownloadsResourceImpl.java @@ -22,7 +22,7 @@ import jakarta.ws.rs.core.Response; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged @Path("/apis/registry/v3/downloads") public class DownloadsResourceImpl { @@ -34,7 +34,7 @@ public class DownloadsResourceImpl { @Inject DataExporter exporter; - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) @GET @Path("{downloadId}") @Produces("*/*") @@ -50,15 +50,15 @@ public Response download(@PathParam("downloadId") String downloadId) { } /** - * A duplicate version of the above that will allow a filename to be added - * for download purposes. So e.g. /apis/registry/v3/downloads/ABCD-1234 can - * be aliased as /apis/registry/v3/downloads/ABCD-1234/export.zip and work - * the same way. 
But when saving from a browser, the filename should be - * useful. + * A duplicate version of the above that will allow a filename to be added for download purposes. So e.g. + * /apis/registry/v3/downloads/ABCD-1234 can be aliased as + * /apis/registry/v3/downloads/ABCD-1234/export.zip and work the same way. But when saving from a browser, + * the filename should be useful. + * * @param downloadId * @return */ - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) @GET @Path("{downloadId}/{fileName}") @Produces("*/*") diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/GroupsResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v3/GroupsResourceImpl.java index f1cc5c1c7d..b38e8cf596 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/GroupsResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/GroupsResourceImpl.java @@ -120,7 +120,7 @@ * Implements the {@link GroupsResource} JAX-RS interface. 
*/ @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class GroupsResourceImpl extends AbstractResourceImpl implements GroupsResource { @@ -147,31 +147,30 @@ public class GroupsResourceImpl extends AbstractResourceImpl implements GroupsRe CommonResourceOperations common; public enum RegistryHashAlgorithm { - SHA256, - MD5 + SHA256, MD5 } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactVersionReferences(java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.types.ReferenceType) + * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactVersionReferences(java.lang.String, + * java.lang.String, java.lang.String, io.apicurio.registry.types.ReferenceType) */ @Override public List getArtifactVersionReferences(String groupId, String artifactId, - String versionExpression, ReferenceType refType) { + String versionExpression, ReferenceType refType) { var gav = VersionExpressionParser.parse(new GA(groupId, artifactId), versionExpression, (ga, branchId) -> storage.getBranchTip(ga, branchId, RetrievalBehavior.DEFAULT)); if (refType == null || refType == ReferenceType.OUTBOUND) { - return storage.getArtifactVersionContent(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), gav.getRawVersionId()) - .getReferences() - .stream() - .map(V3ApiUtil::referenceDtoToReference) - .collect(toList()); + return storage + .getArtifactVersionContent(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), + gav.getRawVersionId()) + .getReferences().stream().map(V3ApiUtil::referenceDtoToReference).collect(toList()); } else { - return storage.getInboundArtifactReferences(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), gav.getRawVersionId()) - .stream() - .map(V3ApiUtil::referenceDtoToReference) - .collect(toList()); + return storage + 
.getInboundArtifactReferences(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), + gav.getRawVersionId()) + .stream().map(V3ApiUtil::referenceDtoToReference).collect(toList()); } } @@ -179,7 +178,7 @@ public List getArtifactVersionReferences(String groupId, Stri * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifact(java.lang.String, java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteArtifact(String groupId, String artifactId) { requireParameter("groupId", groupId); @@ -189,7 +188,8 @@ public void deleteArtifact(String groupId, String artifactId) { } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactMetaData(java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactMetaData(java.lang.String, + * java.lang.String) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) @@ -197,15 +197,17 @@ public ArtifactMetaData getArtifactMetaData(String groupId, String artifactId) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); - ArtifactMetaDataDto dto = storage.getArtifactMetaData(new GroupId(groupId).getRawGroupIdWithNull(), artifactId); + ArtifactMetaDataDto dto = storage.getArtifactMetaData(new GroupId(groupId).getRawGroupIdWithNull(), + artifactId); return V3ApiUtil.dtoToArtifactMetaData(dto); } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#updateArtifactMetaData(java.lang.String, java.lang.String, io.apicurio.registry.rest.v3.beans.EditableArtifactMetaData) + * @see io.apicurio.registry.rest.v3.GroupsResource#updateArtifactMetaData(java.lang.String, + * java.lang.String, io.apicurio.registry.rest.v3.beans.EditableArtifactMetaData) */ @Override - @Audited(extractParameters = {"0", 
KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_EDITABLE_METADATA}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_EDITABLE_METADATA }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void updateArtifactMetaData(String groupId, String artifactId, EditableArtifactMetaData data) { requireParameter("groupId", groupId); @@ -215,7 +217,8 @@ public void updateArtifactMetaData(String groupId, String artifactId, EditableAr if (data.getOwner().trim().isEmpty()) { throw new MissingRequiredParameterException("Owner cannot be empty"); } else { - // TODO extra security check - if the user is trying to change the owner, fail unless they are an Admin or the current Owner + // TODO extra security check - if the user is trying to change the owner, fail unless they are + // an Admin or the current Owner } } @@ -239,9 +242,10 @@ public GroupMetaData getGroupById(String groupId) { public void deleteGroupById(String groupId) { storage.deleteGroup(groupId); } - + /** - * @see io.apicurio.registry.rest.v3.GroupsResource#updateGroupById(java.lang.String, io.apicurio.registry.rest.v3.beans.EditableGroupMetaData) + * @see io.apicurio.registry.rest.v3.GroupsResource#updateGroupById(java.lang.String, + * io.apicurio.registry.rest.v3.beans.EditableGroupMetaData) */ @Override @Authorized(style = AuthorizedStyle.GroupOnly, level = AuthorizedLevel.Write) @@ -256,7 +260,8 @@ public void updateGroupById(String groupId, EditableGroupMetaData data) { @Override @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) - public GroupSearchResults listGroups(BigInteger limit, BigInteger offset, SortOrder order, GroupSortBy orderby) { + public GroupSearchResults listGroups(BigInteger limit, BigInteger offset, SortOrder order, + GroupSortBy orderby) { if (orderby == null) { orderby = GroupSortBy.groupId; } @@ -268,21 +273,21 @@ public GroupSearchResults listGroups(BigInteger limit, BigInteger offset, SortOr } final 
OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc + : OrderDirection.desc; Set filters = Collections.emptySet(); - GroupSearchResultsDto resultsDto = storage.searchGroups(filters, oBy, oDir, offset.intValue(), limit.intValue()); + GroupSearchResultsDto resultsDto = storage.searchGroups(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(resultsDto); } @Override @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Write) public GroupMetaData createGroup(CreateGroup data) { - GroupMetaDataDto.GroupMetaDataDtoBuilder group = GroupMetaDataDto.builder() - .groupId(data.getGroupId()) - .description(data.getDescription()) - .labels(data.getLabels()); + GroupMetaDataDto.GroupMetaDataDtoBuilder group = GroupMetaDataDto.builder().groupId(data.getGroupId()) + .description(data.getDescription()).labels(data.getLabels()); String user = securityIdentity.getPrincipal().getName(); group.owner(user).createdOn(new Date().getTime()); @@ -308,7 +313,7 @@ public List listArtifactRules(String groupId, String artifactId) { * @see io.apicurio.registry.rest.v3.GroupsResource#createArtifactRule(String, String, CreateRule) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void createArtifactRule(String groupId, String artifactId, CreateRule data) { requireParameter("groupId", groupId); @@ -327,14 +332,16 @@ public void createArtifactRule(String groupId, String artifactId, CreateRule dat throw new ArtifactNotFoundException(groupId, artifactId); } - storage.createArtifactRule(new 
GroupId(groupId).getRawGroupIdWithNull(), artifactId, data.getRuleType(), config); + storage.createArtifactRule(new GroupId(groupId).getRawGroupIdWithNull(), artifactId, + data.getRuleType(), config); } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifactRules(java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifactRules(java.lang.String, + * java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteArtifactRules(String groupId, String artifactId) { requireParameter("groupId", groupId); @@ -344,7 +351,8 @@ public void deleteArtifactRules(String groupId, String artifactId) { } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactRuleConfig(java.lang.String, java.lang.String, io.apicurio.registry.types.RuleType) + * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactRuleConfig(java.lang.String, + * java.lang.String, io.apicurio.registry.types.RuleType) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) @@ -353,7 +361,8 @@ public Rule getArtifactRuleConfig(String groupId, String artifactId, RuleType ru requireParameter("artifactId", artifactId); requireParameter("ruleType", ruleType); - RuleConfigurationDto dto = storage.getArtifactRule(new GroupId(groupId).getRawGroupIdWithNull(), artifactId, ruleType); + RuleConfigurationDto dto = storage.getArtifactRule(new GroupId(groupId).getRawGroupIdWithNull(), + artifactId, ruleType); Rule rval = new Rule(); rval.setConfig(dto.getConfiguration()); rval.setRuleType(ruleType); @@ -361,10 +370,12 @@ public Rule getArtifactRuleConfig(String groupId, String artifactId, RuleType ru } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#updateArtifactRuleConfig(String, 
String, RuleType, Rule) + * @see io.apicurio.registry.rest.v3.GroupsResource#updateArtifactRuleConfig(String, String, RuleType, + * Rule) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE_TYPE, "3", KEY_RULE}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE_TYPE, "3", + KEY_RULE }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public Rule updateArtifactRuleConfig(String groupId, String artifactId, RuleType ruleType, Rule data) { requireParameter("groupId", groupId); @@ -381,10 +392,11 @@ public Rule updateArtifactRuleConfig(String groupId, String artifactId, RuleType } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifactRule(java.lang.String, java.lang.String, io.apicurio.registry.types.RuleType) + * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifactRule(java.lang.String, java.lang.String, + * io.apicurio.registry.types.RuleType) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE_TYPE}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_RULE_TYPE }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) { requireParameter("groupId", groupId); @@ -395,11 +407,13 @@ public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactVersionContent(java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.rest.v3.beans.HandleReferencesType) + * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactVersionContent(java.lang.String, + * java.lang.String, java.lang.String, io.apicurio.registry.rest.v3.beans.HandleReferencesType) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = 
AuthorizedLevel.Read) - public Response getArtifactVersionContent(String groupId, String artifactId, String versionExpression, HandleReferencesType references) { + public Response getArtifactVersionContent(String groupId, String artifactId, String versionExpression, + HandleReferencesType references) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("versionExpression", versionExpression); @@ -411,28 +425,34 @@ public Response getArtifactVersionContent(String groupId, String artifactId, Str references = HandleReferencesType.PRESERVE; } - ArtifactVersionMetaDataDto metaData = storage.getArtifactVersionMetaData(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), gav.getRawVersionId()); + ArtifactVersionMetaDataDto metaData = storage.getArtifactVersionMetaData(gav.getRawGroupIdWithNull(), + gav.getRawArtifactId(), gav.getRawVersionId()); if (VersionState.DISABLED.equals(metaData.getState())) { throw new VersionNotFoundException(groupId, artifactId, versionExpression); } - StoredArtifactVersionDto artifact = storage.getArtifactVersionContent(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), gav.getRawVersionId()); + StoredArtifactVersionDto artifact = storage.getArtifactVersionContent(gav.getRawGroupIdWithNull(), + gav.getRawArtifactId(), gav.getRawVersionId()); TypedContent contentToReturn = TypedContent.create(artifact.getContent(), artifact.getContentType()); - contentToReturn = handleContentReferences(references, metaData.getArtifactType(), contentToReturn, artifact.getReferences()); + contentToReturn = handleContentReferences(references, metaData.getArtifactType(), contentToReturn, + artifact.getReferences()); - Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), artifact.getContentType()); + Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), + artifact.getContentType()); checkIfDeprecated(metaData::getState, groupId, artifactId, versionExpression, builder); 
return builder.build(); } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifactVersion(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifactVersion(java.lang.String, + * java.lang.String, java.lang.String) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteArtifactVersion(String groupId, String artifactId, String version) { if (!restConfig.isArtifactVersionDeletionEnabled()) { - throw new NotAllowedException("Artifact version deletion operation is not enabled.", HttpMethod.GET, (String[]) null); + throw new NotAllowedException("Artifact version deletion operation is not enabled.", + HttpMethod.GET, (String[]) null); } requireParameter("groupId", groupId); @@ -442,11 +462,13 @@ public void deleteArtifactVersion(String groupId, String artifactId, String vers var gav = VersionExpressionParser.parse(new GA(groupId, artifactId), version, (ga, branchId) -> storage.getBranchTip(ga, branchId, RetrievalBehavior.DEFAULT)); - storage.deleteArtifactVersion(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), gav.getRawVersionId()); + storage.deleteArtifactVersion(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), + gav.getRawVersionId()); } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactVersionMetaData(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactVersionMetaData(java.lang.String, + * java.lang.String, java.lang.String) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) @@ -458,17 +480,21 @@ public VersionMetaData getArtifactVersionMetaData(String groupId, String artifac var gav = VersionExpressionParser.parse(new GA(groupId, artifactId), version, (ga, branchId) -> storage.getBranchTip(ga, branchId, RetrievalBehavior.SKIP_DISABLED_LATEST)); - ArtifactVersionMetaDataDto dto = 
storage.getArtifactVersionMetaData(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), gav.getRawVersionId()); + ArtifactVersionMetaDataDto dto = storage.getArtifactVersionMetaData(gav.getRawGroupIdWithNull(), + gav.getRawArtifactId(), gav.getRawVersionId()); return V3ApiUtil.dtoToVersionMetaData(dto); } - + /** - * @see io.apicurio.registry.rest.v3.GroupsResource#updateArtifactVersionMetaData(java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.rest.v3.beans.EditableVersionMetaData) + * @see io.apicurio.registry.rest.v3.GroupsResource#updateArtifactVersionMetaData(java.lang.String, + * java.lang.String, java.lang.String, io.apicurio.registry.rest.v3.beans.EditableVersionMetaData) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", KEY_EDITABLE_METADATA}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", + KEY_EDITABLE_METADATA }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void updateArtifactVersionMetaData(String groupId, String artifactId, String versionExpression, EditableVersionMetaData data) { + public void updateArtifactVersionMetaData(String groupId, String artifactId, String versionExpression, + EditableVersionMetaData data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("versionExpression", versionExpression); @@ -481,16 +507,19 @@ public void updateArtifactVersionMetaData(String groupId, String artifactId, Str dto.setDescription(data.getDescription()); dto.setLabels(data.getLabels()); dto.setState(data.getState()); - storage.updateArtifactVersionMetaData(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), gav.getRawVersionId(), dto); + storage.updateArtifactVersionMetaData(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), + gav.getRawVersionId(), dto); } /** - * @see 
io.apicurio.registry.rest.v3.GroupsResource#addArtifactVersionComment(java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.rest.v3.beans.NewComment) + * @see io.apicurio.registry.rest.v3.GroupsResource#addArtifactVersionComment(java.lang.String, + * java.lang.String, java.lang.String, io.apicurio.registry.rest.v3.beans.NewComment) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public Comment addArtifactVersionComment(String groupId, String artifactId, String versionExpression, NewComment data) { + public Comment addArtifactVersionComment(String groupId, String artifactId, String versionExpression, + NewComment data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("versionExpression", versionExpression); @@ -498,18 +527,21 @@ public Comment addArtifactVersionComment(String groupId, String artifactId, Stri var gav = VersionExpressionParser.parse(new GA(groupId, artifactId), versionExpression, (ga, branchId) -> storage.getBranchTip(ga, branchId, RetrievalBehavior.DEFAULT)); - CommentDto newComment = storage.createArtifactVersionComment(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), - gav.getRawVersionId(), data.getValue()); + CommentDto newComment = storage.createArtifactVersionComment(gav.getRawGroupIdWithNull(), + gav.getRawArtifactId(), gav.getRawVersionId(), data.getValue()); return V3ApiUtil.commentDtoToComment(newComment); } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifactVersionComment(java.lang.String, java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifactVersionComment(java.lang.String, + * java.lang.String, java.lang.String, java.lang.String) */ @Override 
- @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", "comment_id"}) // TODO + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", + "comment_id" }) // TODO @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void deleteArtifactVersionComment(String groupId, String artifactId, String versionExpression, String commentId) { + public void deleteArtifactVersionComment(String groupId, String artifactId, String versionExpression, + String commentId) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("versionExpression", versionExpression); @@ -518,11 +550,13 @@ public void deleteArtifactVersionComment(String groupId, String artifactId, Stri var gav = VersionExpressionParser.parse(new GA(groupId, artifactId), versionExpression, (ga, branchId) -> storage.getBranchTip(ga, branchId, RetrievalBehavior.DEFAULT)); - storage.deleteArtifactVersionComment(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), gav.getRawVersionId(), commentId); + storage.deleteArtifactVersionComment(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), + gav.getRawVersionId(), commentId); } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactVersionComments(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.rest.v3.GroupsResource#getArtifactVersionComments(java.lang.String, + * java.lang.String, java.lang.String) */ @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) @@ -534,19 +568,21 @@ public List getArtifactVersionComments(String groupId, String artifactI var gav = VersionExpressionParser.parse(new GA(groupId, artifactId), version, (ga, branchId) -> storage.getBranchTip(ga, branchId, RetrievalBehavior.DEFAULT)); - return storage.getArtifactVersionComments(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), gav.getRawVersionId()) - 
.stream() - .map(V3ApiUtil::commentDtoToComment) - .collect(toList()); + return storage.getArtifactVersionComments(gav.getRawGroupIdWithNull(), gav.getRawArtifactId(), + gav.getRawVersionId()).stream().map(V3ApiUtil::commentDtoToComment).collect(toList()); } /** - * @see io.apicurio.registry.rest.v3.GroupsResource#updateArtifactVersionComment(java.lang.String, java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.rest.v3.beans.NewComment) + * @see io.apicurio.registry.rest.v3.GroupsResource#updateArtifactVersionComment(java.lang.String, + * java.lang.String, java.lang.String, java.lang.String, + * io.apicurio.registry.rest.v3.beans.NewComment) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", "comment_id"}) // TODO + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", KEY_VERSION, "3", + "comment_id" }) // TODO @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void updateArtifactVersionComment(String groupId, String artifactId, String versionExpression, String commentId, NewComment data) { + public void updateArtifactVersionComment(String groupId, String artifactId, String versionExpression, + String commentId, NewComment data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("versionExpression", versionExpression); @@ -563,7 +599,7 @@ public void updateArtifactVersionComment(String groupId, String artifactId, Stri @Override @Authorized(style = AuthorizedStyle.GroupOnly, level = AuthorizedLevel.Read) public ArtifactSearchResults listArtifactsInGroup(String groupId, BigInteger limit, BigInteger offset, - SortOrder order, ArtifactSortBy orderby) { + SortOrder order, ArtifactSortBy orderby) { requireParameter("groupId", groupId); if (orderby == null) { @@ -577,12 +613,14 @@ public ArtifactSearchResults listArtifactsInGroup(String groupId, BigInteger lim } final OrderBy 
oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc + : OrderDirection.desc; Set filters = new HashSet<>(); filters.add(SearchFilter.ofGroupId(new GroupId(groupId).getRawGroupIdWithNull())); - ArtifactSearchResultsDto resultsDto = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), limit.intValue()); + ArtifactSearchResultsDto resultsDto = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(resultsDto); } @@ -590,7 +628,7 @@ public ArtifactSearchResults listArtifactsInGroup(String groupId, BigInteger lim * @see io.apicurio.registry.rest.v3.GroupsResource#deleteArtifactsInGroup(java.lang.String) */ @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID }) @Authorized(style = AuthorizedStyle.GroupOnly, level = AuthorizedLevel.Write) public void deleteArtifactsInGroup(String groupId) { requireParameter("groupId", groupId); @@ -599,14 +637,17 @@ public void deleteArtifactsInGroup(String groupId) { } @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_IF_EXISTS, "2", KEY_CANONICAL, "3", "dryRun"}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_IF_EXISTS, "2", KEY_CANONICAL, "3", "dryRun" }) @Authorized(style = AuthorizedStyle.GroupOnly, level = AuthorizedLevel.Write, dryRunParam = 3) - public CreateArtifactResponse createArtifact(String groupId, IfArtifactExists ifExists, Boolean canonical, Boolean dryRun, CreateArtifact data) { + public CreateArtifactResponse createArtifact(String groupId, IfArtifactExists ifExists, Boolean canonical, + Boolean dryRun, CreateArtifact data) { requireParameter("groupId", groupId); if (data.getFirstVersion() != null) { requireParameter("body.firstVersion.content", 
data.getFirstVersion().getContent()); - requireParameter("body.firstVersion.content.content", data.getFirstVersion().getContent().getContent()); - requireParameter("body.firstVersion.content.contentType", data.getFirstVersion().getContent().getContentType()); + requireParameter("body.firstVersion.content.content", + data.getFirstVersion().getContent().getContent()); + requireParameter("body.firstVersion.content.contentType", + data.getFirstVersion().getContent().getContentType()); if (data.getFirstVersion().getBranches() == null) { data.getFirstVersion().setBranches(Collections.emptyList()); } @@ -617,26 +658,27 @@ public CreateArtifactResponse createArtifact(String groupId, IfArtifactExists if } // TODO Mitigation for MITM attacks, verify that the artifact is the expected one -// if (xRegistryContentHash != null) { -// String calculatedSha = null; -// try { -// RegistryHashAlgorithm algorithm = (xRegistryHashAlgorithm == null) ? RegistryHashAlgorithm.SHA256 : RegistryHashAlgorithm.valueOf(xRegistryHashAlgorithm); -// switch (algorithm) { -// case MD5: -// calculatedSha = Hashing.md5().hashString(content.content(), StandardCharsets.UTF_8).toString(); -// break; -// case SHA256: -// calculatedSha = Hashing.sha256().hashString(content.content(), StandardCharsets.UTF_8).toString(); -// break; -// } -// } catch (Exception e) { -// throw new BadRequestException("Requested hash algorithm not supported"); -// } -// -// if (!calculatedSha.equals(xRegistryContentHash.trim())) { -// throw new BadRequestException("Provided Artifact Hash doesn't match with the content"); -// } -// } + // if (xRegistryContentHash != null) { + // String calculatedSha = null; + // try { + // RegistryHashAlgorithm algorithm = (xRegistryHashAlgorithm == null) ? 
RegistryHashAlgorithm.SHA256 : + // RegistryHashAlgorithm.valueOf(xRegistryHashAlgorithm); + // switch (algorithm) { + // case MD5: + // calculatedSha = Hashing.md5().hashString(content.content(), StandardCharsets.UTF_8).toString(); + // break; + // case SHA256: + // calculatedSha = Hashing.sha256().hashString(content.content(), StandardCharsets.UTF_8).toString(); + // break; + // } + // } catch (Exception e) { + // throw new BadRequestException("Requested hash algorithm not supported"); + // } + // + // if (!calculatedSha.equals(xRegistryContentHash.trim())) { + // throw new BadRequestException("Provided Artifact Hash doesn't match with the content"); + // } + // } final boolean fcanonical = canonical == null ? Boolean.FALSE : canonical; String artifactId = data.getArtifactId(); @@ -656,7 +698,8 @@ public CreateArtifactResponse createArtifact(String groupId, IfArtifactExists if } TypedContent typedContent = TypedContent.create(content, contentType); - String artifactType = ArtifactTypeUtil.determineArtifactType(typedContent, data.getArtifactType(), factory); + String artifactType = ArtifactTypeUtil.determineArtifactType(typedContent, data.getArtifactType(), + factory); // Convert references to DTOs final List referencesAsDtos = toReferenceDtos(references); @@ -665,30 +708,23 @@ public CreateArtifactResponse createArtifact(String groupId, IfArtifactExists if final Map resolvedReferences = storage.resolveReferences(referencesAsDtos); // Apply any configured rules - rulesService.applyRules(new GroupId(groupId).getRawGroupIdWithNull(), artifactId, artifactType, typedContent, - RuleApplicationType.CREATE, references, resolvedReferences); + rulesService.applyRules(new GroupId(groupId).getRawGroupIdWithNull(), artifactId, artifactType, + typedContent, RuleApplicationType.CREATE, references, resolvedReferences); // Create the artifact (with optional first version) EditableArtifactMetaDataDto artifactMetaData = EditableArtifactMetaDataDto.builder() - 
.description(data.getDescription()) - .name(data.getName()) - .labels(data.getLabels()) - .build(); + .description(data.getDescription()).name(data.getName()).labels(data.getLabels()).build(); String firstVersion = null; ContentWrapperDto firstVersionContent = null; EditableVersionMetaDataDto firstVersionMetaData = null; List firstVersionBranches = null; if (data.getFirstVersion() != null) { firstVersion = data.getFirstVersion().getVersion(); - firstVersionContent = ContentWrapperDto.builder() - .content(content) - .contentType(contentType) - .references(referencesAsDtos) - .build(); + firstVersionContent = ContentWrapperDto.builder().content(content).contentType(contentType) + .references(referencesAsDtos).build(); firstVersionMetaData = EditableVersionMetaDataDto.builder() .description(data.getFirstVersion().getDescription()) - .name(data.getFirstVersion().getName()) - .labels(data.getFirstVersion().getLabels()) + .name(data.getFirstVersion().getName()).labels(data.getFirstVersion().getLabels()) .build(); firstVersionBranches = data.getFirstVersion().getBranches(); } @@ -696,30 +732,22 @@ public CreateArtifactResponse createArtifact(String groupId, IfArtifactExists if // Don't actually do anything if "dryRun" is 'true' if (dryRun != null && dryRun) { return CreateArtifactResponse.builder() - .artifact(ArtifactMetaData.builder() - .groupId(groupId) - .artifactId(artifactId) - .createdOn(new Date()) - .owner(securityIdentity.getPrincipal().getName()) - .modifiedBy(securityIdentity.getPrincipal().getName()) - .modifiedOn(new Date()) + .artifact(ArtifactMetaData.builder().groupId(groupId).artifactId(artifactId) + .createdOn(new Date()).owner(securityIdentity.getPrincipal().getName()) + .modifiedBy(securityIdentity.getPrincipal().getName()).modifiedOn(new Date()) .name(artifactMetaData.getName()) .description(artifactMetaData.getDescription()) - .labels(artifactMetaData.getLabels()) - .artifactType(artifactType) - .build()) + 
.labels(artifactMetaData.getLabels()).artifactType(artifactType).build()) .build(); } Pair storageResult = storage.createArtifact( - new GroupId(groupId).getRawGroupIdWithNull(), - artifactId, artifactType, artifactMetaData, firstVersion, firstVersionContent, - firstVersionMetaData, firstVersionBranches); + new GroupId(groupId).getRawGroupIdWithNull(), artifactId, artifactType, artifactMetaData, + firstVersion, firstVersionContent, firstVersionMetaData, firstVersionBranches); // Now return both the artifact metadata and (if available) the version metadata CreateArtifactResponse rval = CreateArtifactResponse.builder() - .artifact(V3ApiUtil.dtoToArtifactMetaData(storageResult.getLeft())) - .build(); + .artifact(V3ApiUtil.dtoToArtifactMetaData(storageResult.getLeft())).build(); if (storageResult.getRight() != null) { rval.setVersion(V3ApiUtil.dtoToVersionMetaData(storageResult.getRight())); } @@ -751,20 +779,22 @@ public VersionSearchResults listArtifactVersions(String groupId, String artifact storage.getArtifactMetaData(gid.getRawGroupIdWithNull(), artifactId); final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = order == null || order == SortOrder.desc ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = order == null || order == SortOrder.desc ? 
OrderDirection.asc + : OrderDirection.desc; Set filters = Set.of( SearchFilter.ofGroupId(new GroupId(groupId).getRawGroupIdWithNull()), - SearchFilter.ofArtifactId(artifactId) - ); - VersionSearchResultsDto resultsDto = storage.searchVersions(filters, oBy, oDir, offset.intValue(), limit.intValue()); + SearchFilter.ofArtifactId(artifactId)); + VersionSearchResultsDto resultsDto = storage.searchVersions(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(resultsDto); } @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", "dryRun"}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID, "2", "dryRun" }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write, dryRunParam = 2) - public VersionMetaData createArtifactVersion(String groupId, String artifactId, Boolean dryRun, CreateVersion data) { + public VersionMetaData createArtifactVersion(String groupId, String artifactId, Boolean dryRun, + CreateVersion data) { requireParameter("content", data.getContent()); requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); @@ -779,52 +809,41 @@ public VersionMetaData createArtifactVersion(String groupId, String artifactId, String ct = data.getContent().getContentType(); // Transform the given references into dtos - final List referencesAsDtos = toReferenceDtos(data.getContent().getReferences()); + final List referencesAsDtos = toReferenceDtos( + data.getContent().getReferences()); // Try to resolve the new artifact references and the nested ones (if any) final Map resolvedReferences = storage.resolveReferences(referencesAsDtos); String artifactType = lookupArtifactType(groupId, artifactId); TypedContent typedContent = TypedContent.create(content, ct); - rulesService.applyRules(new GroupId(groupId).getRawGroupIdWithNull(), artifactId, artifactType, typedContent, - RuleApplicationType.UPDATE, data.getContent().getReferences(), 
resolvedReferences); + rulesService.applyRules(new GroupId(groupId).getRawGroupIdWithNull(), artifactId, artifactType, + typedContent, RuleApplicationType.UPDATE, data.getContent().getReferences(), + resolvedReferences); EditableVersionMetaDataDto metaDataDto = EditableVersionMetaDataDto.builder() - .description(data.getDescription()) - .name(data.getName()) - .labels(data.getLabels()) - .build(); - ContentWrapperDto contentDto = ContentWrapperDto.builder() - .contentType(ct) - .content(content) - .references(referencesAsDtos) - .build(); + .description(data.getDescription()).name(data.getName()).labels(data.getLabels()).build(); + ContentWrapperDto contentDto = ContentWrapperDto.builder().contentType(ct).content(content) + .references(referencesAsDtos).build(); // Don't actually do anything if "dryRun" is 'true' if (dryRun != null && dryRun) { - return VersionMetaData.builder() - .groupId(groupId) - .artifactId(artifactId) - .version(data.getVersion() == null ? "0" : data.getVersion()) - .createdOn(new Date()) - .owner(securityIdentity.getPrincipal().getName()) - .contentId(-1L) - .name(metaDataDto.getName()) - .description(metaDataDto.getDescription()) - .labels(metaDataDto.getLabels()) - .state(VersionState.ENABLED) - .globalId(-1L) - .artifactType(artifactType) - .build(); + return VersionMetaData.builder().groupId(groupId).artifactId(artifactId) + .version(data.getVersion() == null ? 
"0" : data.getVersion()).createdOn(new Date()) + .owner(securityIdentity.getPrincipal().getName()).contentId(-1L) + .name(metaDataDto.getName()).description(metaDataDto.getDescription()) + .labels(metaDataDto.getLabels()).state(VersionState.ENABLED).globalId(-1L) + .artifactType(artifactType).build(); } - ArtifactVersionMetaDataDto vmd = storage.createArtifactVersion(new GroupId(groupId).getRawGroupIdWithNull(), artifactId, data.getVersion(), - artifactType, contentDto, metaDataDto, data.getBranches()); + ArtifactVersionMetaDataDto vmd = storage.createArtifactVersion( + new GroupId(groupId).getRawGroupIdWithNull(), artifactId, data.getVersion(), artifactType, + contentDto, metaDataDto, data.getBranches()); return V3ApiUtil.dtoToVersionMetaData(vmd); } @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public BranchMetaData createBranch(String groupId, String artifactId, CreateBranch data) { requireParameter("groupId", groupId); @@ -833,13 +852,15 @@ public BranchMetaData createBranch(String groupId, String artifactId, CreateBran GA ga = new GA(groupId, artifactId); BranchId bid = new BranchId(data.getBranchId()); - BranchMetaDataDto branchDto = storage.createBranch(ga, bid, data.getDescription(), data.getVersions()); + BranchMetaDataDto branchDto = storage.createBranch(ga, bid, data.getDescription(), + data.getVersions()); return V3ApiUtil.dtoToBranchMetaData(branchDto); } @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) - public BranchSearchResults listBranches(String groupId, String artifactId, BigInteger offset, BigInteger limit) { + public BranchSearchResults listBranches(String groupId, String artifactId, BigInteger offset, + BigInteger limit) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); 
@@ -850,7 +871,8 @@ public BranchSearchResults listBranches(String groupId, String artifactId, BigIn limit = BigInteger.valueOf(20); } - BranchSearchResultsDto dto = storage.getBranches(new GA(groupId, artifactId), offset.intValue(), limit.intValue()); + BranchSearchResultsDto dto = storage.getBranches(new GA(groupId, artifactId), offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(dto); } @@ -860,26 +882,27 @@ public BranchMetaData getBranchMetaData(String groupId, String artifactId, Strin requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); - BranchMetaDataDto branch = storage.getBranchMetaData(new GA(groupId, artifactId), new BranchId(branchId)); + BranchMetaDataDto branch = storage.getBranchMetaData(new GA(groupId, artifactId), + new BranchId(branchId)); return V3ApiUtil.dtoToBranchMetaData(branch); } @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void updateBranchMetaData(String groupId, String artifactId, String branchId, EditableBranchMetaData data) { + public void updateBranchMetaData(String groupId, String artifactId, String branchId, + EditableBranchMetaData data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("branchId", branchId); - EditableBranchMetaDataDto dto = EditableBranchMetaDataDto.builder() - .description(data.getDescription()) + EditableBranchMetaDataDto dto = EditableBranchMetaDataDto.builder().description(data.getDescription()) .build(); storage.updateBranchMetaData(new GA(groupId, artifactId), new BranchId(branchId), dto); } @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID }) @Authorized(style = 
AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) public void deleteBranch(String groupId, String artifactId, String branchId) { requireParameter("groupId", groupId); @@ -891,7 +914,8 @@ public void deleteBranch(String groupId, String artifactId, String branchId) { @Override @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Read) - public VersionSearchResults listBranchVersions(String groupId, String artifactId, String branchId, BigInteger offset, BigInteger limit) { + public VersionSearchResults listBranchVersions(String groupId, String artifactId, String branchId, + BigInteger offset, BigInteger limit) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("branchId", branchId); @@ -909,14 +933,16 @@ public VersionSearchResults listBranchVersions(String groupId, String artifactId // Throw 404 if the artifact or branch does not exist. storage.getBranchMetaData(ga, bid); - VersionSearchResultsDto results = storage.getBranchVersions(ga, bid, offset.intValue(), limit.intValue()); + VersionSearchResultsDto results = storage.getBranchVersions(ga, bid, offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(results); } @Override - @Audited(extractParameters = {"0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void replaceBranchVersions(String groupId, String artifactId, String branchId, ReplaceBranchVersions data) { + public void replaceBranchVersions(String groupId, String artifactId, String branchId, + ReplaceBranchVersions data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("branchId", branchId); @@ -932,9 +958,10 @@ public void replaceBranchVersions(String groupId, String artifactId, String bran } @Override - @Audited(extractParameters = {"0", 
KEY_GROUP_ID, "1", KEY_ARTIFACT_ID}) + @Audited(extractParameters = { "0", KEY_GROUP_ID, "1", KEY_ARTIFACT_ID }) @Authorized(style = AuthorizedStyle.GroupAndArtifact, level = AuthorizedLevel.Write) - public void addVersionToBranch(String groupId, String artifactId, String branchId, AddVersionToBranch data) { + public void addVersionToBranch(String groupId, String artifactId, String branchId, + AddVersionToBranch data) { requireParameter("groupId", groupId); requireParameter("artifactId", artifactId); requireParameter("branchId", branchId); @@ -959,7 +986,8 @@ public void addVersionToBranch(String groupId, String artifactId, String branchI * @param version * @param builder */ - private void checkIfDeprecated(Supplier stateSupplier, String groupId, String artifactId, String version, Response.ResponseBuilder builder) { + private void checkIfDeprecated(Supplier stateSupplier, String groupId, String artifactId, + String version, Response.ResponseBuilder builder) { HeadersHack.checkIfDeprecated(stateSupplier, groupId, artifactId, version, builder); } @@ -970,7 +998,8 @@ private void checkIfDeprecated(Supplier stateSupplier, String grou * @param artifactId */ private String lookupArtifactType(String groupId, String artifactId) { - return storage.getArtifactMetaData(new GroupId(groupId).getRawGroupIdWithNull(), artifactId).getArtifactType(); + return storage.getArtifactMetaData(new GroupId(groupId).getRawGroupIdWithNull(), artifactId) + .getArtifactType(); } private String getContentType(CreateArtifact data) { @@ -1000,7 +1029,8 @@ private static void requireParameter(String parameterName, Object parameterValue } } - private CreateArtifactResponse handleIfExists(String groupId, String artifactId, IfArtifactExists ifExists, CreateVersion theVersion, boolean canonical) { + private CreateArtifactResponse handleIfExists(String groupId, String artifactId, + IfArtifactExists ifExists, CreateVersion theVersion, boolean canonical) { if (ifExists == null || theVersion == null) { 
ifExists = IfArtifactExists.FAIL; } @@ -1008,11 +1038,13 @@ private CreateArtifactResponse handleIfExists(String groupId, String artifactId, switch (ifExists) { case CREATE_VERSION: return updateArtifactInternal(groupId, artifactId, theVersion); -// case RETURN: -// GAV latestGAV = storage.getBranchTip(new GA(groupId, artifactId), BranchId.LATEST, ArtifactRetrievalBehavior.DEFAULT); -// ArtifactVersionMetaDataDto latestVersionMD = storage.getArtifactVersionMetaData(latestGAV.getRawGroupIdWithNull(), -// latestGAV.getRawArtifactId(), latestGAV.getRawVersionId()); -// return V3ApiUtil.dtoToVersionMetaData(latestVersionMD); + // case RETURN: + // GAV latestGAV = storage.getBranchTip(new GA(groupId, artifactId), BranchId.LATEST, + // ArtifactRetrievalBehavior.DEFAULT); + // ArtifactVersionMetaDataDto latestVersionMD = + // storage.getArtifactVersionMetaData(latestGAV.getRawGroupIdWithNull(), + // latestGAV.getRawArtifactId(), latestGAV.getRawVersionId()); + // return V3ApiUtil.dtoToVersionMetaData(latestVersionMD); case FIND_OR_CREATE_VERSION: return handleIfExistsReturnOrUpdate(groupId, artifactId, theVersion, canonical); default: @@ -1020,30 +1052,33 @@ private CreateArtifactResponse handleIfExists(String groupId, String artifactId, } } - private CreateArtifactResponse handleIfExistsReturnOrUpdate(String groupId, String artifactId, CreateVersion theVersion, boolean canonical) { + private CreateArtifactResponse handleIfExistsReturnOrUpdate(String groupId, String artifactId, + CreateVersion theVersion, boolean canonical) { try { // Find the version - TypedContent content = TypedContent.create(ContentHandle.create(theVersion.getContent().getContent()), theVersion.getContent().getContentType()); - List referenceDtos = toReferenceDtos(theVersion.getContent().getReferences()); + TypedContent content = TypedContent.create( + ContentHandle.create(theVersion.getContent().getContent()), + theVersion.getContent().getContentType()); + List referenceDtos = toReferenceDtos( + 
theVersion.getContent().getReferences()); ArtifactVersionMetaDataDto vmdDto = this.storage.getArtifactVersionMetaDataByContent( - new GroupId(groupId).getRawGroupIdWithNull(), artifactId, canonical, content, referenceDtos); + new GroupId(groupId).getRawGroupIdWithNull(), artifactId, canonical, content, + referenceDtos); VersionMetaData vmd = V3ApiUtil.dtoToVersionMetaData(vmdDto); // Need to also return the artifact metadata ArtifactMetaDataDto amdDto = this.storage.getArtifactMetaData(groupId, artifactId); ArtifactMetaData amd = V3ApiUtil.dtoToArtifactMetaData(amdDto); - return CreateArtifactResponse.builder() - .artifact(amd) - .version(vmd) - .build(); + return CreateArtifactResponse.builder().artifact(amd).version(vmd).build(); } catch (ArtifactNotFoundException nfe) { // This is OK - we'll update the artifact if there is no matching content already there. } return updateArtifactInternal(groupId, artifactId, theVersion); } - private CreateArtifactResponse updateArtifactInternal(String groupId, String artifactId, CreateVersion theVersion) { + private CreateArtifactResponse updateArtifactInternal(String groupId, String artifactId, + CreateVersion theVersion) { String version = theVersion.getVersion(); String name = theVersion.getName(); String description = theVersion.getDescription(); @@ -1055,35 +1090,27 @@ private CreateArtifactResponse updateArtifactInternal(String groupId, String art String artifactType = lookupArtifactType(groupId, artifactId); - //Transform the given references into dtos and set the contentId, this will also detect if any of the passed references does not exist. + // Transform the given references into dtos and set the contentId, this will also detect if any of the + // passed references does not exist. 
final List referencesAsDtos = toReferenceDtos(references); final Map resolvedReferences = storage.resolveReferences(referencesAsDtos); final TypedContent typedContent = TypedContent.create(content, contentType); - rulesService.applyRules(new GroupId(groupId).getRawGroupIdWithNull(), artifactId, artifactType, typedContent, - RuleApplicationType.UPDATE, references, resolvedReferences); - EditableVersionMetaDataDto metaData = EditableVersionMetaDataDto.builder() - .name(name) - .description(description) - .labels(labels) - .build(); - ContentWrapperDto contentDto = ContentWrapperDto.builder() - .contentType(contentType) - .content(content) - .references(referencesAsDtos) - .build(); - ArtifactVersionMetaDataDto vmdDto = storage.createArtifactVersion(groupId, artifactId, version, artifactType, - contentDto, metaData, branches); + rulesService.applyRules(new GroupId(groupId).getRawGroupIdWithNull(), artifactId, artifactType, + typedContent, RuleApplicationType.UPDATE, references, resolvedReferences); + EditableVersionMetaDataDto metaData = EditableVersionMetaDataDto.builder().name(name) + .description(description).labels(labels).build(); + ContentWrapperDto contentDto = ContentWrapperDto.builder().contentType(contentType).content(content) + .references(referencesAsDtos).build(); + ArtifactVersionMetaDataDto vmdDto = storage.createArtifactVersion(groupId, artifactId, version, + artifactType, contentDto, metaData, branches); VersionMetaData vmd = V3ApiUtil.dtoToVersionMetaData(vmdDto); // Need to also return the artifact metadata ArtifactMetaDataDto amdDto = this.storage.getArtifactMetaData(groupId, artifactId); ArtifactMetaData amd = V3ApiUtil.dtoToArtifactMetaData(amdDto); - return CreateArtifactResponse.builder() - .artifact(amd) - .version(vmd) - .build(); + return CreateArtifactResponse.builder().artifact(amd).version(vmd).build(); } private List toReferenceDtos(List references) { @@ -1092,8 +1119,7 @@ private List toReferenceDtos(List refer } return 
references.stream() .peek(r -> r.setGroupId(new GroupId(r.getGroupId()).getRawGroupIdWithNull())) - .map(V3ApiUtil::referenceToDto) - .collect(toList()); + .map(V3ApiUtil::referenceToDto).collect(toList()); } /** @@ -1104,35 +1130,32 @@ private List toReferenceDtos(List refer private InputStream fetchContentFromURL(Client client, URI url) { try { // 1. Registry issues HTTP HEAD request to the target URL. - List contentLengthHeaders = client - .target(url) - .request() - .head() - .getHeaders() + List contentLengthHeaders = client.target(url).request().head().getHeaders() .get("Content-Length"); if (contentLengthHeaders == null || contentLengthHeaders.size() < 1) { - throw new BadRequestException("Requested resource URL does not provide 'Content-Length' in the headers"); + throw new BadRequestException( + "Requested resource URL does not provide 'Content-Length' in the headers"); } // 2. According to HTTP specification, target server must return Content-Length header. int contentLength = Integer.parseInt(contentLengthHeaders.get(0).toString()); - // 3. Registry analyzes value of Content-Length to check if file with declared size could be processed securely. + // 3. Registry analyzes value of Content-Length to check if file with declared size could be + // processed securely. if (contentLength > restConfig.getDownloadMaxSize()) { - throw new BadRequestException("Requested resource is bigger than " + restConfig.getDownloadMaxSize() + " and cannot be downloaded."); + throw new BadRequestException("Requested resource is bigger than " + + restConfig.getDownloadMaxSize() + " and cannot be downloaded."); } if (contentLength <= 0) { throw new BadRequestException("Requested resource URL is providing 'Content-Length' <= 0."); } - // 4. Finally, registry issues HTTP GET to the target URL and fetches only amount of bytes specified by HTTP HEAD from step 1. 
- return new BufferedInputStream(client - .target(url) - .request() - .get() - .readEntity(InputStream.class), contentLength); + // 4. Finally, registry issues HTTP GET to the target URL and fetches only amount of bytes + // specified by HTTP HEAD from step 1. + return new BufferedInputStream(client.target(url).request().get().readEntity(InputStream.class), + contentLength); } catch (BadRequestException bre) { throw bre; } catch (Exception e) { diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/IdsResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v3/IdsResourceImpl.java index 3e7a15ede2..6e373adbdb 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/IdsResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/IdsResourceImpl.java @@ -29,14 +29,15 @@ import java.util.stream.Collectors; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class IdsResourceImpl extends AbstractResourceImpl implements IdsResource { @Inject CommonResourceOperations common; - private void checkIfDeprecated(Supplier stateSupplier, String artifactId, String version, Response.ResponseBuilder builder) { + private void checkIfDeprecated(Supplier stateSupplier, String artifactId, String version, + Response.ResponseBuilder builder) { HeadersHack.checkIfDeprecated(stateSupplier, null, artifactId, version, builder); } @@ -52,7 +53,8 @@ public Response getContentById(long contentId) { } /** - * @see io.apicurio.registry.rest.v3.IdsResource#getContentByGlobalId(long, io.apicurio.registry.rest.v3.beans.HandleReferencesType) + * @see io.apicurio.registry.rest.v3.IdsResource#getContentByGlobalId(long, + * io.apicurio.registry.rest.v3.beans.HandleReferencesType) */ @Override @Authorized(style = AuthorizedStyle.GlobalId, level = AuthorizedLevel.Read) @@ -69,9 +71,11 @@ public Response 
getContentByGlobalId(long globalId, HandleReferencesType referen StoredArtifactVersionDto artifact = storage.getArtifactVersionContent(globalId); TypedContent contentToReturn = TypedContent.create(artifact.getContent(), artifact.getContentType()); - contentToReturn = handleContentReferences(references, metaData.getArtifactType(), contentToReturn, artifact.getReferences()); + contentToReturn = handleContentReferences(references, metaData.getArtifactType(), contentToReturn, + artifact.getReferences()); - Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), contentToReturn.getContentType()); + Response.ResponseBuilder builder = Response.ok(contentToReturn.getContent(), + contentToReturn.getContentType()); checkIfDeprecated(metaData::getState, metaData.getArtifactId(), metaData.getVersion(), builder); return builder.build(); } @@ -101,26 +105,25 @@ public List referencesByContentHash(String contentHash) { @Override public List referencesByContentId(long contentId) { ContentWrapperDto artifact = storage.getContentById(contentId); - return artifact.getReferences().stream() - .map(V3ApiUtil::referenceDtoToReference) + return artifact.getReferences().stream().map(V3ApiUtil::referenceDtoToReference) .collect(Collectors.toList()); } /** - * @see io.apicurio.registry.rest.v3.IdsResource#referencesByGlobalId(long, io.apicurio.registry.types.ReferenceType) + * @see io.apicurio.registry.rest.v3.IdsResource#referencesByGlobalId(long, + * io.apicurio.registry.types.ReferenceType) */ @Override public List referencesByGlobalId(long globalId, ReferenceType refType) { if (refType == ReferenceType.OUTBOUND || refType == null) { StoredArtifactVersionDto artifact = storage.getArtifactVersionContent(globalId); - return artifact.getReferences().stream() - .map(V3ApiUtil::referenceDtoToReference) + return artifact.getReferences().stream().map(V3ApiUtil::referenceDtoToReference) .collect(Collectors.toList()); } else { ArtifactVersionMetaDataDto vmd = 
storage.getArtifactVersionMetaData(globalId); - return storage.getInboundArtifactReferences(vmd.getGroupId(), vmd.getArtifactId(), vmd.getVersion()).stream() - .map(V3ApiUtil::referenceDtoToReference) - .collect(Collectors.toList()); + return storage + .getInboundArtifactReferences(vmd.getGroupId(), vmd.getArtifactId(), vmd.getVersion()) + .stream().map(V3ApiUtil::referenceDtoToReference).collect(Collectors.toList()); } } } diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/SearchResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v3/SearchResourceImpl.java index d1cb444858..4a112c787e 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/SearchResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/SearchResourceImpl.java @@ -40,7 +40,7 @@ import java.util.Set; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class SearchResourceImpl implements SearchResource { @@ -58,11 +58,10 @@ public class SearchResourceImpl implements SearchResource { RegistryStorageContentUtils contentUtils; @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Read) - public ArtifactSearchResults searchArtifacts(String name, BigInteger offset, BigInteger limit, SortOrder order, - ArtifactSortBy orderby, List labels, String description, String groupId, Long globalId, Long contentId, - String artifactId) - { + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) + public ArtifactSearchResults searchArtifacts(String name, BigInteger offset, BigInteger limit, + SortOrder order, ArtifactSortBy orderby, List labels, String description, String groupId, + Long globalId, Long contentId, String artifactId) { if (orderby == null) { orderby = ArtifactSortBy.name; } @@ -74,7 +73,8 @@ public ArtifactSearchResults searchArtifacts(String name, BigInteger offset, Big 
} final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = (order == null || order == SortOrder.asc) ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = (order == null || order == SortOrder.asc) ? OrderDirection.asc + : OrderDirection.desc; Set filters = new HashSet(); if (!StringUtil.isEmpty(name)) { @@ -88,27 +88,27 @@ public ArtifactSearchResults searchArtifacts(String name, BigInteger offset, Big } if (labels != null && !labels.isEmpty()) { - labels.stream() - .map(prop -> { - int delimiterIndex = prop.indexOf(":"); - String labelKey; - String labelValue; - if (delimiterIndex == 0) { - throw new BadRequestException("label search filter wrong formatted, missing left side of ':' delimiter"); - } - if (delimiterIndex == (prop.length() - 1)) { - throw new BadRequestException("label search filter wrong formatted, missing right side of ':' delimiter"); - } - if (delimiterIndex < 0) { - labelKey = prop; - labelValue = null; - } else{ - labelKey = prop.substring(0, delimiterIndex); - labelValue = prop.substring(delimiterIndex + 1); - } - return SearchFilter.ofLabel(labelKey, labelValue); - }) - .forEach(filters::add); + labels.stream().map(prop -> { + int delimiterIndex = prop.indexOf(":"); + String labelKey; + String labelValue; + if (delimiterIndex == 0) { + throw new BadRequestException( + "label search filter wrong formatted, missing left side of ':' delimiter"); + } + if (delimiterIndex == (prop.length() - 1)) { + throw new BadRequestException( + "label search filter wrong formatted, missing right side of ':' delimiter"); + } + if (delimiterIndex < 0) { + labelKey = prop; + labelValue = null; + } else { + labelKey = prop.substring(0, delimiterIndex); + labelValue = prop.substring(delimiterIndex + 1); + } + return SearchFilter.ofLabel(labelKey, labelValue); + }).forEach(filters::add); } if (globalId != null && globalId > 0) { filters.add(SearchFilter.ofGlobalId(globalId)); @@ -117,14 +117,16 @@ public 
ArtifactSearchResults searchArtifacts(String name, BigInteger offset, Big filters.add(SearchFilter.ofContentId(contentId)); } - ArtifactSearchResultsDto results = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), limit.intValue()); + ArtifactSearchResultsDto results = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(results); } @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.Read) - public ArtifactSearchResults searchArtifactsByContent(Boolean canonical, String artifactType, String groupId, - BigInteger offset, BigInteger limit, SortOrder order, ArtifactSortBy orderby, InputStream data) { + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.Read) + public ArtifactSearchResults searchArtifactsByContent(Boolean canonical, String artifactType, + String groupId, BigInteger offset, BigInteger limit, SortOrder order, ArtifactSortBy orderby, + InputStream data) { if (orderby == null) { orderby = ArtifactSortBy.name; @@ -136,7 +138,8 @@ public ArtifactSearchResults searchArtifactsByContent(Boolean canonical, String limit = BigInteger.valueOf(20); } final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = order == null || order == SortOrder.asc ? 
OrderDirection.asc + : OrderDirection.desc; if (canonical == null) { canonical = Boolean.FALSE; @@ -150,7 +153,8 @@ public ArtifactSearchResults searchArtifactsByContent(Boolean canonical, String Set filters = new HashSet(); if (canonical && artifactType != null) { - String canonicalHash = contentUtils.getCanonicalContentHash(typedContent, artifactType, null, null); + String canonicalHash = contentUtils.getCanonicalContentHash(typedContent, artifactType, null, + null); filters.add(SearchFilter.ofCanonicalHash(canonicalHash)); } else if (!canonical) { String contentHash = content.getSha256Hash(); @@ -162,13 +166,14 @@ public ArtifactSearchResults searchArtifactsByContent(Boolean canonical, String filters.add(SearchFilter.ofGroupId(new GroupId(groupId).getRawGroupIdWithNull())); } - ArtifactSearchResultsDto results = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), limit.intValue()); + ArtifactSearchResultsDto results = storage.searchArtifacts(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(results); } @Override - public GroupSearchResults searchGroups(BigInteger offset, BigInteger limit, SortOrder order, GroupSortBy orderby, - List labels, String description, String groupId) { + public GroupSearchResults searchGroups(BigInteger offset, BigInteger limit, SortOrder order, + GroupSortBy orderby, List labels, String description, String groupId) { if (orderby == null) { orderby = GroupSortBy.groupId; } @@ -180,7 +185,8 @@ public GroupSearchResults searchGroups(BigInteger offset, BigInteger limit, Sort } final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = order == null || order == SortOrder.asc ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = order == null || order == SortOrder.asc ? 
OrderDirection.asc + : OrderDirection.desc; Set filters = new HashSet(); if (!StringUtil.isEmpty(groupId)) { @@ -191,37 +197,38 @@ public GroupSearchResults searchGroups(BigInteger offset, BigInteger limit, Sort } if (labels != null && !labels.isEmpty()) { - labels.stream() - .map(prop -> { - int delimiterIndex = prop.indexOf(":"); - String labelKey; - String labelValue; - if (delimiterIndex == 0) { - throw new BadRequestException("label search filter wrong formatted, missing left side of ':' delimiter"); - } - if (delimiterIndex == (prop.length() - 1)) { - throw new BadRequestException("label search filter wrong formatted, missing right side of ':' delimiter"); - } - if (delimiterIndex < 0) { - labelKey = prop; - labelValue = null; - } else{ - labelKey = prop.substring(0, delimiterIndex); - labelValue = prop.substring(delimiterIndex + 1); - } - return SearchFilter.ofLabel(labelKey, labelValue); - }) - .forEach(filters::add); - } - - GroupSearchResultsDto results = storage.searchGroups(filters, oBy, oDir, offset.intValue(), limit.intValue()); + labels.stream().map(prop -> { + int delimiterIndex = prop.indexOf(":"); + String labelKey; + String labelValue; + if (delimiterIndex == 0) { + throw new BadRequestException( + "label search filter wrong formatted, missing left side of ':' delimiter"); + } + if (delimiterIndex == (prop.length() - 1)) { + throw new BadRequestException( + "label search filter wrong formatted, missing right side of ':' delimiter"); + } + if (delimiterIndex < 0) { + labelKey = prop; + labelValue = null; + } else { + labelKey = prop.substring(0, delimiterIndex); + labelValue = prop.substring(delimiterIndex + 1); + } + return SearchFilter.ofLabel(labelKey, labelValue); + }).forEach(filters::add); + } + + GroupSearchResultsDto results = storage.searchGroups(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(results); } @Override - public VersionSearchResults searchVersions(String version, BigInteger 
offset, BigInteger limit, SortOrder order, - VersionSortBy orderby, List labels, String description, String groupId, Long globalId, Long contentId, - String artifactId, String name) { + public VersionSearchResults searchVersions(String version, BigInteger offset, BigInteger limit, + SortOrder order, VersionSortBy orderby, List labels, String description, String groupId, + Long globalId, Long contentId, String artifactId, String name) { if (orderby == null) { orderby = VersionSortBy.globalId; } @@ -233,7 +240,8 @@ public VersionSearchResults searchVersions(String version, BigInteger offset, Bi } final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = (order == null || order == SortOrder.asc) ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = (order == null || order == SortOrder.asc) ? OrderDirection.asc + : OrderDirection.desc; Set filters = new HashSet(); if (!StringUtil.isEmpty(groupId)) { @@ -253,27 +261,27 @@ public VersionSearchResults searchVersions(String version, BigInteger offset, Bi filters.add(SearchFilter.ofDescription(description)); } if (labels != null && !labels.isEmpty()) { - labels.stream() - .map(prop -> { - int delimiterIndex = prop.indexOf(":"); - String labelKey; - String labelValue; - if (delimiterIndex == 0) { - throw new BadRequestException("label search filter wrong formatted, missing left side of ':' delimiter"); - } - if (delimiterIndex == (prop.length() - 1)) { - throw new BadRequestException("label search filter wrong formatted, missing right side of ':' delimiter"); - } - if (delimiterIndex < 0) { - labelKey = prop; - labelValue = null; - } else{ - labelKey = prop.substring(0, delimiterIndex); - labelValue = prop.substring(delimiterIndex + 1); - } - return SearchFilter.ofLabel(labelKey, labelValue); - }) - .forEach(filters::add); + labels.stream().map(prop -> { + int delimiterIndex = prop.indexOf(":"); + String labelKey; + String labelValue; + if (delimiterIndex == 0) { + throw new 
BadRequestException( + "label search filter wrong formatted, missing left side of ':' delimiter"); + } + if (delimiterIndex == (prop.length() - 1)) { + throw new BadRequestException( + "label search filter wrong formatted, missing right side of ':' delimiter"); + } + if (delimiterIndex < 0) { + labelKey = prop; + labelValue = null; + } else { + labelKey = prop.substring(0, delimiterIndex); + labelValue = prop.substring(delimiterIndex + 1); + } + return SearchFilter.ofLabel(labelKey, labelValue); + }).forEach(filters::add); } if (globalId != null && globalId > 0) { filters.add(SearchFilter.ofGlobalId(globalId)); @@ -282,13 +290,15 @@ public VersionSearchResults searchVersions(String version, BigInteger offset, Bi filters.add(SearchFilter.ofContentId(contentId)); } - VersionSearchResultsDto results = storage.searchVersions(filters, oBy, oDir, offset.intValue(), limit.intValue()); + VersionSearchResultsDto results = storage.searchVersions(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(results); } @Override - public VersionSearchResults searchVersionsByContent(Boolean canonical, String artifactType, BigInteger offset, - BigInteger limit, SortOrder order, VersionSortBy orderby, String groupId, String artifactId, InputStream data) { + public VersionSearchResults searchVersionsByContent(Boolean canonical, String artifactType, + BigInteger offset, BigInteger limit, SortOrder order, VersionSortBy orderby, String groupId, + String artifactId, InputStream data) { if (orderby == null) { orderby = VersionSortBy.globalId; @@ -301,7 +311,8 @@ public VersionSearchResults searchVersionsByContent(Boolean canonical, String ar } final OrderBy oBy = OrderBy.valueOf(orderby.name()); - final OrderDirection oDir = (order == null || order == SortOrder.asc) ? OrderDirection.asc : OrderDirection.desc; + final OrderDirection oDir = (order == null || order == SortOrder.asc) ? 
OrderDirection.asc + : OrderDirection.desc; Set filters = new HashSet(); if (!StringUtil.isEmpty(groupId)) { @@ -322,7 +333,8 @@ public VersionSearchResults searchVersionsByContent(Boolean canonical, String ar TypedContent typedContent = TypedContent.create(content, ct); if (canonical && artifactType != null) { - String canonicalHash = contentUtils.getCanonicalContentHash(typedContent, artifactType, null, null); + String canonicalHash = contentUtils.getCanonicalContentHash(typedContent, artifactType, null, + null); filters.add(SearchFilter.ofCanonicalHash(canonicalHash)); } else if (!canonical) { String contentHash = content.getSha256Hash(); @@ -331,13 +343,13 @@ public VersionSearchResults searchVersionsByContent(Boolean canonical, String ar throw new BadRequestException(CANONICAL_QUERY_PARAM_ERROR_MESSAGE); } - VersionSearchResultsDto results = storage.searchVersions(filters, oBy, oDir, offset.intValue(), limit.intValue()); + VersionSearchResultsDto results = storage.searchVersions(filters, oBy, oDir, offset.intValue(), + limit.intValue()); return V3ApiUtil.dtoToSearchResults(results); } /** - * Make sure this is ONLY used when request instance is active. - * e.g. in actual http request + * Make sure this is ONLY used when request instance is active. e.g. 
in actual http request */ private String getContentType() { return request.getContentType(); diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/SystemResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v3/SystemResourceImpl.java index 48d5db5f1f..732c10864e 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/SystemResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/SystemResourceImpl.java @@ -1,8 +1,5 @@ package io.apicurio.registry.rest.v3; -import java.util.HashMap; -import java.util.Map; - import io.apicurio.common.apps.core.System; import io.apicurio.common.apps.logging.Logged; import io.apicurio.registry.auth.AuthConfig; @@ -23,17 +20,20 @@ import jakarta.inject.Inject; import jakarta.interceptor.Interceptors; +import java.util.HashMap; +import java.util.Map; + @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class SystemResourceImpl implements SystemResource { @Inject System system; - + @Inject AuthConfig authConfig; - + @Inject UserInterfaceConfigProperties uiConfig; @@ -44,7 +44,7 @@ public class SystemResourceImpl implements SystemResource { * @see io.apicurio.registry.rest.v3.SystemResource#getSystemInfo() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) public SystemInfo getSystemInfo() { SystemInfo info = new SystemInfo(); info.setName(system.getName()); @@ -58,7 +58,7 @@ public SystemInfo getSystemInfo() { * @see io.apicurio.registry.rest.v3.SystemResource#getResourceLimits() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) public Limits getResourceLimits() { var limitsConfig = registryLimitsConfiguration; var limits = new Limits(); 
@@ -76,26 +76,22 @@ public Limits getResourceLimits() { limits.setMaxRequestsPerSecondCount(limitsConfig.getMaxRequestsPerSecondCount()); return limits; } - + /** * @see io.apicurio.registry.rest.v3.SystemResource#getUIConfig() */ @Override - @Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) public UserInterfaceConfig getUIConfig() { return UserInterfaceConfig.builder() - .ui(UserInterfaceConfigUi.builder() - .contextPath(uiConfig.contextPath) - .navPrefixPath(uiConfig.navPrefixPath) - .oaiDocsUrl(uiConfig.docsUrl) - .build()) + .ui(UserInterfaceConfigUi.builder().contextPath(uiConfig.contextPath) + .navPrefixPath(uiConfig.navPrefixPath).oaiDocsUrl(uiConfig.docsUrl).build()) .auth(uiAuthConfig()) .features(UserInterfaceConfigFeatures.builder() .readOnly("true".equals(uiConfig.featureReadOnly)) .breadcrumbs("true".equals(uiConfig.featureBreadcrumbs)) .roleManagement(authConfig.isRbacEnabled()) - .settings("true".equals(uiConfig.featureSettings)) - .build()) + .settings("true".equals(uiConfig.featureSettings)).build()) .build(); } @@ -103,8 +99,9 @@ private UserInterfaceConfigAuth uiAuthConfig() { UserInterfaceConfigAuth rval = new UserInterfaceConfigAuth(); rval.setObacEnabled(authConfig.isObacEnabled()); rval.setRbacEnabled(authConfig.isRbacEnabled()); - rval.setType(authConfig.isOidcAuthEnabled() ? UserInterfaceConfigAuth.Type.oidc : - authConfig.isBasicAuthEnabled() ? UserInterfaceConfigAuth.Type.basic : UserInterfaceConfigAuth.Type.none); + rval.setType(authConfig.isOidcAuthEnabled() ? UserInterfaceConfigAuth.Type.oidc + : authConfig.isBasicAuthEnabled() ? 
UserInterfaceConfigAuth.Type.basic + : UserInterfaceConfigAuth.Type.none); if (authConfig.isOidcAuthEnabled()) { Map options = new HashMap<>(); options.put("url", uiConfig.authOidcUrl); diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/UsersResourceImpl.java b/app/src/main/java/io/apicurio/registry/rest/v3/UsersResourceImpl.java index 48d4ad059f..76f8a7bbb0 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/UsersResourceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/UsersResourceImpl.java @@ -1,25 +1,23 @@ package io.apicurio.registry.rest.v3; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; -import jakarta.interceptor.Interceptors; - -import org.slf4j.Logger; - +import io.apicurio.common.apps.logging.Logged; import io.apicurio.registry.auth.AdminOverride; import io.apicurio.registry.auth.AuthConfig; import io.apicurio.registry.auth.Authorized; import io.apicurio.registry.auth.AuthorizedLevel; import io.apicurio.registry.auth.AuthorizedStyle; import io.apicurio.registry.auth.RoleBasedAccessController; -import io.apicurio.common.apps.logging.Logged; import io.apicurio.registry.metrics.health.liveness.ResponseErrorLivenessCheck; import io.apicurio.registry.metrics.health.readiness.ResponseTimeoutReadinessCheck; import io.apicurio.registry.rest.v3.beans.UserInfo; import io.quarkus.security.identity.SecurityIdentity; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import jakarta.interceptor.Interceptors; +import org.slf4j.Logger; @ApplicationScoped -@Interceptors({ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class}) +@Interceptors({ ResponseErrorLivenessCheck.class, ResponseTimeoutReadinessCheck.class }) @Logged public class UsersResourceImpl implements UsersResource { @@ -42,11 +40,13 @@ public class UsersResourceImpl implements UsersResource { * @see io.apicurio.registry.rest.v3.UsersResource#getCurrentUserInfo() */ @Override - 
@Authorized(style=AuthorizedStyle.None, level=AuthorizedLevel.None) + @Authorized(style = AuthorizedStyle.None, level = AuthorizedLevel.None) public UserInfo getCurrentUserInfo() { UserInfo info = new UserInfo(); info.setUsername(securityIdentity.getPrincipal().getName()); - info.setDisplayName(securityIdentity.getPrincipal().getName()); // TODO need a better implementation of this, maybe use claims first_name and last_name + info.setDisplayName(securityIdentity.getPrincipal().getName()); // TODO need a better implementation + // of this, maybe use claims + // first_name and last_name if (authConfig.isRbacEnabled()) { info.setAdmin(rbac.isAdmin()); info.setDeveloper(rbac.isDeveloper()); diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/V3ApiUtil.java b/app/src/main/java/io/apicurio/registry/rest/v3/V3ApiUtil.java index fa12d1aefb..24077374cf 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/V3ApiUtil.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/V3ApiUtil.java @@ -47,6 +47,7 @@ private V3ApiUtil() { /** * Creates a jax-rs meta-data entity from the id, type, and artifactStore meta-data. + * * @param dto */ public static ArtifactMetaData dtoToArtifactMetaData(ArtifactMetaDataDto dto) { @@ -66,6 +67,7 @@ public static ArtifactMetaData dtoToArtifactMetaData(ArtifactMetaDataDto dto) { /** * Creates a jax-rs version meta-data entity from the id, type, and artifactStore meta-data. 
+ * * @param dto */ public static VersionMetaData dtoToVersionMetaData(ArtifactVersionMetaDataDto dto) { @@ -92,7 +94,8 @@ public static VersionMetaData dtoToVersionMetaData(ArtifactVersionMetaDataDto dt * @param editableArtifactMetaData * @return the updated ArtifactMetaDataDto object */ - public static ArtifactMetaDataDto setEditableMetaDataInArtifact(ArtifactMetaDataDto dto, EditableArtifactMetaDataDto editableArtifactMetaData) { + public static ArtifactMetaDataDto setEditableMetaDataInArtifact(ArtifactMetaDataDto dto, + EditableArtifactMetaDataDto editableArtifactMetaData) { if (editableArtifactMetaData.getName() != null) { dto.setName(editableArtifactMetaData.getName()); } @@ -109,7 +112,8 @@ public static Comparator comparator(SortOrder sortOrder) { return (id1, id2) -> compare(sortOrder, id1, id2); } - public static int compare(SortOrder sortOrder, ArtifactMetaDataDto metaDataDto1, ArtifactMetaDataDto metaDataDto2) { + public static int compare(SortOrder sortOrder, ArtifactMetaDataDto metaDataDto1, + ArtifactMetaDataDto metaDataDto2) { String name1 = metaDataDto1.getName(); if (name1 == null) { name1 = metaDataDto1.getArtifactId(); @@ -118,7 +122,8 @@ public static int compare(SortOrder sortOrder, ArtifactMetaDataDto metaDataDto1, if (name2 == null) { name2 = metaDataDto2.getArtifactId(); } - return sortOrder == SortOrder.desc ? name2.compareToIgnoreCase(name1) : name1.compareToIgnoreCase(name2); + return sortOrder == SortOrder.desc ? 
name2.compareToIgnoreCase(name1) + : name1.compareToIgnoreCase(name2); } public static ArtifactSearchResults dtoToSearchResults(ArtifactSearchResultsDto dto) { @@ -229,12 +234,8 @@ public static GroupMetaData groupDtoToGroup(GroupMetaDataDto dto) { } public static Comment commentDtoToComment(CommentDto dto) { - return Comment.builder() - .commentId(dto.getCommentId()) - .owner(dto.getOwner()) - .createdOn(new Date(dto.getCreatedOn())) - .value(dto.getValue()) - .build(); + return Comment.builder().commentId(dto.getCommentId()).owner(dto.getOwner()) + .createdOn(new Date(dto.getCreatedOn())).value(dto.getValue()).build(); } public static RoleMapping dtoToRoleMapping(RoleMappingDto dto) { @@ -254,7 +255,8 @@ public static RoleMappingSearchResults dtoToRoleMappingSearchResults(RoleMapping return results; } - public static ConfigurationProperty dtoToConfigurationProperty(DynamicConfigPropertyDef def, DynamicConfigPropertyDto dto) { + public static ConfigurationProperty dtoToConfigurationProperty(DynamicConfigPropertyDef def, + DynamicConfigPropertyDto dto) { ConfigurationProperty rval = new ConfigurationProperty(); rval.setName(def.getName()); rval.setValue(dto.getValue()); @@ -265,16 +267,9 @@ public static ConfigurationProperty dtoToConfigurationProperty(DynamicConfigProp } public static BranchMetaData dtoToBranchMetaData(BranchMetaDataDto branch) { - return BranchMetaData.builder() - .groupId(branch.getGroupId()) - .artifactId(branch.getArtifactId()) - .branchId(branch.getBranchId()) - .description(branch.getDescription()) - .owner(branch.getOwner()) - .systemDefined(branch.isSystemDefined()) - .createdOn(new Date(branch.getCreatedOn())) - .modifiedBy(branch.getModifiedBy()) - .modifiedOn(new Date(branch.getModifiedOn())) - .build(); + return BranchMetaData.builder().groupId(branch.getGroupId()).artifactId(branch.getArtifactId()) + .branchId(branch.getBranchId()).description(branch.getDescription()).owner(branch.getOwner()) + 
.systemDefined(branch.isSystemDefined()).createdOn(new Date(branch.getCreatedOn())) + .modifiedBy(branch.getModifiedBy()).modifiedOn(new Date(branch.getModifiedOn())).build(); } } diff --git a/app/src/main/java/io/apicurio/registry/rest/v3/shared/CommonResourceOperations.java b/app/src/main/java/io/apicurio/registry/rest/v3/shared/CommonResourceOperations.java index fbe4ad69f5..ec4d95a5e4 100644 --- a/app/src/main/java/io/apicurio/registry/rest/v3/shared/CommonResourceOperations.java +++ b/app/src/main/java/io/apicurio/registry/rest/v3/shared/CommonResourceOperations.java @@ -5,11 +5,11 @@ import io.apicurio.registry.storage.RegistryStorage; import io.apicurio.registry.storage.dto.ContentWrapperDto; import io.apicurio.registry.types.Current; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; import java.util.List; import java.util.stream.Collectors; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; @ApplicationScoped public class CommonResourceOperations { @@ -20,8 +20,7 @@ public class CommonResourceOperations { public List getReferencesByContentHash(String contentHash) { ContentWrapperDto artifact = storage.getContentByHash(contentHash); - return artifact.getReferences().stream() - .map(V3ApiUtil::referenceDtoToReference) + return artifact.getReferences().stream().map(V3ApiUtil::referenceDtoToReference) .collect(Collectors.toList()); } } diff --git a/app/src/main/java/io/apicurio/registry/rules/RuleApplicationType.java b/app/src/main/java/io/apicurio/registry/rules/RuleApplicationType.java index bd03161d01..6ad977172c 100644 --- a/app/src/main/java/io/apicurio/registry/rules/RuleApplicationType.java +++ b/app/src/main/java/io/apicurio/registry/rules/RuleApplicationType.java @@ -3,5 +3,5 @@ public enum RuleApplicationType { CREATE, UPDATE; - + } diff --git a/app/src/main/java/io/apicurio/registry/rules/RuleContext.java b/app/src/main/java/io/apicurio/registry/rules/RuleContext.java index 
06f0734c10..8a847a8b53 100644 --- a/app/src/main/java/io/apicurio/registry/rules/RuleContext.java +++ b/app/src/main/java/io/apicurio/registry/rules/RuleContext.java @@ -11,9 +11,8 @@ import java.util.Map; /** - * Contains all of the information needed by a rule executor, including the rule-specific - * configuration, current and updated content, and any other meta-data needed. - * + * Contains all of the information needed by a rule executor, including the rule-specific configuration, + * current and updated content, and any other meta-data needed. */ @Getter @Setter diff --git a/app/src/main/java/io/apicurio/registry/rules/RuleExecutor.java b/app/src/main/java/io/apicurio/registry/rules/RuleExecutor.java index e80910f7f1..629f998fa0 100644 --- a/app/src/main/java/io/apicurio/registry/rules/RuleExecutor.java +++ b/app/src/main/java/io/apicurio/registry/rules/RuleExecutor.java @@ -1,15 +1,16 @@ package io.apicurio.registry.rules; /** - * This interface is used to execute/apply a specific rule. Each rule supported by - * the registry will have an implementation of this interface, where the logic specific - * to the rule is applied. For example, the Validity rule will have an implementation. + * This interface is used to execute/apply a specific rule. Each rule supported by the registry will have an + * implementation of this interface, where the logic specific to the rule is applied. For example, the + * Validity rule will have an implementation. */ public interface RuleExecutor { - + /** - * Executes the logic of the rule against the given context. The context - * contains all data and meta-data necessary to execute the rule logic. + * Executes the logic of the rule against the given context. The context contains all data and meta-data + * necessary to execute the rule logic. 
+ * * @param context * @throws RuleViolationException */ diff --git a/app/src/main/java/io/apicurio/registry/rules/RuleExecutorFactory.java b/app/src/main/java/io/apicurio/registry/rules/RuleExecutorFactory.java index 9ed3c1aaa1..c777cb1a46 100644 --- a/app/src/main/java/io/apicurio/registry/rules/RuleExecutorFactory.java +++ b/app/src/main/java/io/apicurio/registry/rules/RuleExecutorFactory.java @@ -1,19 +1,18 @@ package io.apicurio.registry.rules; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; - import io.apicurio.registry.rules.compatibility.CompatibilityRuleExecutor; import io.apicurio.registry.rules.integrity.IntegrityRuleExecutor; import io.apicurio.registry.rules.validity.ValidityRuleExecutor; import io.apicurio.registry.types.RuleType; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; /** * Creates a rule executor from a {@link RuleType}. */ @ApplicationScoped public class RuleExecutorFactory { - + @Inject CompatibilityRuleExecutor compatibility; @Inject diff --git a/app/src/main/java/io/apicurio/registry/rules/RulesConfiguration.java b/app/src/main/java/io/apicurio/registry/rules/RulesConfiguration.java index b5c952047c..dca5d36402 100644 --- a/app/src/main/java/io/apicurio/registry/rules/RulesConfiguration.java +++ b/app/src/main/java/io/apicurio/registry/rules/RulesConfiguration.java @@ -1,9 +1,9 @@ package io.apicurio.registry.rules; import io.apicurio.registry.utils.RegistryProperties; - import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.inject.Produces; + import java.util.Properties; @ApplicationScoped @@ -11,7 +11,8 @@ public class RulesConfiguration { @Produces @ApplicationScoped - public RulesProperties rulesProperties(@RegistryProperties(value = {"apicurio.rules.global"}) Properties properties) { + public RulesProperties rulesProperties( + @RegistryProperties(value = { "apicurio.rules.global" }) Properties properties) { return new 
RulesPropertiesImpl(properties); } diff --git a/app/src/main/java/io/apicurio/registry/rules/RulesProperties.java b/app/src/main/java/io/apicurio/registry/rules/RulesProperties.java index ee1af4f179..5dfad062b7 100644 --- a/app/src/main/java/io/apicurio/registry/rules/RulesProperties.java +++ b/app/src/main/java/io/apicurio/registry/rules/RulesProperties.java @@ -6,9 +6,8 @@ import java.util.List; /** - * A service used to retrieve the default global rules that have been set via registry.rules.global configuration - * properties. E.g.: - * + * A service used to retrieve the default global rules that have been set via registry.rules.global + * configuration properties. E.g.: * %prod.registry.rules.global.compatibility=BACKWARDS * %prod.registry.rules.global.validity=FULL * @@ -16,19 +15,21 @@ public interface RulesProperties { /** - * Get the list of configured default global RuleType enums. A list of RuleType enums can be supplied that will - * be filtered out of the returned list. + * Get the list of configured default global RuleType enums. A list of RuleType enums can be supplied that + * will be filtered out of the returned list. * - * @param excludeRulesFilter a list of RuleType enums to filter from the returned list. If null, the entire - * configured list of default global RuleTypes is returned. - * @return The list of configured default global RuleTypes with any matching the excludeRules list removed. + * @param excludeRulesFilter a list of RuleType enums to filter from the returned list. If null, the + * entire configured list of default global RuleTypes is returned. + * @return The list of configured default global RuleTypes with any matching the excludeRules list + * removed. */ List getFilteredDefaultGlobalRules(List excludeRulesFilter); /** * Whether the supplied RuleType has been configured as a global rule. * - * @return true if the a default global rule has been configured for the supplied RuleType, false otherwise. 
+ * @return true if the a default global rule has been configured for the supplied RuleType, false + * otherwise. */ boolean isDefaultGlobalRuleConfigured(RuleType ruleType); @@ -36,7 +37,7 @@ public interface RulesProperties { * Get the default global RuleConfigurationDto for the supplied RuleType. * * @return The default global RuleConfigurationDto for the supplied RuleType or null if the RuleType has - * not been configured. + * not been configured. */ RuleConfigurationDto getDefaultGlobalRuleConfiguration(RuleType ruleType); diff --git a/app/src/main/java/io/apicurio/registry/rules/RulesPropertiesImpl.java b/app/src/main/java/io/apicurio/registry/rules/RulesPropertiesImpl.java index b0ef5f8190..de017c6fb0 100644 --- a/app/src/main/java/io/apicurio/registry/rules/RulesPropertiesImpl.java +++ b/app/src/main/java/io/apicurio/registry/rules/RulesPropertiesImpl.java @@ -16,14 +16,16 @@ public class RulesPropertiesImpl implements RulesProperties { public RulesPropertiesImpl(Properties properties) { this.properties = properties; this.defaultGlobalRules = properties.stringPropertyNames().stream() - .collect(Collectors.toMap(rulePropertyName -> RuleType.fromValue(rulePropertyName.toUpperCase()), properties::getProperty)); + .collect(Collectors.toMap( + rulePropertyName -> RuleType.fromValue(rulePropertyName.toUpperCase()), + properties::getProperty)); } @Override public List getFilteredDefaultGlobalRules(List excludeRulesFilter) { return defaultGlobalRules.keySet().stream() - .filter(ruleType -> excludeRulesFilter == null || !excludeRulesFilter.contains(ruleType)) - .collect(Collectors.toList()); + .filter(ruleType -> excludeRulesFilter == null || !excludeRulesFilter.contains(ruleType)) + .collect(Collectors.toList()); } @Override @@ -34,7 +36,7 @@ public boolean isDefaultGlobalRuleConfigured(RuleType ruleType) { @Override public RuleConfigurationDto getDefaultGlobalRuleConfiguration(RuleType ruleType) { RuleConfigurationDto ruleConfigurationDto = null; - 
if(defaultGlobalRules.containsKey(ruleType)) { + if (defaultGlobalRules.containsKey(ruleType)) { ruleConfigurationDto = new RuleConfigurationDto(); ruleConfigurationDto.setConfiguration(defaultGlobalRules.get(ruleType)); } diff --git a/app/src/main/java/io/apicurio/registry/rules/RulesService.java b/app/src/main/java/io/apicurio/registry/rules/RulesService.java index 46fa7cd679..dcf3427fb9 100644 --- a/app/src/main/java/io/apicurio/registry/rules/RulesService.java +++ b/app/src/main/java/io/apicurio/registry/rules/RulesService.java @@ -8,14 +8,14 @@ import java.util.Map; /** - * A service used to apply configured rules to a given content update. In other words, - * when artifact content is being created or updated, this service is used to apply - * any rules configured for the artifact. + * A service used to apply configured rules to a given content update. In other words, when artifact content + * is being created or updated, this service is used to apply any rules configured for the artifact. */ public interface RulesService { /** * Applies all configured rules to check whether a content update for an artifact is allowed. + * * @param groupId * @param artifactId * @param artifactType @@ -26,11 +26,12 @@ public interface RulesService { * @throws RuleViolationException */ public void applyRules(String groupId, String artifactId, String artifactType, TypedContent content, - RuleApplicationType ruleApplicationType, List references, - Map resolvedReferences) throws RuleViolationException; + RuleApplicationType ruleApplicationType, List references, + Map resolvedReferences) throws RuleViolationException; /** * Applies a single, specific rule to the content update for the given artifact. 
+ * * @param groupId * @param artifactId * @param artifactType @@ -43,12 +44,13 @@ public void applyRules(String groupId, String artifactId, String artifactType, T * @throws RuleViolationException */ public void applyRule(String groupId, String artifactId, String artifactType, TypedContent content, - RuleType ruleType, String ruleConfiguration, RuleApplicationType ruleApplicationType, - List references, Map resolvedReferences) + RuleType ruleType, String ruleConfiguration, RuleApplicationType ruleApplicationType, + List references, Map resolvedReferences) throws RuleViolationException; /** * Applies configured rules to the content update, relative to ANY artifact version. + * * @param groupId * @param artifactId * @param artifactVersion @@ -59,7 +61,6 @@ public void applyRule(String groupId, String artifactId, String artifactType, Ty * @throws RuleViolationException */ public void applyRules(String groupId, String artifactId, String artifactVersion, String artifactType, - TypedContent updatedContent, List references, - Map resolvedReferences) - throws RuleViolationException; + TypedContent updatedContent, List references, + Map resolvedReferences) throws RuleViolationException; } diff --git a/app/src/main/java/io/apicurio/registry/rules/RulesServiceImpl.java b/app/src/main/java/io/apicurio/registry/rules/RulesServiceImpl.java index 5804c70423..8ebe6eb5ee 100644 --- a/app/src/main/java/io/apicurio/registry/rules/RulesServiceImpl.java +++ b/app/src/main/java/io/apicurio/registry/rules/RulesServiceImpl.java @@ -19,7 +19,6 @@ /** * Implements the {@link RulesService} interface. 
- * */ @ApplicationScoped public class RulesServiceImpl implements RulesService { @@ -38,12 +37,13 @@ public class RulesServiceImpl implements RulesService { ArtifactTypeUtilProviderFactory providerFactory; /** - * @see io.apicurio.registry.rules.RulesService#applyRules(String, String, String, TypedContent, RuleApplicationType, List, Map) + * @see io.apicurio.registry.rules.RulesService#applyRules(String, String, String, TypedContent, + * RuleApplicationType, List, Map) */ @Override public void applyRules(String groupId, String artifactId, String artifactType, TypedContent content, - RuleApplicationType ruleApplicationType, List references, - Map resolvedReferences) throws RuleViolationException { + RuleApplicationType ruleApplicationType, List references, + Map resolvedReferences) throws RuleViolationException { @SuppressWarnings("unchecked") List rules = Collections.EMPTY_LIST; if (ruleApplicationType == RuleApplicationType.UPDATE) { @@ -51,30 +51,34 @@ public void applyRules(String groupId, String artifactId, String artifactType, T } LazyContentList currentContent = null; if (ruleApplicationType == RuleApplicationType.UPDATE) { - currentContent = new LazyContentList(storage, storage.getEnabledArtifactContentIds(groupId, artifactId)); + currentContent = new LazyContentList(storage, + storage.getEnabledArtifactContentIds(groupId, artifactId)); } else { currentContent = new LazyContentList(storage, Collections.emptyList()); } - applyGlobalAndArtifactRules(groupId, artifactId, artifactType, currentContent, content, rules, references, resolvedReferences); + applyGlobalAndArtifactRules(groupId, artifactId, artifactType, currentContent, content, rules, + references, resolvedReferences); } private void applyGlobalAndArtifactRules(String groupId, String artifactId, String artifactType, - List currentContent, TypedContent updatedContent, - List artifactRules, List references, - Map resolvedReferences) { + List currentContent, TypedContent updatedContent, List 
artifactRules, + List references, Map resolvedReferences) { Map globalOrArtifactRulesMap = artifactRules.stream() - .collect(Collectors.toMap(ruleType -> ruleType, ruleType -> storage.getArtifactRule(groupId, artifactId, ruleType))); + .collect(Collectors.toMap(ruleType -> ruleType, + ruleType -> storage.getArtifactRule(groupId, artifactId, ruleType))); if (globalOrArtifactRulesMap.isEmpty()) { List globalRules = storage.getGlobalRules(); globalOrArtifactRulesMap = globalRules.stream() .collect(Collectors.toMap(ruleType -> ruleType, storage::getGlobalRule)); - // Add any default global rules to the map (after filtering out any global rules from artifactStore) - Map filteredDefaultGlobalRulesMap = rulesProperties.getFilteredDefaultGlobalRules(globalRules).stream() - .collect(Collectors.toMap(ruleType -> ruleType, rulesProperties::getDefaultGlobalRuleConfiguration)); + // Add any default global rules to the map (after filtering out any global rules from + // artifactStore) + Map filteredDefaultGlobalRulesMap = rulesProperties + .getFilteredDefaultGlobalRules(globalRules).stream().collect(Collectors + .toMap(ruleType -> ruleType, rulesProperties::getDefaultGlobalRuleConfiguration)); globalOrArtifactRulesMap.putAll(filteredDefaultGlobalRulesMap); } @@ -84,56 +88,58 @@ private void applyGlobalAndArtifactRules(String groupId, String artifactId, Stri for (RuleType ruleType : globalOrArtifactRulesMap.keySet()) { applyRule(groupId, artifactId, artifactType, currentContent, updatedContent, ruleType, - globalOrArtifactRulesMap.get(ruleType).getConfiguration(), references, resolvedReferences); + globalOrArtifactRulesMap.get(ruleType).getConfiguration(), references, + resolvedReferences); } } /** - * @see io.apicurio.registry.rules.RulesService#applyRule(String, String, String, TypedContent, RuleType, String, RuleApplicationType, List, Map) + * @see io.apicurio.registry.rules.RulesService#applyRule(String, String, String, TypedContent, RuleType, + * String, RuleApplicationType, 
List, Map) */ @Override public void applyRule(String groupId, String artifactId, String artifactType, TypedContent content, - RuleType ruleType, String ruleConfiguration, RuleApplicationType ruleApplicationType, - List references, Map resolvedReferences) + RuleType ruleType, String ruleConfiguration, RuleApplicationType ruleApplicationType, + List references, Map resolvedReferences) throws RuleViolationException { LazyContentList currentContent = null; if (ruleApplicationType == RuleApplicationType.UPDATE) { - currentContent = new LazyContentList(storage, storage.getEnabledArtifactContentIds(groupId, artifactId)); + currentContent = new LazyContentList(storage, + storage.getEnabledArtifactContentIds(groupId, artifactId)); } applyRule(groupId, artifactId, artifactType, currentContent, content, ruleType, ruleConfiguration, references, resolvedReferences); } /** - * Applies a single rule. Throws an exception if the rule is violated. + * Applies a single rule. Throws an exception if the rule is violated. 
*/ - private void applyRule(String groupId, String artifactId, String artifactType, List currentContent, - TypedContent updatedContent, RuleType ruleType, String ruleConfiguration, - List references, Map resolvedReferences) { + private void applyRule(String groupId, String artifactId, String artifactType, + List currentContent, TypedContent updatedContent, RuleType ruleType, + String ruleConfiguration, List references, + Map resolvedReferences) { RuleExecutor executor = factory.createExecutor(ruleType); - RuleContext context = RuleContext.builder() - .groupId(groupId) - .artifactId(artifactId) - .artifactType(artifactType) - .currentContent(currentContent) - .updatedContent(updatedContent) - .configuration(ruleConfiguration) - .references(references) - .resolvedReferences(resolvedReferences) - .build(); + RuleContext context = RuleContext.builder().groupId(groupId).artifactId(artifactId) + .artifactType(artifactType).currentContent(currentContent).updatedContent(updatedContent) + .configuration(ruleConfiguration).references(references) + .resolvedReferences(resolvedReferences).build(); executor.execute(context); } /** - * @see io.apicurio.registry.rules.RulesService#applyRules(String, String, String, String, TypedContent, List, Map) + * @see io.apicurio.registry.rules.RulesService#applyRules(String, String, String, String, TypedContent, + * List, Map) */ @Override public void applyRules(String groupId, String artifactId, String artifactVersion, String artifactType, - TypedContent updatedContent, List references, - Map resolvedReferences) throws RuleViolationException { - StoredArtifactVersionDto versionContent = storage.getArtifactVersionContent(groupId, artifactId, artifactVersion); - TypedContent typedVersionContent = TypedContent.create(versionContent.getContent(), versionContent.getContentType()); - applyGlobalAndArtifactRules(groupId, artifactId, artifactType, Collections.singletonList(typedVersionContent), - updatedContent, storage.getArtifactRules(groupId, 
artifactId), references, resolvedReferences); + TypedContent updatedContent, List references, + Map resolvedReferences) throws RuleViolationException { + StoredArtifactVersionDto versionContent = storage.getArtifactVersionContent(groupId, artifactId, + artifactVersion); + TypedContent typedVersionContent = TypedContent.create(versionContent.getContent(), + versionContent.getContentType()); + applyGlobalAndArtifactRules(groupId, artifactId, artifactType, + Collections.singletonList(typedVersionContent), updatedContent, + storage.getArtifactRules(groupId, artifactId), references, resolvedReferences); } } diff --git a/app/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityRuleExecutor.java b/app/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityRuleExecutor.java index 6028f41e76..afe7e14698 100644 --- a/app/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityRuleExecutor.java +++ b/app/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityRuleExecutor.java @@ -21,10 +21,9 @@ import static java.util.Collections.emptyList; /** - * Rule executor for the "Compatibility" rule. The Compatibility Rule is responsible - * for ensuring that the updated content does not violate the configured compatibility - * level. Levels include e.g. Backward compatibility. - * + * Rule executor for the "Compatibility" rule. The Compatibility Rule is responsible for ensuring that the + * updated content does not violate the configured compatibility level. Levels include e.g. Backward + * compatibility. */ @ApplicationScoped @Logged @@ -42,24 +41,25 @@ public void execute(RuleContext context) throws RuleViolationException { ArtifactTypeUtilProvider provider = factory.getArtifactTypeProvider(context.getArtifactType()); CompatibilityChecker checker = provider.getCompatibilityChecker(); List existingArtifacts = context.getCurrentContent() != null - ? 
context.getCurrentContent() : emptyList(); - CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility( - level, - existingArtifacts, - context.getUpdatedContent(), - context.getResolvedReferences()); + ? context.getCurrentContent() : emptyList(); + CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility(level, + existingArtifacts, context.getUpdatedContent(), context.getResolvedReferences()); if (!compatibilityExecutionResult.isCompatible()) { - throw new RuleViolationException(String.format("Incompatible artifact: %s [%s], num of incompatible diffs: {%s}, list of diff types: %s", + throw new RuleViolationException(String.format( + "Incompatible artifact: %s [%s], num of incompatible diffs: {%s}, list of diff types: %s", context.getArtifactId(), context.getArtifactType(), - compatibilityExecutionResult.getIncompatibleDifferences().size(), outputReadableCompatabilityDiffs(compatibilityExecutionResult.getIncompatibleDifferences())), + compatibilityExecutionResult.getIncompatibleDifferences().size(), + outputReadableCompatabilityDiffs( + compatibilityExecutionResult.getIncompatibleDifferences())), RuleType.COMPATIBILITY, context.getConfiguration(), transformCompatibilityDiffs(compatibilityExecutionResult.getIncompatibleDifferences())); } } /** - * Convert the set of compatibility differences into a collection of rule violation causes - * for return to the user. + * Convert the set of compatibility differences into a collection of rule violation causes for return to + * the user. 
+ * * @param differences */ private Set transformCompatibilityDiffs(Set differences) { @@ -78,7 +78,8 @@ private List outputReadableCompatabilityDiffs(Set res = new ArrayList(); for (CompatibilityDifference diff : differences) { - res.add(diff.asRuleViolation().getDescription() + " at " + diff.asRuleViolation().getContext()); + res.add(diff.asRuleViolation().getDescription() + " at " + + diff.asRuleViolation().getContext()); } return res; } else { diff --git a/app/src/main/java/io/apicurio/registry/rules/integrity/IntegrityRuleExecutor.java b/app/src/main/java/io/apicurio/registry/rules/integrity/IntegrityRuleExecutor.java index ba0a364881..6a1c30e8ae 100644 --- a/app/src/main/java/io/apicurio/registry/rules/integrity/IntegrityRuleExecutor.java +++ b/app/src/main/java/io/apicurio/registry/rules/integrity/IntegrityRuleExecutor.java @@ -25,14 +25,14 @@ public class IntegrityRuleExecutor implements RuleExecutor { @Inject ArtifactTypeUtilProviderFactory factory; - + /** * @see io.apicurio.registry.rules.RuleExecutor#execute(io.apicurio.registry.rules.RuleContext) */ @Override public void execute(RuleContext context) throws RuleViolationException { Set levels = parseConfig(context.getConfiguration()); - + // Make sure that the user has included mappings for all references in the content of the artifact. 
if (levels.contains(IntegrityLevel.FULL) || levels.contains(IntegrityLevel.ALL_REFS_MAPPED)) { // Not yet implemented - needs artifact type specific logic to extract the full list of @@ -52,7 +52,8 @@ public void execute(RuleContext context) throws RuleViolationException { } private void verifyAllReferencesHaveMappings(RuleContext context) throws RuleViolationException { - ArtifactTypeUtilProvider artifactTypeProvider = factory.getArtifactTypeProvider(context.getArtifactType()); + ArtifactTypeUtilProvider artifactTypeProvider = factory + .getArtifactTypeProvider(context.getArtifactType()); ContentValidator validator = artifactTypeProvider.getContentValidator(); validator.validateReferences(context.getUpdatedContent(), context.getReferences()); } @@ -60,21 +61,23 @@ private void verifyAllReferencesHaveMappings(RuleContext context) throws RuleVio private void validateReferencesExist(RuleContext context) throws RuleViolationException { List references = context.getReferences(); Map resolvedReferences = context.getResolvedReferences(); - + Set causes = new HashSet<>(); references.forEach(ref -> { if (!resolvedReferences.containsKey(ref.getName())) { RuleViolation violation = new RuleViolation(); violation.setContext(ref.getName()); - violation.setDescription(String.format("Referenced artifact (%s/%s @ %s) does not yet exist in the registry.", ref.getGroupId(), ref.getArtifactId(), ref.getVersion())); + violation.setDescription( + String.format("Referenced artifact (%s/%s @ %s) does not yet exist in the registry.", + ref.getGroupId(), ref.getArtifactId(), ref.getVersion())); causes.add(violation); } }); if (!causes.isEmpty()) { - throw new RuleViolationException("Referenced artifact does not exist.", RuleType.INTEGRITY, + throw new RuleViolationException("Referenced artifact does not exist.", RuleType.INTEGRITY, IntegrityLevel.REFS_EXIST.name(), causes); } - + } private void checkForDuplicateReferences(RuleContext context) throws RuleViolationException { @@ -86,18 
+89,18 @@ private void checkForDuplicateReferences(RuleContext context) throws RuleViolati if (refNames.contains(ref.getName())) { RuleViolation violation = new RuleViolation(); violation.setContext(ref.getName()); - violation.setDescription("Duplicate mapping for artifact reference with name: " + ref.getName()); + violation.setDescription( + "Duplicate mapping for artifact reference with name: " + ref.getName()); causes.add(violation); } refNames.add(ref.getName()); }); if (!causes.isEmpty()) { - throw new RuleViolationException("Duplicate artifact reference(s) detected.", RuleType.INTEGRITY, - IntegrityLevel.NO_DUPLICATES.name(), causes); + throw new RuleViolationException("Duplicate artifact reference(s) detected.", + RuleType.INTEGRITY, IntegrityLevel.NO_DUPLICATES.name(), causes); } } } - /** * @param configuration diff --git a/app/src/main/java/io/apicurio/registry/rules/validity/ValidityRuleExecutor.java b/app/src/main/java/io/apicurio/registry/rules/validity/ValidityRuleExecutor.java index 2c2e8df46f..77a8a2f955 100644 --- a/app/src/main/java/io/apicurio/registry/rules/validity/ValidityRuleExecutor.java +++ b/app/src/main/java/io/apicurio/registry/rules/validity/ValidityRuleExecutor.java @@ -1,14 +1,13 @@ package io.apicurio.registry.rules.validity; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; - import io.apicurio.common.apps.logging.Logged; import io.apicurio.registry.rules.RuleContext; import io.apicurio.registry.rules.RuleExecutor; import io.apicurio.registry.rules.RuleViolationException; import io.apicurio.registry.types.provider.ArtifactTypeUtilProvider; import io.apicurio.registry.types.provider.ArtifactTypeUtilProviderFactory; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; @ApplicationScoped @Logged @@ -16,7 +15,7 @@ public class ValidityRuleExecutor implements RuleExecutor { @Inject ArtifactTypeUtilProviderFactory factory; - + /** * @see 
io.apicurio.registry.rules.RuleExecutor#execute(io.apicurio.registry.rules.RuleContext) */ diff --git a/app/src/main/java/io/apicurio/registry/services/DisabledApisMatcherService.java b/app/src/main/java/io/apicurio/registry/services/DisabledApisMatcherService.java index 6d91342cfd..cb2d5875d3 100644 --- a/app/src/main/java/io/apicurio/registry/services/DisabledApisMatcherService.java +++ b/app/src/main/java/io/apicurio/registry/services/DisabledApisMatcherService.java @@ -47,7 +47,8 @@ public void init(@Observes StartupEvent ev) { public boolean isDisabled(String requestPath) { for (Pattern pattern : disabledPatternsList) { if (pattern.matcher(requestPath).matches()) { - log.warn("Request {} is rejected because it's disabled by pattern {}", requestPath, pattern.pattern()); + log.warn("Request {} is rejected because it's disabled by pattern {}", requestPath, + pattern.pattern()); return true; } } diff --git a/app/src/main/java/io/apicurio/registry/services/RegistryConfigSource.java b/app/src/main/java/io/apicurio/registry/services/RegistryConfigSource.java index 6cc4609448..3b53029e17 100644 --- a/app/src/main/java/io/apicurio/registry/services/RegistryConfigSource.java +++ b/app/src/main/java/io/apicurio/registry/services/RegistryConfigSource.java @@ -11,7 +11,6 @@ /** * ConfigSource that turns env vars into plain properties. *

- * */ public class RegistryConfigSource implements ConfigSource { private Map properties; diff --git a/app/src/main/java/io/apicurio/registry/services/http/ErrorHttpResponse.java b/app/src/main/java/io/apicurio/registry/services/http/ErrorHttpResponse.java index 8fa0da912d..36d803f079 100644 --- a/app/src/main/java/io/apicurio/registry/services/http/ErrorHttpResponse.java +++ b/app/src/main/java/io/apicurio/registry/services/http/ErrorHttpResponse.java @@ -1,8 +1,7 @@ package io.apicurio.registry.services.http; -import jakarta.ws.rs.core.Response; - import io.apicurio.registry.rest.v3.beans.Error; +import jakarta.ws.rs.core.Response; public class ErrorHttpResponse { diff --git a/app/src/main/java/io/apicurio/registry/services/http/RegistryExceptionMapperService.java b/app/src/main/java/io/apicurio/registry/services/http/RegistryExceptionMapperService.java index 3e550c9279..eb0511a5c3 100644 --- a/app/src/main/java/io/apicurio/registry/services/http/RegistryExceptionMapperService.java +++ b/app/src/main/java/io/apicurio/registry/services/http/RegistryExceptionMapperService.java @@ -85,7 +85,8 @@ public class RegistryExceptionMapperService { map.put(LimitExceededException.class, HTTP_CONFLICT); map.put(LogConfigurationNotFoundException.class, HTTP_NOT_FOUND); map.put(MissingRequiredParameterException.class, HTTP_BAD_REQUEST); - map.put(NotAllowedException.class, HTTP_CONFLICT); // We're using 409 instead of 403 to reserve the latter for authx only. + map.put(NotAllowedException.class, HTTP_CONFLICT); // We're using 409 instead of 403 to reserve the + // latter for authx only. map.put(NotAuthorizedException.class, HTTP_FORBIDDEN); map.put(NotFoundException.class, HTTP_NOT_FOUND); map.put(ParametersConflictException.class, HTTP_CONFLICT); @@ -130,7 +131,7 @@ public ErrorHttpResponse mapException(Throwable t) { if (code == HTTP_INTERNAL_ERROR) { // If the error is not something we should ignore, then we report it to the liveness object - // and log it. 
Otherwise we only log it if debug logging is enabled. + // and log it. Otherwise we only log it if debug logging is enabled. if (!livenessUtil.isIgnoreError(t)) { liveness.suspectWithException(t); } @@ -183,8 +184,7 @@ private List toRestCauses(Set violations) { } /** - * Gets the full stack trace for the given exception and returns it as a - * string. + * Gets the full stack trace for the given exception and returns it as a string. * * @param t */ diff --git a/app/src/main/java/io/apicurio/registry/storage/RegistryStorage.java b/app/src/main/java/io/apicurio/registry/storage/RegistryStorage.java index 72a3653fa6..ea0ddd333b 100644 --- a/app/src/main/java/io/apicurio/registry/storage/RegistryStorage.java +++ b/app/src/main/java/io/apicurio/registry/storage/RegistryStorage.java @@ -62,7 +62,6 @@ /** * The artifactStore layer for the registry. - * */ public interface RegistryStorage extends DynamicConfigStorage { @@ -77,20 +76,16 @@ public interface RegistryStorage extends DynamicConfigStorage { void initialize(); /** - * Is the storage initialized and ready to be used? - * This state SHOULD NOT change again during operation, - * and is used for K8s readiness probes, among other things. - * This operation should be fast. + * Is the storage initialized and ready to be used? This state SHOULD NOT change again during operation, + * and is used for K8s readiness probes, among other things. This operation should be fast. * * @return true if yes, false if no */ boolean isReady(); /** - * Is the storage ready AND alive, meaning able to be used? - * This state MAY change multiple times during operation, - * and is used for K8s liveness probes, among other things. - * This operation should be fast. + * Is the storage ready AND alive, meaning able to be used? This state MAY change multiple times during + * operation, and is used for K8s liveness probes, among other things. This operation should be fast. 
* * @return true if yes, false if no */ @@ -102,25 +97,26 @@ public interface RegistryStorage extends DynamicConfigStorage { boolean isReadOnly(); /** - * Create a new artifact in the storage, with or without an initial/first version. Throws an exception if the - * artifact already exists. The first version information can be null, in which case an empty artifact (no versions) - * is created. - * - * Returns the metadata of the newly created artifact and (optionally) the metadata of the first version. + * Create a new artifact in the storage, with or without an initial/first version. Throws an exception if + * the artifact already exists. The first version information can be null, in which case an empty artifact + * (no versions) is created. Returns the metadata of the newly created artifact and (optionally) the + * metadata of the first version. */ - Pair createArtifact(String groupId, String artifactId, String artifactType, - EditableArtifactMetaDataDto artifactMetaData, String version, ContentWrapperDto versionContent, - EditableVersionMetaDataDto versionMetaData, List versionBranches) throws ArtifactAlreadyExistsException, RegistryStorageException; + Pair createArtifact(String groupId, String artifactId, + String artifactType, EditableArtifactMetaDataDto artifactMetaData, String version, + ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, + List versionBranches) throws ArtifactAlreadyExistsException, RegistryStorageException; /** * Deletes an artifact by its group and unique id. Returns list of artifact versions. 
* - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @throws ArtifactNotFoundException * @throws RegistryStorageException */ - List deleteArtifact(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException; + List deleteArtifact(String groupId, String artifactId) + throws ArtifactNotFoundException, RegistryStorageException; /** * Deletes all artifacts in the given group. DOES NOT delete the group. @@ -131,26 +127,28 @@ Pair createArtifact(String grou void deleteArtifacts(String groupId) throws RegistryStorageException; /** - * Gets some artifact content by the unique contentId. This method of getting content - * from storage does not allow extra meta-data to be returned, because the contentId only - * points to a piece of content/data - it is divorced from any artifact version. + * Gets some artifact content by the unique contentId. This method of getting content from storage does + * not allow extra meta-data to be returned, because the contentId only points to a piece of content/data + * - it is divorced from any artifact version. * * @param contentId * @throws ContentNotFoundException * @throws RegistryStorageException */ - ContentWrapperDto getContentById(long contentId) throws ContentNotFoundException, RegistryStorageException; + ContentWrapperDto getContentById(long contentId) + throws ContentNotFoundException, RegistryStorageException; /** - * Gets some artifact content by the SHA-256 hash of that content. This method of getting content - * from storage does not allow extra meta-data to be returned, because the content hash only - * points to a piece of content/data - it is divorced from any artifact version. + * Gets some artifact content by the SHA-256 hash of that content. This method of getting content from + * storage does not allow extra meta-data to be returned, because the content hash only points to a piece + * of content/data - it is divorced from any artifact version. 
* * @param contentHash * @throws ContentNotFoundException * @throws RegistryStorageException */ - ContentWrapperDto getContentByHash(String contentHash) throws ContentNotFoundException, RegistryStorageException; + ContentWrapperDto getContentByHash(String contentHash) + throws ContentNotFoundException, RegistryStorageException; /** * Get a list of all artifact versions that refer to the same content. @@ -168,10 +166,9 @@ Pair createArtifact(String grou List getEnabledArtifactContentIds(String groupId, String artifactId); /** - * Creates a new version of an artifact. Returns a map of meta-data generated by the artifactStore layer, such as the generated, - * globally unique globalId of the new version. - * - * Note: the artifactType is passed in because it is needed when generating canonical content hashes. + * Creates a new version of an artifact. Returns a map of meta-data generated by the artifactStore layer, + * such as the generated, globally unique globalId of the new version. Note: the artifactType is passed in + * because it is needed when generating canonical content hashes. * * @param groupId * @param artifactId @@ -181,15 +178,14 @@ Pair createArtifact(String grou * @param metaData * @param branches */ - ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, String artifactType, - ContentWrapperDto content, EditableVersionMetaDataDto metaData, List branches) + ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, + String artifactType, ContentWrapperDto content, EditableVersionMetaDataDto metaData, + List branches) throws ArtifactNotFoundException, VersionAlreadyExistsException, RegistryStorageException; /** - * Get all artifact ids. - * --- - * Note: This should only be used in older APIs such as the registry V1 REST API and the Confluent API - * --- + * Get all artifact ids. 
--- Note: This should only be used in older APIs such as the registry V1 REST API + * and the Confluent API --- * * @param limit the limit of artifacts * @return all artifact ids @@ -199,17 +195,18 @@ ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifact /** * Search artifacts by given criteria * - * @param filters the set of filters to apply when searching - * @param orderBy the field to order by + * @param filters the set of filters to apply when searching + * @param orderBy the field to order by * @param orderDirection the direction to order the results - * @param offset the number of artifacts to skip - * @param limit the result size limit + * @param offset the number of artifacts to skip + * @param limit the result size limit */ - ArtifactSearchResultsDto searchArtifacts(Set filters, OrderBy orderBy, OrderDirection orderDirection, - int offset, int limit); + ArtifactSearchResultsDto searchArtifacts(Set filters, OrderBy orderBy, + OrderDirection orderDirection, int offset, int limit); /** * Get metadata for an artifact using GA information. + * * @param groupId * @param artifactId * @throws ArtifactNotFoundException @@ -221,7 +218,7 @@ ArtifactMetaDataDto getArtifactMetaData(String groupId, String artifactId) /** * Gets the metadata of the version that matches content. 
* - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param canonical * @param content @@ -229,37 +226,40 @@ ArtifactMetaDataDto getArtifactMetaData(String groupId, String artifactId) * @throws ArtifactNotFoundException * @throws RegistryStorageException */ - ArtifactVersionMetaDataDto getArtifactVersionMetaDataByContent(String groupId, String artifactId, boolean canonical, - TypedContent content, List artifactReferences) throws ArtifactNotFoundException, RegistryStorageException; + ArtifactVersionMetaDataDto getArtifactVersionMetaDataByContent(String groupId, String artifactId, + boolean canonical, TypedContent content, List artifactReferences) + throws ArtifactNotFoundException, RegistryStorageException; /** - * Updates the stored meta-data for an artifact by group and ID. Only the client-editable meta-data can be updated. Client - * editable meta-data includes e.g. name and description + * Updates the stored meta-data for an artifact by group and ID. Only the client-editable meta-data can be + * updated. Client editable meta-data includes e.g. name and description * - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param metaData * @throws ArtifactNotFoundException * @throws RegistryStorageException */ - void updateArtifactMetaData(String groupId, String artifactId, EditableArtifactMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException; + void updateArtifactMetaData(String groupId, String artifactId, EditableArtifactMetaDataDto metaData) + throws ArtifactNotFoundException, RegistryStorageException; /** - * Gets a list of rules configured for a specific Artifact (by group and ID). This will return only the names of the - * rules. + * Gets a list of rules configured for a specific Artifact (by group and ID). This will return only the + * names of the rules. 
* - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @throws ArtifactNotFoundException * @throws RegistryStorageException */ - List getArtifactRules(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException; + List getArtifactRules(String groupId, String artifactId) + throws ArtifactNotFoundException, RegistryStorageException; /** - * Creates an artifact rule for a specific Artifact. If the named rule already exists for the artifact, then - * this should fail. + * Creates an artifact rule for a specific Artifact. If the named rule already exists for the artifact, + * then this should fail. * - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param rule * @param config @@ -273,17 +273,18 @@ void createArtifactRule(String groupId, String artifactId, RuleType rule, RuleCo /** * Deletes all rules stored/configured for the artifact. * - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @throws ArtifactNotFoundException * @throws RegistryStorageException */ - void deleteArtifactRules(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException; + void deleteArtifactRules(String groupId, String artifactId) + throws ArtifactNotFoundException, RegistryStorageException; /** * Gets all of the information for a single rule configured on a given artifact. * - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param rule * @throws ArtifactNotFoundException @@ -296,7 +297,7 @@ RuleConfigurationDto getArtifactRule(String groupId, String artifactId, RuleType /** * Updates the configuration information for a single rule on a given artifact. 
* - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param rule * @param config @@ -310,48 +311,51 @@ void updateArtifactRule(String groupId, String artifactId, RuleType rule, RuleCo /** * Deletes a single stored/configured rule for a given artifact. * - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param rule * @throws ArtifactNotFoundException * @throws RuleNotFoundException * @throws RegistryStorageException */ - void deleteArtifactRule(String groupId, String artifactId, RuleType rule) throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException; + void deleteArtifactRule(String groupId, String artifactId, RuleType rule) + throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException; /** * Gets a sorted set of all artifact versions that exist for a given artifact. * - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @throws ArtifactNotFoundException * @throws RegistryStorageException */ - List getArtifactVersions(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException; - + List getArtifactVersions(String groupId, String artifactId) + throws ArtifactNotFoundException, RegistryStorageException; /** * Gets a sorted set of all artifact versions that exist for a given artifact. 
* - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @throws ArtifactNotFoundException * @throws RegistryStorageException */ - List getArtifactVersions(String groupId, String artifactId, RetrievalBehavior behavior) throws ArtifactNotFoundException, RegistryStorageException; + List getArtifactVersions(String groupId, String artifactId, RetrievalBehavior behavior) + throws ArtifactNotFoundException, RegistryStorageException; /** * Fetch the versions of the given artifact * - * @param filters the search filters - * @param limit the result size limit - * @param offset the number of versions to skip + * @param filters the search filters + * @param limit the result size limit + * @param offset the number of versions to skip * @return the artifact versions, limited * @throws ArtifactNotFoundException * @throws RegistryStorageException */ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderBy, - OrderDirection orderDirection, int offset, int limit) throws ArtifactNotFoundException, RegistryStorageException; + OrderDirection orderDirection, int offset, int limit) + throws ArtifactNotFoundException, RegistryStorageException; /** * Gets the stored artifact content for the artifact version with the given unique global ID. @@ -360,60 +364,65 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB * @throws ArtifactNotFoundException * @throws RegistryStorageException */ - StoredArtifactVersionDto getArtifactVersionContent(long globalId) throws ArtifactNotFoundException, RegistryStorageException; + StoredArtifactVersionDto getArtifactVersionContent(long globalId) + throws ArtifactNotFoundException, RegistryStorageException; /** * Gets the stored value for a single version of a given artifact. 
* - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param version * @throws ArtifactNotFoundException * @throws VersionNotFoundException * @throws RegistryStorageException */ - StoredArtifactVersionDto getArtifactVersionContent(String groupId, String artifactId, String version) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException; + StoredArtifactVersionDto getArtifactVersionContent(String groupId, String artifactId, String version) + throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException; /** * Deletes a single version of a given artifact. * - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param version * @throws ArtifactNotFoundException * @throws VersionNotFoundException * @throws RegistryStorageException */ - void deleteArtifactVersion(String groupId, String artifactId, String version) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException; + void deleteArtifactVersion(String groupId, String artifactId, String version) + throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException; /** - * Gets the stored meta-data for a single version of an artifact. This will return all meta-data for the + * Gets the stored meta-data for a single version of an artifact. This will return all meta-data for the * version, including any user edited meta-data along with anything generated by the artifactStore. 
* - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param version * @throws ArtifactNotFoundException * @throws VersionNotFoundException * @throws RegistryStorageException */ - ArtifactVersionMetaDataDto getArtifactVersionMetaData(String groupId, String artifactId, String version) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException; + ArtifactVersionMetaDataDto getArtifactVersionMetaData(String groupId, String artifactId, String version) + throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException; /** - * Gets the stored meta-data for a single version of an artifact. This will return all meta-data for the + * Gets the stored meta-data for a single version of an artifact. This will return all meta-data for the * version, including any user edited meta-data along with anything generated by the artifactStore. * * @param globalId * @throws VersionNotFoundException * @throws RegistryStorageException */ - ArtifactVersionMetaDataDto getArtifactVersionMetaData(Long globalId) throws VersionNotFoundException, RegistryStorageException; + ArtifactVersionMetaDataDto getArtifactVersionMetaData(Long globalId) + throws VersionNotFoundException, RegistryStorageException; /** - * Updates the user-editable meta-data for a single version of a given artifact. Only the client-editable - * meta-data can be updated. Client editable meta-data includes e.g. name and description. + * Updates the user-editable meta-data for a single version of a given artifact. Only the client-editable + * meta-data can be updated. Client editable meta-data includes e.g. name and description. 
* - * @param groupId (optional) + * @param groupId (optional) * @param artifactId * @param version * @param metaData @@ -421,7 +430,9 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB * @throws VersionNotFoundException * @throws RegistryStorageException */ - void updateArtifactVersionMetaData(String groupId, String artifactId, String version, EditableVersionMetaDataDto metaData) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException; + void updateArtifactVersionMetaData(String groupId, String artifactId, String version, + EditableVersionMetaDataDto metaData) + throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException; /** * Gets a list of all global rule names. @@ -431,14 +442,16 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB List getGlobalRules() throws RegistryStorageException; /** - * Creates a single global rule. Duplicates (by name) are not allowed. Stores the rule name and configuration. + * Creates a single global rule. Duplicates (by name) are not allowed. Stores the rule name and + * configuration. * * @param rule * @param config * @throws RuleAlreadyExistsException * @throws RegistryStorageException */ - void createGlobalRule(RuleType rule, RuleConfigurationDto config) throws RuleAlreadyExistsException, RegistryStorageException; + void createGlobalRule(RuleType rule, RuleConfigurationDto config) + throws RuleAlreadyExistsException, RegistryStorageException; /** * Deletes all of the globally configured rules. @@ -464,7 +477,8 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB * @throws RuleNotFoundException * @throws RegistryStorageException */ - void updateGlobalRule(RuleType rule, RuleConfigurationDto config) throws RuleNotFoundException, RegistryStorageException; + void updateGlobalRule(RuleType rule, RuleConfigurationDto config) + throws RuleNotFoundException, RegistryStorageException; /** * Deletes a single global rule. 
@@ -476,7 +490,8 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB void deleteGlobalRule(RuleType rule) throws RuleNotFoundException, RegistryStorageException; /** - * Creates a new empty group and stores it's metadata. When creating an artifact the group is automatically created in it does not exist. + * Creates a new empty group and stores it's metadata. When creating an artifact the group is + * automatically created in it does not exist. * * @param group * @throws GroupAlreadyExistsException @@ -495,6 +510,7 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB /** * Updates the metadata for a group. + * * @param groupId * @param dto */ @@ -516,8 +532,8 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB GroupMetaDataDto getGroupMetaData(String groupId) throws GroupNotFoundException, RegistryStorageException; /** - * Called to export all data in the registry. Caller provides a handle to handle the data/entities. This - * should be used to stream the data from the storage to some output source (e.g. a HTTP response). It is + * Called to export all data in the registry. Caller provides a handle to handle the data/entities. This + * should be used to stream the data from the storage to some output source (e.g. a HTTP response). It is * important that the full dataset is *not* kept in memory. * * @param handler @@ -529,11 +545,14 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB * Called to import previously exported data into the registry. * * @param entities - * @param preserveGlobalId Preserve global ids. If false, global ids will be set to next id in global id sequence. - * @param preserveContentId Preserve content id. If false, content ids will be set to the next ids in the content id sequence. Content-Version mapping will be preserved. + * @param preserveGlobalId Preserve global ids. If false, global ids will be set to next id in global id + * sequence. 
+ * @param preserveContentId Preserve content id. If false, content ids will be set to the next ids in the + * content id sequence. Content-Version mapping will be preserved. * @throws RegistryStorageException */ - void importData(EntityInputStream entities, boolean preserveGlobalId, boolean preserveContentId) throws RegistryStorageException; + void importData(EntityInputStream entities, boolean preserveGlobalId, boolean preserveContentId) + throws RegistryStorageException; /** * Counts the total number of artifacts in the registry. @@ -552,6 +571,7 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB /** * Counts the number of active (not disabled) versions of an artifact. + * * @param groupId * @param artifactId */ @@ -571,7 +591,8 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB * @param role * @param principalName */ - void createRoleMapping(String principalId, String role, String principalName) throws RegistryStorageException; + void createRoleMapping(String principalId, String role, String principalName) + throws RegistryStorageException; /** * Gets the list of all the role mappings in the registry. @@ -580,8 +601,9 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB /** * Search for role mappings. - * @param offset the number of artifacts to skip - * @param limit the result size limit + * + * @param offset the number of artifacts to skip + * @param limit the result size limit */ RoleMappingSearchResultsDto searchRoleMappings(int offset, int limit) throws RegistryStorageException; @@ -593,8 +615,7 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB RoleMappingDto getRoleMapping(String principalId) throws RegistryStorageException; /** - * Gets the role for a single user. This returns null if there is no role mapped for - * the given principal. + * Gets the role for a single user. This returns null if there is no role mapped for the given principal. 
* * @param principalId */ @@ -621,8 +642,8 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB void deleteAllUserData(); /** - * Called to create a single-use download "link". This can then be consumed using - * "consumeDownload()". Used to support browser flows for features like /admin/export. + * Called to create a single-use download "link". This can then be consumed using "consumeDownload()". + * Used to support browser flows for features like /admin/export. * * @param context * @throws RegistryStorageException @@ -637,8 +658,8 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB DownloadContextDto consumeDownload(String downloadId) throws RegistryStorageException; /** - * Called to delete any expired rows in the downloads table. This is basically cleaning up - * any single-use download links that were never "clicked". + * Called to delete any expired rows in the downloads table. This is basically cleaning up any single-use + * download links that were never "clicked". * * @throws RegistryStorageException */ @@ -653,22 +674,23 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB DynamicConfigPropertyDto getRawConfigProperty(String propertyName); /** - * Gets a list of properties with stale state. This would inform a caching - * layer that the cache should be invalidated. + * Gets a list of properties with stale state. This would inform a caching layer that the cache should be + * invalidated. * * @param since instant representing the last time this check was done (has anything changed since) * @return a list of stale configs */ List getStaleConfigProperties(Instant since); - /** - * @return The artifact references resolved as a map containing the reference name as key and the referenced artifact content. + * @return The artifact references resolved as a map containing the reference name as key and the + * referenced artifact content. 
*/ Map resolveReferences(List references); /** * Quickly checks for the existence of a given artifact. + * * @param groupId * @param artifactId * @return true if an artifact exists with the coordinates passed as parameters @@ -678,6 +700,7 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB /** * Quickly checks for the existence of a given group. + * * @param groupId * @return true if a group exists with the id passed as parameter * @throws RegistryStorageException @@ -686,6 +709,7 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB /** * Gets a list of content IDs that have at least one reference to the given artifact version. + * * @param groupId * @param artifactId * @param version @@ -695,6 +719,7 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB /** * Gets a list of global IDs that have at least one reference to the given artifact version. + * * @param groupId * @param artifactId * @param version @@ -704,33 +729,37 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB /** * Gets a list of inbound references for a given artifact version. + * * @param groupId * @param artifactId * @param version * @return the list of inbound references to the given artifact version */ - List getInboundArtifactReferences(String groupId, String artifactId, String version); + List getInboundArtifactReferences(String groupId, String artifactId, + String version); /** * Quickly checks for the existence of a specific artifact version. 
+ * * @param groupId * @param artifactId * @return true if an artifact version exists with the coordinates passed as parameters * @throws RegistryStorageException */ - boolean isArtifactVersionExists(String groupId, String artifactId, String version) throws RegistryStorageException; + boolean isArtifactVersionExists(String groupId, String artifactId, String version) + throws RegistryStorageException; /** * Search groups by given criteria * - * @param filters the set of filters to apply when searching - * @param orderBy the field to order by + * @param filters the set of filters to apply when searching + * @param orderBy the field to order by * @param orderDirection the direction to order the results - * @param offset the number of artifacts to skip - * @param limit the result size limit + * @param offset the number of artifacts to skip + * @param limit the result size limit */ - GroupSearchResultsDto searchGroups(Set filters, OrderBy orderBy, OrderDirection orderDirection, Integer offset, Integer limit); - + GroupSearchResultsDto searchGroups(Set filters, OrderBy orderBy, + OrderDirection orderDirection, Integer offset, Integer limit); /** * Creates a new comment for an artifact version. 
@@ -770,54 +799,77 @@ VersionSearchResultsDto searchVersions(Set filters, OrderBy orderB * @param commentId * @param value */ - void updateArtifactVersionComment(String groupId, String artifactId, String version, String commentId, String value); - + void updateArtifactVersionComment(String groupId, String artifactId, String version, String commentId, + String value); void resetGlobalId(); + void resetContentId(); - void resetCommentId(); + void resetCommentId(); long nextContentId(); + long nextGlobalId(); + long nextCommentId(); void importComment(CommentEntity entity); + void importGroup(GroupEntity entity); + void importGlobalRule(GlobalRuleEntity entity); + void importContent(ContentEntity entity); + void importArtifact(ArtifactEntity entity); + void importArtifactVersion(ArtifactVersionEntity entity); + void importArtifactRule(ArtifactRuleEntity entity); + void importBranch(BranchEntity entity); boolean isContentExists(String contentHash) throws RegistryStorageException; - boolean isArtifactRuleExists(String groupId, String artifactId, RuleType rule) throws RegistryStorageException; + + boolean isArtifactRuleExists(String groupId, String artifactId, RuleType rule) + throws RegistryStorageException; + boolean isGlobalRuleExists(RuleType rule) throws RegistryStorageException; + boolean isRoleMappingExists(String principalId); void updateContentCanonicalHash(String newCanonicalHash, long contentId, String contentHash); + Optional contentIdFromHash(String contentHash); BranchSearchResultsDto getBranches(GA ga, int offset, int limit); + BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, List versions); + BranchMetaDataDto getBranchMetaData(GA ga, BranchId branchId); + void updateBranchMetaData(GA ga, BranchId branchId, EditableBranchMetaDataDto dto); + void deleteBranch(GA ga, BranchId branchId); + GAV getBranchTip(GA ga, BranchId branchId, RetrievalBehavior behavior); + VersionSearchResultsDto getBranchVersions(GA ga, BranchId branchId, 
int offset, int limit); + void replaceBranchVersions(GA ga, BranchId branchId, List versions); + void appendVersionToBranch(GA ga, BranchId branchId, VersionId version); /** - * Triggers a snapshot creation of the internal database. + * Triggers a snapshot creation of the internal database. * * @throws RegistryStorageException */ String triggerSnapshotCreation() throws RegistryStorageException; /** - * Creates the snapshot of the internal database based on configuration. + * Creates the snapshot of the internal database based on configuration. * * @param snapshotLocation * @throws RegistryStorageException @@ -832,5 +884,4 @@ enum RetrievalBehavior { SKIP_DISABLED_LATEST } - } diff --git a/app/src/main/java/io/apicurio/registry/storage/RegistryStorageProducer.java b/app/src/main/java/io/apicurio/registry/storage/RegistryStorageProducer.java index 78d647b14a..696504bf25 100644 --- a/app/src/main/java/io/apicurio/registry/storage/RegistryStorageProducer.java +++ b/app/src/main/java/io/apicurio/registry/storage/RegistryStorageProducer.java @@ -54,13 +54,13 @@ public RegistryStorage current() { .comparing(RegistryStorageDecorator::order); List activeDecorators = decorators.stream() - .filter(RegistryStorageDecorator::isEnabled) - .sorted(decoratorComparator) + .filter(RegistryStorageDecorator::isEnabled).sorted(decoratorComparator) .collect(Collectors.toList()); if (!activeDecorators.isEmpty()) { log.debug("Following RegistryStorage decorators have been enabled (in order): {}", - activeDecorators.stream().map(d -> d.getClass().getName()).collect(Collectors.toList())); + activeDecorators.stream().map(d -> d.getClass().getName()) + .collect(Collectors.toList())); for (int i = activeDecorators.size() - 1; i >= 0; i--) { RegistryStorageDecorator decorator = activeDecorators.get(i); @@ -75,7 +75,6 @@ public RegistryStorage current() { return cachedCurrent; } - @Produces @ApplicationScoped @Raw @@ -88,16 +87,17 @@ public RegistryStorage raw() { } else if 
("sql".equals(registryStorageType)) { cachedRaw = sqlRegistryStorage; } else { - throw new IllegalStateException(String.format("No Registry storage variant defined for value %s", registryStorageType)); + throw new IllegalStateException(String + .format("No Registry storage variant defined for value %s", registryStorageType)); } cachedRaw.initialize(); - log.info("Using the following RegistryStorage implementation: {}", cachedRaw.getClass().getName()); + log.info("Using the following RegistryStorage implementation: {}", + cachedRaw.getClass().getName()); } return cachedRaw; } - @Produces @ApplicationScoped public DynamicConfigStorage configStorage() { diff --git a/app/src/main/java/io/apicurio/registry/storage/RegistryStorageProvider.java b/app/src/main/java/io/apicurio/registry/storage/RegistryStorageProvider.java index c1e4833c90..7090b39c25 100644 --- a/app/src/main/java/io/apicurio/registry/storage/RegistryStorageProvider.java +++ b/app/src/main/java/io/apicurio/registry/storage/RegistryStorageProvider.java @@ -1,10 +1,8 @@ package io.apicurio.registry.storage; /** - * Provider interface for non-default storage interfaces. - * - * It's mandatory to implement this interface for non-default storage implementations. - * + * Provider interface for non-default storage interfaces. It's mandatory to implement this interface for + * non-default storage implementations. 
*/ public interface RegistryStorageProvider { diff --git a/app/src/main/java/io/apicurio/registry/storage/StorageBehaviorProperties.java b/app/src/main/java/io/apicurio/registry/storage/StorageBehaviorProperties.java index 6a07da9769..e3dbafeb2e 100644 --- a/app/src/main/java/io/apicurio/registry/storage/StorageBehaviorProperties.java +++ b/app/src/main/java/io/apicurio/registry/storage/StorageBehaviorProperties.java @@ -2,9 +2,8 @@ import io.apicurio.common.apps.config.Info; import io.apicurio.registry.storage.RegistryStorage.RetrievalBehavior; -import org.eclipse.microprofile.config.inject.ConfigProperty; - import jakarta.enterprise.context.ApplicationScoped; +import org.eclipse.microprofile.config.inject.ConfigProperty; @ApplicationScoped public class StorageBehaviorProperties { diff --git a/app/src/main/java/io/apicurio/registry/storage/StorageEvent.java b/app/src/main/java/io/apicurio/registry/storage/StorageEvent.java index 831a91d1c2..124915bcfd 100644 --- a/app/src/main/java/io/apicurio/registry/storage/StorageEvent.java +++ b/app/src/main/java/io/apicurio/registry/storage/StorageEvent.java @@ -7,10 +7,9 @@ import lombok.ToString; /** - * CDI event fired by the storage implementation. - * Differs from {@see io.apicurio.registry.storage.impl.sql.SqlStorageEvent} because - * this event is fired by non-SQL implementations as well. - * + * CDI event fired by the storage implementation. Differs from + * {@see io.apicurio.registry.storage.impl.sql.SqlStorageEvent} because this event is fired by non-SQL + * implementations as well. 
*/ @Builder @Getter diff --git a/app/src/main/java/io/apicurio/registry/storage/VersionStateExt.java b/app/src/main/java/io/apicurio/registry/storage/VersionStateExt.java index eba6e04015..b79177e827 100644 --- a/app/src/main/java/io/apicurio/registry/storage/VersionStateExt.java +++ b/app/src/main/java/io/apicurio/registry/storage/VersionStateExt.java @@ -1,16 +1,15 @@ package io.apicurio.registry.storage; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Map; -import java.util.function.Consumer; - -import org.slf4j.Logger; - import io.apicurio.registry.storage.error.InvalidVersionStateException; import io.apicurio.registry.types.VersionState; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; +import org.slf4j.Logger; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Consumer; @ApplicationScoped public class VersionStateExt { @@ -24,7 +23,8 @@ public class VersionStateExt { transitions.put(VersionState.DEPRECATED, EnumSet.of(VersionState.ENABLED, VersionState.DISABLED)); } - public static final EnumSet ACTIVE_STATES = EnumSet.of(VersionState.ENABLED, VersionState.DEPRECATED, VersionState.DISABLED); + public static final EnumSet ACTIVE_STATES = EnumSet.of(VersionState.ENABLED, + VersionState.DEPRECATED, VersionState.DISABLED); @Inject Logger log; @@ -34,7 +34,8 @@ public boolean canTransition(VersionState before, VersionState after) { return states.contains(after); } - public void validateState(EnumSet states, VersionState state, String groupId, String artifactId, String version) { + public void validateState(EnumSet states, VersionState state, String groupId, + String artifactId, String version) { if (states != null && !states.contains(state)) { throw new InvalidVersionStateException(groupId, artifactId, version, state); } @@ -47,7 +48,8 @@ public void logIfDeprecated(String groupId, Object artifactId, Object version, V } } - public void 
applyState(Consumer consumer, VersionState previousState, VersionState newState) { + public void applyState(Consumer consumer, VersionState previousState, + VersionState newState) { if (previousState != newState) { if (previousState != null) { if (canTransition(previousState, newState)) { diff --git a/app/src/main/java/io/apicurio/registry/storage/decorator/ReadOnlyRegistryStorageDecorator.java b/app/src/main/java/io/apicurio/registry/storage/decorator/ReadOnlyRegistryStorageDecorator.java index f93ba93544..9ff07c9b27 100644 --- a/app/src/main/java/io/apicurio/registry/storage/decorator/ReadOnlyRegistryStorageDecorator.java +++ b/app/src/main/java/io/apicurio/registry/storage/decorator/ReadOnlyRegistryStorageDecorator.java @@ -29,9 +29,9 @@ import io.apicurio.registry.storage.impexp.EntityInputStream; import io.apicurio.registry.types.RuleType; import io.apicurio.registry.utils.impexp.ArtifactEntity; -import io.apicurio.registry.utils.impexp.BranchEntity; import io.apicurio.registry.utils.impexp.ArtifactRuleEntity; import io.apicurio.registry.utils.impexp.ArtifactVersionEntity; +import io.apicurio.registry.utils.impexp.BranchEntity; import io.apicurio.registry.utils.impexp.CommentEntity; import io.apicurio.registry.utils.impexp.ContentEntity; import io.apicurio.registry.utils.impexp.GlobalRuleEntity; @@ -44,108 +44,105 @@ import java.util.function.Supplier; @ApplicationScoped -public class ReadOnlyRegistryStorageDecorator extends RegistryStorageDecoratorReadOnlyBase implements RegistryStorageDecorator { +public class ReadOnlyRegistryStorageDecorator extends RegistryStorageDecoratorReadOnlyBase + implements RegistryStorageDecorator { public static final String READ_ONLY_MODE_ENABLED_PROPERTY_NAME = "apicurio.storage.read-only.enabled"; - - @Dynamic(label = "Storage read-only mode", description = "When selected, " + - "Registry will return an error for operations that write to the storage (this property excepted).") + @Dynamic(label = "Storage read-only mode", 
description = "When selected, " + + "Registry will return an error for operations that write to the storage (this property excepted).") @ConfigProperty(name = READ_ONLY_MODE_ENABLED_PROPERTY_NAME, defaultValue = "false") @Info(category = "storage", description = "Enable Registry storage read-only mode", availableSince = "2.5.0.Final") Supplier readOnlyModeEnabled; - @Override public boolean isEnabled() { return true; } - @Override public int order() { return RegistryStorageDecoratorOrderConstants.READ_ONLY_DECORATOR; } - @Override public void setDelegate(RegistryStorage delegate) { super.setDelegate(delegate); } - private void checkReadOnly() { if (isReadOnly()) { throw new ReadOnlyStorageException("Unsupported write operation. Storage is in read-only mode."); } } - @Override public boolean isReadOnly() { return delegate.isReadOnly() || readOnlyModeEnabled.get(); } - @Override - public Pair createArtifact(String groupId, String artifactId, String artifactType, EditableArtifactMetaDataDto artifactMetaData, String version, ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, List versionBranches) throws RegistryStorageException { + public Pair createArtifact(String groupId, + String artifactId, String artifactType, EditableArtifactMetaDataDto artifactMetaData, + String version, ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, + List versionBranches) throws RegistryStorageException { checkReadOnly(); - return delegate.createArtifact(groupId, artifactId, artifactType, artifactMetaData, version, versionContent, versionMetaData, versionBranches); + return delegate.createArtifact(groupId, artifactId, artifactType, artifactMetaData, version, + versionContent, versionMetaData, versionBranches); } - @Override - public List deleteArtifact(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException { + public List deleteArtifact(String groupId, String artifactId) + throws 
ArtifactNotFoundException, RegistryStorageException { checkReadOnly(); return delegate.deleteArtifact(groupId, artifactId); } - @Override public void deleteArtifacts(String groupId) throws RegistryStorageException { checkReadOnly(); delegate.deleteArtifacts(groupId); } - @Override - public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, String artifactType, ContentWrapperDto content, EditableVersionMetaDataDto metaData, List branches) throws RegistryStorageException { + public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, + String artifactType, ContentWrapperDto content, EditableVersionMetaDataDto metaData, + List branches) throws RegistryStorageException { checkReadOnly(); - return delegate.createArtifactVersion(groupId, artifactId, version, artifactType, content, metaData, branches); + return delegate.createArtifactVersion(groupId, artifactId, version, artifactType, content, metaData, + branches); } - @Override - public void updateArtifactMetaData(String groupId, String artifactId, EditableArtifactMetaDataDto metaData) - throws ArtifactNotFoundException, RegistryStorageException { + public void updateArtifactMetaData(String groupId, String artifactId, + EditableArtifactMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException { checkReadOnly(); delegate.updateArtifactMetaData(groupId, artifactId, metaData); } - @Override - public void createArtifactRule(String groupId, String artifactId, RuleType rule, RuleConfigurationDto config) + public void createArtifactRule(String groupId, String artifactId, RuleType rule, + RuleConfigurationDto config) throws ArtifactNotFoundException, RuleAlreadyExistsException, RegistryStorageException { checkReadOnly(); delegate.createArtifactRule(groupId, artifactId, rule, config); } - @Override - public void deleteArtifactRules(String groupId, String artifactId) throws ArtifactNotFoundException, 
RegistryStorageException { + public void deleteArtifactRules(String groupId, String artifactId) + throws ArtifactNotFoundException, RegistryStorageException { checkReadOnly(); delegate.deleteArtifactRules(groupId, artifactId); } - @Override - public void updateArtifactRule(String groupId, String artifactId, RuleType rule, RuleConfigurationDto config) + public void updateArtifactRule(String groupId, String artifactId, RuleType rule, + RuleConfigurationDto config) throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException { checkReadOnly(); delegate.updateArtifactRule(groupId, artifactId, rule, config); } - @Override public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException { @@ -153,7 +150,6 @@ public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) delegate.deleteArtifactRule(groupId, artifactId, rule); } - @Override public void deleteArtifactVersion(String groupId, String artifactId, String version) throws ArtifactNotFoundException, RegistryStorageException { @@ -161,15 +157,13 @@ public void deleteArtifactVersion(String groupId, String artifactId, String vers delegate.deleteArtifactVersion(groupId, artifactId, version); } - @Override - public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, EditableVersionMetaDataDto metaData) - throws ArtifactNotFoundException, RegistryStorageException { + public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, + EditableVersionMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException { checkReadOnly(); delegate.updateArtifactVersionMetaData(groupId, artifactId, version, metaData); } - @Override public void createGlobalRule(RuleType rule, RuleConfigurationDto config) throws RuleAlreadyExistsException, RegistryStorageException { @@ -177,41 +171,37 @@ public void 
createGlobalRule(RuleType rule, RuleConfigurationDto config) delegate.createGlobalRule(rule, config); } - @Override public void deleteGlobalRules() throws RegistryStorageException { checkReadOnly(); delegate.deleteGlobalRules(); } - @Override - public void updateGlobalRule(RuleType rule, RuleConfigurationDto config) throws RuleNotFoundException, RegistryStorageException { + public void updateGlobalRule(RuleType rule, RuleConfigurationDto config) + throws RuleNotFoundException, RegistryStorageException { checkReadOnly(); delegate.updateGlobalRule(rule, config); } - @Override public void deleteGlobalRule(RuleType rule) throws RuleNotFoundException, RegistryStorageException { checkReadOnly(); delegate.deleteGlobalRule(rule); } - @Override - public void createGroup(GroupMetaDataDto group) throws GroupAlreadyExistsException, RegistryStorageException { + public void createGroup(GroupMetaDataDto group) + throws GroupAlreadyExistsException, RegistryStorageException { checkReadOnly(); delegate.createGroup(group); } - @Override public void updateGroupMetaData(String groupId, EditableGroupMetaDataDto dto) { checkReadOnly(); delegate.updateGroupMetaData(groupId, dto); } - @Override public void deleteGroup(String groupId) throws GroupNotFoundException, RegistryStorageException { @@ -219,7 +209,6 @@ public void deleteGroup(String groupId) throws GroupNotFoundException, RegistryS delegate.deleteGroup(groupId); } - @Override public void importData(EntityInputStream entities, boolean preserveGlobalId, boolean preserveContentId) throws RegistryStorageException { @@ -227,35 +216,31 @@ public void importData(EntityInputStream entities, boolean preserveGlobalId, boo delegate.importData(entities, preserveGlobalId, preserveContentId); } - @Override - public void createRoleMapping(String principalId, String role, String principalName) throws RegistryStorageException { + public void createRoleMapping(String principalId, String role, String principalName) + throws RegistryStorageException { 
checkReadOnly(); delegate.createRoleMapping(principalId, role, principalName); } - @Override public void deleteRoleMapping(String principalId) throws RegistryStorageException { checkReadOnly(); delegate.deleteRoleMapping(principalId); } - @Override public void updateRoleMapping(String principalId, String role) throws RegistryStorageException { checkReadOnly(); delegate.updateRoleMapping(principalId, role); } - @Override public void deleteAllUserData() throws RegistryStorageException { checkReadOnly(); delegate.deleteAllUserData(); } - @Override public void setConfigProperty(DynamicConfigPropertyDto propertyDto) { if (delegate.isReadOnly() || !READ_ONLY_MODE_ENABLED_PROPERTY_NAME.equals(propertyDto.getName())) { @@ -264,7 +249,6 @@ public void setConfigProperty(DynamicConfigPropertyDto propertyDto) { delegate.setConfigProperty(propertyDto); } - @Override public void deleteConfigProperty(String propertyName) { if (delegate.isReadOnly() || !READ_ONLY_MODE_ENABLED_PROPERTY_NAME.equals(propertyName)) { @@ -273,147 +257,129 @@ public void deleteConfigProperty(String propertyName) { delegate.deleteConfigProperty(propertyName); } - @Override - public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, String value) { + public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, + String value) { checkReadOnly(); return delegate.createArtifactVersionComment(groupId, artifactId, version, value); } - @Override - public void deleteArtifactVersionComment(String groupId, String artifactId, String version, String commentId) { + public void deleteArtifactVersionComment(String groupId, String artifactId, String version, + String commentId) { checkReadOnly(); delegate.deleteArtifactVersionComment(groupId, artifactId, version, commentId); } - @Override - public void updateArtifactVersionComment(String groupId, String artifactId, String version, String commentId, String value) { + public void 
updateArtifactVersionComment(String groupId, String artifactId, String version, + String commentId, String value) { checkReadOnly(); delegate.updateArtifactVersionComment(groupId, artifactId, version, commentId, value); } - @Override public String createDownload(DownloadContextDto context) throws RegistryStorageException { checkReadOnly(); return delegate.createDownload(context); } - @Override public DownloadContextDto consumeDownload(String downloadId) throws RegistryStorageException { checkReadOnly(); return delegate.consumeDownload(downloadId); } - @Override public void deleteAllExpiredDownloads() throws RegistryStorageException { checkReadOnly(); delegate.deleteAllExpiredDownloads(); } - @Override public void resetGlobalId() { checkReadOnly(); delegate.resetGlobalId(); } - @Override public void resetContentId() { checkReadOnly(); delegate.resetContentId(); } - @Override public void resetCommentId() { checkReadOnly(); delegate.resetCommentId(); } - @Override public void importComment(CommentEntity entity) { checkReadOnly(); delegate.importComment(entity); } - @Override public void importGroup(GroupEntity entity) { checkReadOnly(); delegate.importGroup(entity); } - @Override public void importGlobalRule(GlobalRuleEntity entity) { checkReadOnly(); delegate.importGlobalRule(entity); } - @Override public void importContent(ContentEntity entity) { checkReadOnly(); delegate.importContent(entity); } - @Override public void importArtifactVersion(ArtifactVersionEntity entity) { checkReadOnly(); delegate.importArtifactVersion(entity); } - @Override public void importArtifact(ArtifactEntity entity) { checkReadOnly(); delegate.importArtifact(entity); } - @Override public void importArtifactRule(ArtifactRuleEntity entity) { checkReadOnly(); delegate.importArtifactRule(entity); } - @Override public void importBranch(BranchEntity entity) { checkReadOnly(); delegate.importBranch(entity); } - @Override public void updateContentCanonicalHash(String newCanonicalHash, long 
contentId, String contentHash) { checkReadOnly(); delegate.updateContentCanonicalHash(newCanonicalHash, contentId, contentHash); } - @Override public long nextContentId() { checkReadOnly(); return delegate.nextContentId(); } - @Override public long nextGlobalId() { checkReadOnly(); return delegate.nextGlobalId(); } - @Override public long nextCommentId() { checkReadOnly(); @@ -421,7 +387,8 @@ public long nextCommentId() { } @Override - public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, List versions) { + public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, + List versions) { checkReadOnly(); return delegate.createBranch(ga, branchId, description, versions); } diff --git a/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecorator.java b/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecorator.java index 5e336a2a46..100bc3041f 100644 --- a/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecorator.java +++ b/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecorator.java @@ -4,10 +4,8 @@ public interface RegistryStorageDecorator extends RegistryStorage { - boolean isEnabled(); - /** * Decorators are ordered by natural int ordering, e.g. one with a lower order value is executed first. *

@@ -15,6 +13,5 @@ public interface RegistryStorageDecorator extends RegistryStorage { */ int order(); - void setDelegate(RegistryStorage delegate); } diff --git a/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecoratorBase.java b/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecoratorBase.java index 307d3c004d..0206f6bb42 100644 --- a/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecoratorBase.java +++ b/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecoratorBase.java @@ -26,9 +26,9 @@ import io.apicurio.registry.storage.impexp.EntityInputStream; import io.apicurio.registry.types.RuleType; import io.apicurio.registry.utils.impexp.ArtifactEntity; -import io.apicurio.registry.utils.impexp.BranchEntity; import io.apicurio.registry.utils.impexp.ArtifactRuleEntity; import io.apicurio.registry.utils.impexp.ArtifactVersionEntity; +import io.apicurio.registry.utils.impexp.BranchEntity; import io.apicurio.registry.utils.impexp.CommentEntity; import io.apicurio.registry.utils.impexp.ContentEntity; import io.apicurio.registry.utils.impexp.GlobalRuleEntity; @@ -41,80 +41,71 @@ * Forwards all method calls to the delegate, extends the read-only base. *

* This class is intended for extension, but is not abstract to catch missing methods. - * */ public class RegistryStorageDecoratorBase extends RegistryStorageDecoratorReadOnlyBase { - protected RegistryStorageDecoratorBase() { } @Override - public Pair createArtifact(String groupId, String artifactId, - String artifactType, EditableArtifactMetaDataDto artifactMetaData, String version, - ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, + public Pair createArtifact(String groupId, + String artifactId, String artifactType, EditableArtifactMetaDataDto artifactMetaData, + String version, ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, List versionBranches) throws RegistryStorageException { - return delegate.createArtifact(groupId, artifactId, artifactType, artifactMetaData, version, versionContent, - versionMetaData, versionBranches); + return delegate.createArtifact(groupId, artifactId, artifactType, artifactMetaData, version, + versionContent, versionMetaData, versionBranches); } - @Override public List deleteArtifact(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException { return delegate.deleteArtifact(groupId, artifactId); } - @Override public void deleteArtifacts(String groupId) throws RegistryStorageException { delegate.deleteArtifacts(groupId); } - @Override public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, - String artifactType, ContentWrapperDto content, EditableVersionMetaDataDto metaData, List branches) throws RegistryStorageException { - return delegate.createArtifactVersion(groupId, artifactId, version, artifactType, content, metaData, branches); + String artifactType, ContentWrapperDto content, EditableVersionMetaDataDto metaData, + List branches) throws RegistryStorageException { + return delegate.createArtifactVersion(groupId, artifactId, version, artifactType, content, metaData, + branches); } - 
@Override public void updateArtifactMetaData(String groupId, String artifactId, - EditableArtifactMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException { + EditableArtifactMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException { delegate.updateArtifactMetaData(groupId, artifactId, metaData); } - @Override public void deleteArtifactRules(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException { delegate.deleteArtifactRules(groupId, artifactId); } - @Override public void updateArtifactRule(String groupId, String artifactId, RuleType rule, - RuleConfigurationDto config) + RuleConfigurationDto config) throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException { delegate.updateArtifactRule(groupId, artifactId, rule, config); } - @Override public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException { delegate.deleteArtifactRule(groupId, artifactId, rule); } - @Override public void deleteArtifactVersion(String groupId, String artifactId, String version) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException { delegate.deleteArtifactVersion(groupId, artifactId, version); } - @Override public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, EditableVersionMetaDataDto metaData) @@ -122,172 +113,150 @@ public void updateArtifactVersionMetaData(String groupId, String artifactId, Str delegate.updateArtifactVersionMetaData(groupId, artifactId, version, metaData); } - @Override public void createGlobalRule(RuleType rule, RuleConfigurationDto config) throws RuleAlreadyExistsException, RegistryStorageException { delegate.createGlobalRule(rule, config); } - @Override public void deleteGlobalRules() throws RegistryStorageException { delegate.deleteGlobalRules(); } - @Override public void 
updateGlobalRule(RuleType rule, RuleConfigurationDto config) throws RuleNotFoundException, RegistryStorageException { delegate.updateGlobalRule(rule, config); } - @Override public void deleteGlobalRule(RuleType rule) throws RuleNotFoundException, RegistryStorageException { delegate.deleteGlobalRule(rule); } - @Override public void createGroup(GroupMetaDataDto group) throws GroupAlreadyExistsException, RegistryStorageException { delegate.createGroup(group); } - @Override - public void updateGroupMetaData(String groupId, EditableGroupMetaDataDto dto) throws GroupNotFoundException, RegistryStorageException { + public void updateGroupMetaData(String groupId, EditableGroupMetaDataDto dto) + throws GroupNotFoundException, RegistryStorageException { delegate.updateGroupMetaData(groupId, dto); } - - + @Override public void deleteGroup(String groupId) throws GroupNotFoundException, RegistryStorageException { delegate.deleteGroup(groupId); } - @Override - public void importData(EntityInputStream entities, boolean preserveGlobalId, boolean preserveContentId) throws RegistryStorageException { + public void importData(EntityInputStream entities, boolean preserveGlobalId, boolean preserveContentId) + throws RegistryStorageException { delegate.importData(entities, preserveGlobalId, preserveContentId); } - @Override - public void createRoleMapping(String principalId, String role, String principalName) throws RegistryStorageException { + public void createRoleMapping(String principalId, String role, String principalName) + throws RegistryStorageException { delegate.createRoleMapping(principalId, role, principalName); } - @Override public void deleteRoleMapping(String principalId) throws RegistryStorageException { delegate.deleteRoleMapping(principalId); } - @Override public void updateRoleMapping(String principalId, String role) throws RegistryStorageException { delegate.updateRoleMapping(principalId, role); } - @Override public void deleteAllUserData() { 
delegate.deleteAllUserData(); } - @Override public String createDownload(DownloadContextDto context) throws RegistryStorageException { return delegate.createDownload(context); } - @Override public DownloadContextDto consumeDownload(String downloadId) throws RegistryStorageException { return delegate.consumeDownload(downloadId); } - @Override public void deleteAllExpiredDownloads() throws RegistryStorageException { delegate.deleteAllExpiredDownloads(); } - @Override public void setConfigProperty(DynamicConfigPropertyDto property) throws RegistryStorageException { delegate.setConfigProperty(property); } - @Override public void deleteConfigProperty(String propertyName) { delegate.deleteConfigProperty(propertyName); } - @Override - public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, String value) { + public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, + String value) { return delegate.createArtifactVersionComment(groupId, artifactId, version, value); } - @Override - public void deleteArtifactVersionComment(String groupId, String artifactId, String version, String commentId) { + public void deleteArtifactVersionComment(String groupId, String artifactId, String version, + String commentId) { delegate.deleteArtifactVersionComment(groupId, artifactId, version, commentId); } - @Override - public void updateArtifactVersionComment(String groupId, String artifactId, String version, String commentId, String value) { + public void updateArtifactVersionComment(String groupId, String artifactId, String version, + String commentId, String value) { delegate.updateArtifactVersionComment(groupId, artifactId, version, commentId, value); } - @Override public void resetGlobalId() { delegate.resetGlobalId(); } - @Override public void resetContentId() { delegate.resetContentId(); } - @Override public void resetCommentId() { delegate.resetCommentId(); } - @Override public void 
importComment(CommentEntity entity) { delegate.importComment(entity); } - @Override public void importGroup(GroupEntity entity) { delegate.importGroup(entity); } - @Override public void importGlobalRule(GlobalRuleEntity entity) { delegate.importGlobalRule(entity); } - @Override public void importContent(ContentEntity entity) { delegate.importContent(entity); } - @Override public void importArtifactVersion(ArtifactVersionEntity entity) { delegate.importArtifactVersion(entity); @@ -303,37 +272,31 @@ public void importArtifactRule(ArtifactRuleEntity entity) { delegate.importArtifactRule(entity); } - @Override public void importBranch(BranchEntity entity) { delegate.importBranch(entity); } - @Override public void updateContentCanonicalHash(String newCanonicalHash, long contentId, String contentHash) { delegate.updateContentCanonicalHash(newCanonicalHash, contentId, contentHash); } - @Override public long nextContentId() { return delegate.nextContentId(); } - @Override public long nextGlobalId() { return delegate.nextGlobalId(); } - @Override public long nextCommentId() { return delegate.nextCommentId(); } - @Override public void deleteBranch(GA ga, BranchId branchId) { delegate.deleteBranch(ga, branchId); @@ -345,7 +308,8 @@ public void replaceBranchVersions(GA ga, BranchId branchId, List vers } @Override - public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, List versions) { + public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, + List versions) { return delegate.createBranch(ga, branchId, description, versions); } diff --git a/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecoratorReadOnlyBase.java b/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecoratorReadOnlyBase.java index 29456c4fbb..5146d53b1a 100644 --- a/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecoratorReadOnlyBase.java +++ 
b/app/src/main/java/io/apicurio/registry/storage/decorator/RegistryStorageDecoratorReadOnlyBase.java @@ -43,7 +43,6 @@ /** * Forwards all read-only method calls to the delegate. - * */ public abstract class RegistryStorageDecoratorReadOnlyBase implements RegistryStorage { @@ -105,7 +104,7 @@ public Set getArtifactIds(Integer limit) { @Override public ArtifactSearchResultsDto searchArtifacts(Set filters, OrderBy orderBy, - OrderDirection orderDirection, int offset, int limit) { + OrderDirection orderDirection, int offset, int limit) { return delegate.searchArtifacts(filters, orderBy, orderDirection, offset, limit); } @@ -117,9 +116,10 @@ public ArtifactMetaDataDto getArtifactMetaData(String groupId, String artifactId @Override public ArtifactVersionMetaDataDto getArtifactVersionMetaDataByContent(String groupId, String artifactId, - boolean canonical, TypedContent content, List artifactReferences) + boolean canonical, TypedContent content, List artifactReferences) throws ArtifactNotFoundException, RegistryStorageException { - return delegate.getArtifactVersionMetaDataByContent(groupId, artifactId, canonical, content, artifactReferences); + return delegate.getArtifactVersionMetaDataByContent(groupId, artifactId, canonical, content, + artifactReferences); } @Override @@ -130,7 +130,7 @@ public List getArtifactRules(String groupId, String artifactId) @Override public void createArtifactRule(String groupId, String artifactId, RuleType rule, - RuleConfigurationDto config) + RuleConfigurationDto config) throws ArtifactNotFoundException, RuleAlreadyExistsException, RegistryStorageException { delegate.createArtifactRule(groupId, artifactId, rule, config); } @@ -147,9 +147,9 @@ public List getArtifactVersions(String groupId, String artifactId) return delegate.getArtifactVersions(groupId, artifactId); } - @Override - public VersionSearchResultsDto searchVersions(Set filters, OrderBy orderBy, OrderDirection orderDirection, int offset, int limit) throws RegistryStorageException 
{ + public VersionSearchResultsDto searchVersions(Set filters, OrderBy orderBy, + OrderDirection orderDirection, int offset, int limit) throws RegistryStorageException { return delegate.searchVersions(filters, orderBy, orderDirection, offset, limit); } @@ -160,14 +160,15 @@ public StoredArtifactVersionDto getArtifactVersionContent(long globalId) } @Override - public StoredArtifactVersionDto getArtifactVersionContent(String groupId, String artifactId, String version) + public StoredArtifactVersionDto getArtifactVersionContent(String groupId, String artifactId, + String version) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException { return delegate.getArtifactVersionContent(groupId, artifactId, version); } @Override public ArtifactVersionMetaDataDto getArtifactVersionMetaData(String groupId, String artifactId, - String version) + String version) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException { return delegate.getArtifactVersionMetaData(groupId, artifactId, version); } @@ -216,7 +217,8 @@ public long countArtifactVersions(String groupId, String artifactId) throws Regi } @Override - public long countActiveArtifactVersions(String groupId, String artifactId) throws RegistryStorageException { + public long countActiveArtifactVersions(String groupId, String artifactId) + throws RegistryStorageException { return delegate.countActiveArtifactVersions(groupId, artifactId); } @@ -241,7 +243,8 @@ public List getRoleMappings() throws RegistryStorageException { } @Override - public RoleMappingSearchResultsDto searchRoleMappings(int offset, int limit) throws RegistryStorageException { + public RoleMappingSearchResultsDto searchRoleMappings(int offset, int limit) + throws RegistryStorageException { return delegate.searchRoleMappings(offset, limit); } @@ -281,27 +284,32 @@ public boolean isGroupExists(String groupId) throws RegistryStorageException { } @Override - public boolean isArtifactVersionExists(String 
groupId, String artifactId, String version) throws RegistryStorageException { + public boolean isArtifactVersionExists(String groupId, String artifactId, String version) + throws RegistryStorageException { return delegate.isArtifactVersionExists(groupId, artifactId, version); } @Override - public List getContentIdsReferencingArtifactVersion(String groupId, String artifactId, String version) { + public List getContentIdsReferencingArtifactVersion(String groupId, String artifactId, + String version) { return delegate.getContentIdsReferencingArtifactVersion(groupId, artifactId, version); } @Override - public List getGlobalIdsReferencingArtifactVersion(String groupId, String artifactId, String version) { + public List getGlobalIdsReferencingArtifactVersion(String groupId, String artifactId, + String version) { return delegate.getGlobalIdsReferencingArtifactVersion(groupId, artifactId, version); } @Override - public List getInboundArtifactReferences(String groupId, String artifactId, String version) { + public List getInboundArtifactReferences(String groupId, String artifactId, + String version) { return delegate.getInboundArtifactReferences(groupId, artifactId, version); } @Override - public GroupSearchResultsDto searchGroups(Set filters, OrderBy orderBy, OrderDirection orderDirection, Integer offset, Integer limit) { + public GroupSearchResultsDto searchGroups(Set filters, OrderBy orderBy, + OrderDirection orderDirection, Integer offset, Integer limit) { return delegate.searchGroups(filters, orderBy, orderDirection, offset, limit); } @@ -316,7 +324,8 @@ public boolean isContentExists(String contentHash) throws RegistryStorageExcepti } @Override - public boolean isArtifactRuleExists(String groupId, String artifactId, RuleType rule) throws RegistryStorageException { + public boolean isArtifactRuleExists(String groupId, String artifactId, RuleType rule) + throws RegistryStorageException { return delegate.isArtifactRuleExists(groupId, artifactId, rule); } diff --git 
a/app/src/main/java/io/apicurio/registry/storage/dto/BranchSearchResultsDto.java b/app/src/main/java/io/apicurio/registry/storage/dto/BranchSearchResultsDto.java index d5542656a4..a2490779a5 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/BranchSearchResultsDto.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/BranchSearchResultsDto.java @@ -11,7 +11,6 @@ import java.util.ArrayList; import java.util.List; - @NoArgsConstructor @AllArgsConstructor @Builder diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/EditableArtifactMetaDataDto.java b/app/src/main/java/io/apicurio/registry/storage/dto/EditableArtifactMetaDataDto.java index ca0dc2e0b8..834717fbea 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/EditableArtifactMetaDataDto.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/EditableArtifactMetaDataDto.java @@ -1,7 +1,5 @@ package io.apicurio.registry.storage.dto; -import java.util.Map; - import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.AllArgsConstructor; import lombok.Builder; @@ -11,6 +9,8 @@ import lombok.Setter; import lombok.ToString; +import java.util.Map; + @NoArgsConstructor @AllArgsConstructor @Builder @@ -20,13 +20,10 @@ @ToString @RegisterForReflection public class EditableArtifactMetaDataDto { - + public static EditableArtifactMetaDataDto fromEditableVersionMetaDataDto(EditableVersionMetaDataDto vmd) { - return EditableArtifactMetaDataDto.builder() - .name(vmd.getName()) - .description(vmd.getDescription()) - .labels(vmd.getLabels()) - .build(); + return EditableArtifactMetaDataDto.builder().name(vmd.getName()).description(vmd.getDescription()) + .labels(vmd.getLabels()).build(); } private String name; diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/EditableGroupMetaDataDto.java b/app/src/main/java/io/apicurio/registry/storage/dto/EditableGroupMetaDataDto.java index f03f8885a8..b0ee13756d 100644 --- 
a/app/src/main/java/io/apicurio/registry/storage/dto/EditableGroupMetaDataDto.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/EditableGroupMetaDataDto.java @@ -1,7 +1,5 @@ package io.apicurio.registry.storage.dto; -import java.util.Map; - import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.AllArgsConstructor; import lombok.Builder; @@ -11,6 +9,8 @@ import lombok.Setter; import lombok.ToString; +import java.util.Map; + @NoArgsConstructor @AllArgsConstructor @Builder diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/EditableVersionMetaDataDto.java b/app/src/main/java/io/apicurio/registry/storage/dto/EditableVersionMetaDataDto.java index e10ced4eab..290abeff41 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/EditableVersionMetaDataDto.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/EditableVersionMetaDataDto.java @@ -1,7 +1,5 @@ package io.apicurio.registry.storage.dto; -import java.util.Map; - import io.apicurio.registry.types.VersionState; import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.AllArgsConstructor; @@ -12,6 +10,8 @@ import lombok.Setter; import lombok.ToString; +import java.util.Map; + @NoArgsConstructor @AllArgsConstructor @Builder @@ -21,15 +21,12 @@ @ToString @RegisterForReflection public class EditableVersionMetaDataDto { - - public static EditableVersionMetaDataDto fromEditableArtifactMetaDataDto(EditableArtifactMetaDataDto amd) { - return EditableVersionMetaDataDto.builder() - .name(amd.getName()) - .description(amd.getDescription()) - .labels(amd.getLabels()) - .build(); - } + public static EditableVersionMetaDataDto fromEditableArtifactMetaDataDto( + EditableArtifactMetaDataDto amd) { + return EditableVersionMetaDataDto.builder().name(amd.getName()).description(amd.getDescription()) + .labels(amd.getLabels()).build(); + } private String name; private String description; diff --git 
a/app/src/main/java/io/apicurio/registry/storage/dto/GroupSearchResultsDto.java b/app/src/main/java/io/apicurio/registry/storage/dto/GroupSearchResultsDto.java index 85ccc31093..75f7f80bd7 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/GroupSearchResultsDto.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/GroupSearchResultsDto.java @@ -5,7 +5,6 @@ import java.util.ArrayList; import java.util.List; - @NoArgsConstructor @AllArgsConstructor @Builder diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/LazyContentList.java b/app/src/main/java/io/apicurio/registry/storage/dto/LazyContentList.java index 555f03701c..0d5770e821 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/LazyContentList.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/LazyContentList.java @@ -39,7 +39,7 @@ public boolean contains(Object o) { @Override public TypedContent get(int index) { - //Not the best solution, works for now... + // Not the best solution, works for now... 
return toTypedContent(storage.getContentById(contentIds.get(index))); } @@ -146,7 +146,7 @@ public void clear() { @Override public Spliterator spliterator() { - //prevent streaming on this list + // prevent streaming on this list throw new UnsupportedOperationException(); } diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/OrderBy.java b/app/src/main/java/io/apicurio/registry/storage/dto/OrderBy.java index a2310ebfe7..99384f16b4 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/OrderBy.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/OrderBy.java @@ -1,7 +1,7 @@ package io.apicurio.registry.storage.dto; public enum OrderBy { - name, createdOn, modifiedOn, // Shared + name, createdOn, modifiedOn, // Shared groupId, // Group specific artifactId, artifactType, // Artifact specific globalId, version // Version specific diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/OrderDirection.java b/app/src/main/java/io/apicurio/registry/storage/dto/OrderDirection.java index 085c723dec..0e77999041 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/OrderDirection.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/OrderDirection.java @@ -1,7 +1,7 @@ package io.apicurio.registry.storage.dto; public enum OrderDirection { - + asc, desc } diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/RoleMappingSearchResultsDto.java b/app/src/main/java/io/apicurio/registry/storage/dto/RoleMappingSearchResultsDto.java index e72f9726b5..2015acdb17 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/RoleMappingSearchResultsDto.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/RoleMappingSearchResultsDto.java @@ -1,8 +1,5 @@ package io.apicurio.registry.storage.dto; -import java.util.ArrayList; -import java.util.List; - import io.quarkus.runtime.annotations.RegisterForReflection; import lombok.AllArgsConstructor; import lombok.Builder; @@ -12,6 +9,9 @@ import lombok.Setter; import 
lombok.ToString; +import java.util.ArrayList; +import java.util.List; + @NoArgsConstructor @AllArgsConstructor @Builder diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/SearchFilter.java b/app/src/main/java/io/apicurio/registry/storage/dto/SearchFilter.java index 375876fe67..ccc9a27cbf 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/SearchFilter.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/SearchFilter.java @@ -1,8 +1,7 @@ package io.apicurio.registry.storage.dto; -import org.apache.commons.lang3.tuple.Pair; - import io.apicurio.registry.types.VersionState; +import org.apache.commons.lang3.tuple.Pair; public class SearchFilter { @@ -18,6 +17,7 @@ public SearchFilter() { /** * Constructor. + * * @param type * @param value object */ @@ -27,11 +27,11 @@ private SearchFilter(SearchFilterType type, Object value) { } public static SearchFilter ofLabel(String labelKey, String labelValue) { - return new SearchFilter(SearchFilterType.labels, Pair.of(labelKey, labelValue)); + return new SearchFilter(SearchFilterType.labels, Pair. of(labelKey, labelValue)); } public static SearchFilter ofLabel(String labelKey) { - return new SearchFilter(SearchFilterType.labels, Pair.of(labelKey, null)); + return new SearchFilter(SearchFilterType.labels, Pair. 
of(labelKey, null)); } public static SearchFilter ofGlobalId(Long value) { @@ -111,7 +111,6 @@ public Number getNumberValue() { throw new IllegalStateException("value is not of type number"); } - /** * @param value the value to set */ @@ -154,7 +153,7 @@ public boolean isNot() { public void setNot(boolean not) { this.not = not; } - + public SearchFilter negated() { SearchFilter filter = new SearchFilter(type, value); filter.setNot(true); diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/SearchedBranchDto.java b/app/src/main/java/io/apicurio/registry/storage/dto/SearchedBranchDto.java index 7c87718025..69e1023dec 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/SearchedBranchDto.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/SearchedBranchDto.java @@ -8,7 +8,6 @@ import lombok.Setter; import lombok.ToString; - @NoArgsConstructor @AllArgsConstructor @Builder diff --git a/app/src/main/java/io/apicurio/registry/storage/dto/SearchedGroupDto.java b/app/src/main/java/io/apicurio/registry/storage/dto/SearchedGroupDto.java index 8b550e1d3c..dadb8e09fd 100644 --- a/app/src/main/java/io/apicurio/registry/storage/dto/SearchedGroupDto.java +++ b/app/src/main/java/io/apicurio/registry/storage/dto/SearchedGroupDto.java @@ -4,7 +4,6 @@ import java.util.Date; - @NoArgsConstructor @AllArgsConstructor @Builder diff --git a/app/src/main/java/io/apicurio/registry/storage/error/AlreadyExistsException.java b/app/src/main/java/io/apicurio/registry/storage/error/AlreadyExistsException.java index 7e732091ef..4f9c38212d 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/AlreadyExistsException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/AlreadyExistsException.java @@ -4,7 +4,6 @@ public abstract class AlreadyExistsException extends RegistryStorageException { private static final long serialVersionUID = 5055445625652989500L; - protected AlreadyExistsException(Throwable cause) { super(cause); } diff --git 
a/app/src/main/java/io/apicurio/registry/storage/error/ArtifactAlreadyExistsException.java b/app/src/main/java/io/apicurio/registry/storage/error/ArtifactAlreadyExistsException.java index 652868d33c..cac279f7a0 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/ArtifactAlreadyExistsException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/ArtifactAlreadyExistsException.java @@ -12,14 +12,12 @@ public class ArtifactAlreadyExistsException extends AlreadyExistsException { @Getter private String artifactId; - public ArtifactAlreadyExistsException(String groupId, String artifactId) { super(message(groupId, artifactId)); this.artifactId = artifactId; this.groupId = groupId; } - private static String message(String groupId, String artifactId) { return "An artifact with ID '" + artifactId + "' in group '" + groupId + "' already exists."; } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/ArtifactNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/ArtifactNotFoundException.java index a5a8239525..4082185bb4 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/ArtifactNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/ArtifactNotFoundException.java @@ -13,27 +13,23 @@ public class ArtifactNotFoundException extends NotFoundException { @Getter private String artifactId; - public ArtifactNotFoundException(String groupId, String artifactId) { super(message(groupId, artifactId)); this.groupId = groupId; this.artifactId = artifactId; } - public ArtifactNotFoundException(String groupId, String artifactId, Throwable cause) { super(message(groupId, artifactId), cause); this.groupId = groupId; this.artifactId = artifactId; } - public ArtifactNotFoundException(String artifactId) { super(message(GroupId.DEFAULT.getRawGroupIdWithDefaultString(), artifactId)); this.artifactId = artifactId; } - private static String message(String groupId, String artifactId) { return "No 
artifact with ID '" + artifactId + "' in group '" + groupId + "' was found."; } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/BranchNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/BranchNotFoundException.java index 480eb21374..c256894b16 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/BranchNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/BranchNotFoundException.java @@ -16,7 +16,6 @@ public BranchNotFoundException(String groupId, String artifactId, String branchI this.branchId = branchId; } - public BranchNotFoundException(String groupId, String artifactId, String branchId, Exception cause) { super(message(groupId, artifactId, branchId), cause); this.groupId = groupId; @@ -24,7 +23,6 @@ public BranchNotFoundException(String groupId, String artifactId, String branchI this.branchId = branchId; } - private static String message(String groupId, String artifactId, String branchId) { return "No branch '" + branchId + "' was found in " + groupId + "/" + artifactId + "."; } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/CommentNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/CommentNotFoundException.java index 1219bc2826..44c26ffbcc 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/CommentNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/CommentNotFoundException.java @@ -9,7 +9,6 @@ public class CommentNotFoundException extends NotFoundException { @Getter private String commentId; - public CommentNotFoundException(String commentId) { super("No comment with ID '" + commentId + "' was found."); this.commentId = commentId; diff --git a/app/src/main/java/io/apicurio/registry/storage/error/ConfigPropertyNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/ConfigPropertyNotFoundException.java index 70cfb58419..b371c03f15 100644 --- 
a/app/src/main/java/io/apicurio/registry/storage/error/ConfigPropertyNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/ConfigPropertyNotFoundException.java @@ -9,7 +9,6 @@ public class ConfigPropertyNotFoundException extends NotFoundException { @Getter private final String propertyName; - public ConfigPropertyNotFoundException(String propertyName) { super("No configuration property named '" + propertyName + "' was found."); this.propertyName = propertyName; diff --git a/app/src/main/java/io/apicurio/registry/storage/error/ContentAlreadyExistsException.java b/app/src/main/java/io/apicurio/registry/storage/error/ContentAlreadyExistsException.java index d062f81c56..8a452ccca0 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/ContentAlreadyExistsException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/ContentAlreadyExistsException.java @@ -9,7 +9,6 @@ public class ContentAlreadyExistsException extends AlreadyExistsException { @Getter private final Long contentId; - public ContentAlreadyExistsException(long contentId) { super("Content with ID " + contentId + " already exists."); this.contentId = contentId; diff --git a/app/src/main/java/io/apicurio/registry/storage/error/ContentNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/ContentNotFoundException.java index f6c36a18f6..71a710abda 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/ContentNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/ContentNotFoundException.java @@ -12,19 +12,16 @@ public class ContentNotFoundException extends NotFoundException { @Getter private String contentHash; - public ContentNotFoundException(long contentId) { super(message(contentId, null)); this.contentId = contentId; } - public ContentNotFoundException(String contentHash) { super(message(null, contentHash)); this.contentHash = contentHash; } - private static String message(Long contentId, String 
contentHash) { if (contentId != null) { return "No content with ID '" + contentId + "' was found."; diff --git a/app/src/main/java/io/apicurio/registry/storage/error/DownloadNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/DownloadNotFoundException.java index dd2e900eaa..c65c57ed65 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/DownloadNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/DownloadNotFoundException.java @@ -4,7 +4,6 @@ public class DownloadNotFoundException extends NotFoundException { private static final long serialVersionUID = -8634862918588649938L; - public DownloadNotFoundException() { super("Download not found."); } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/GroupAlreadyExistsException.java b/app/src/main/java/io/apicurio/registry/storage/error/GroupAlreadyExistsException.java index f4371fb706..d714406a81 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/GroupAlreadyExistsException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/GroupAlreadyExistsException.java @@ -9,7 +9,6 @@ public class GroupAlreadyExistsException extends AlreadyExistsException { @Getter private final String groupId; - public GroupAlreadyExistsException(String groupId) { super("Group '" + groupId + "' already exists."); this.groupId = groupId; diff --git a/app/src/main/java/io/apicurio/registry/storage/error/GroupNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/GroupNotFoundException.java index 3586377eb6..5866534351 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/GroupNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/GroupNotFoundException.java @@ -9,19 +9,16 @@ public class GroupNotFoundException extends NotFoundException { @Getter private final String groupId; - public GroupNotFoundException(String groupId) { super(message(groupId)); this.groupId = groupId; } - 
public GroupNotFoundException(String groupId, Throwable cause) { super(message(groupId), cause); this.groupId = groupId; } - private static String message(String groupId) { return "No group '" + groupId + "' was found."; } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/InvalidArtifactStateException.java b/app/src/main/java/io/apicurio/registry/storage/error/InvalidArtifactStateException.java index 257c82677d..7ce17ee2f4 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/InvalidArtifactStateException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/InvalidArtifactStateException.java @@ -6,8 +6,10 @@ public class InvalidArtifactStateException extends RegistryStorageException { private static final long serialVersionUID = 1L; - public InvalidArtifactStateException(String groupId, String artifactId, String version, ArtifactState state) { - super(String.format("Artifact %s [%s] in group (%s) is not active: %s", artifactId, version, groupId, state)); + public InvalidArtifactStateException(String groupId, String artifactId, String version, + ArtifactState state) { + super(String.format("Artifact %s [%s] in group (%s) is not active: %s", artifactId, version, groupId, + state)); } public InvalidArtifactStateException(ArtifactState previousState, ArtifactState newState) { diff --git a/app/src/main/java/io/apicurio/registry/storage/error/InvalidVersionStateException.java b/app/src/main/java/io/apicurio/registry/storage/error/InvalidVersionStateException.java index 0ecdb40749..0a9de71e56 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/InvalidVersionStateException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/InvalidVersionStateException.java @@ -6,8 +6,10 @@ public class InvalidVersionStateException extends RegistryStorageException { private static final long serialVersionUID = 1L; - public InvalidVersionStateException(String groupId, String artifactId, String version, VersionState state) { - 
super(String.format("Artifact %s [%s] in group (%s) is not active: %s", artifactId, version, groupId, state)); + public InvalidVersionStateException(String groupId, String artifactId, String version, + VersionState state) { + super(String.format("Artifact %s [%s] in group (%s) is not active: %s", artifactId, version, groupId, + state)); } public InvalidVersionStateException(VersionState previousState, VersionState newState) { diff --git a/app/src/main/java/io/apicurio/registry/storage/error/LogConfigurationNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/LogConfigurationNotFoundException.java index fcbb01b09e..004a0df80f 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/LogConfigurationNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/LogConfigurationNotFoundException.java @@ -9,19 +9,16 @@ public class LogConfigurationNotFoundException extends NotFoundException { @Getter private final String logger; - public LogConfigurationNotFoundException(String logger, Throwable cause) { super(message(logger), cause); this.logger = logger; } - public LogConfigurationNotFoundException(String logger) { super(message(logger)); this.logger = logger; } - private static String message(String logger) { return "No configuration found for logger '" + logger + "'"; } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/NotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/NotFoundException.java index ec1ff8fb5f..e0396df040 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/NotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/NotFoundException.java @@ -4,7 +4,6 @@ public abstract class NotFoundException extends RegistryStorageException { private static final long serialVersionUID = 7134307797211927863L; - protected NotFoundException(Throwable cause) { super(cause); } diff --git 
a/app/src/main/java/io/apicurio/registry/storage/error/RegistryStorageException.java b/app/src/main/java/io/apicurio/registry/storage/error/RegistryStorageException.java index 5b76ba454a..a050e0b021 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/RegistryStorageException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/RegistryStorageException.java @@ -10,7 +10,6 @@ public class RegistryStorageException extends RegistryException { private static final long serialVersionUID = 708084955101638005L; - public RegistryStorageException(Throwable cause) { super(cause); } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/RoleMappingAlreadyExistsException.java b/app/src/main/java/io/apicurio/registry/storage/error/RoleMappingAlreadyExistsException.java index 9fc95a1b60..9a7c3b912d 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/RoleMappingAlreadyExistsException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/RoleMappingAlreadyExistsException.java @@ -12,7 +12,6 @@ public class RoleMappingAlreadyExistsException extends AlreadyExistsException { @Getter private String role; - public RoleMappingAlreadyExistsException(String principalId, String role) { super("A mapping for principal '" + principalId + "' and role '" + role + "' already exists."); this.principalId = principalId; diff --git a/app/src/main/java/io/apicurio/registry/storage/error/RoleMappingNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/RoleMappingNotFoundException.java index c698f8d6f1..e8d32f2e86 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/RoleMappingNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/RoleMappingNotFoundException.java @@ -12,13 +12,11 @@ public class RoleMappingNotFoundException extends NotFoundException { @Getter private String role; - public RoleMappingNotFoundException(String principalId) { super("No role mapping for principal '" + 
principalId + "' was found."); this.principalId = principalId; } - public RoleMappingNotFoundException(String principalId, String role) { super("No mapping for principal '" + principalId + "' and role '" + role + "' was found."); this.principalId = principalId; diff --git a/app/src/main/java/io/apicurio/registry/storage/error/RuleAlreadyExistsException.java b/app/src/main/java/io/apicurio/registry/storage/error/RuleAlreadyExistsException.java index 0dde401843..3d801e3c64 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/RuleAlreadyExistsException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/RuleAlreadyExistsException.java @@ -7,11 +7,9 @@ public class RuleAlreadyExistsException extends AlreadyExistsException { private static final long serialVersionUID = 2412206165461946827L; - @Getter private final RuleType rule; - public RuleAlreadyExistsException(RuleType rule) { super("A rule named '" + rule.name() + "' already exists."); this.rule = rule; diff --git a/app/src/main/java/io/apicurio/registry/storage/error/RuleNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/RuleNotFoundException.java index 5a5dc5585e..f98da2f5c6 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/RuleNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/RuleNotFoundException.java @@ -7,11 +7,9 @@ public class RuleNotFoundException extends NotFoundException { private static final long serialVersionUID = -5024749463194169679L; - @Getter private final RuleType rule; - public RuleNotFoundException(RuleType rule) { super(message(rule)); this.rule = rule; @@ -22,7 +20,6 @@ public RuleNotFoundException(RuleType rule, Throwable cause) { this.rule = rule; } - private static String message(RuleType rule) { return "No rule named '" + rule.name() + "' was found."; } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/VersionAlreadyExistsException.java 
b/app/src/main/java/io/apicurio/registry/storage/error/VersionAlreadyExistsException.java index 9ca08b227d..cc43a9c585 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/VersionAlreadyExistsException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/VersionAlreadyExistsException.java @@ -18,7 +18,6 @@ public class VersionAlreadyExistsException extends AlreadyExistsException { @Getter private Long globalId; - public VersionAlreadyExistsException(String groupId, String artifactId, String version) { super(message(groupId, artifactId, version, null)); this.groupId = groupId; @@ -26,19 +25,17 @@ public VersionAlreadyExistsException(String groupId, String artifactId, String v this.version = version; } - public VersionAlreadyExistsException(long globalId) { super(message(null, null, null, globalId)); this.globalId = globalId; } - private static String message(String groupId, String artifactId, String version, Long globalId) { if (globalId != null) { return "An artifact with global ID '" + globalId + "' already exists."; } else { - return "An artifact version '" + version + "' for artifact ID '" + artifactId + "' " + - "in group '" + groupId + "' already exists."; + return "An artifact version '" + version + "' for artifact ID '" + artifactId + "' " + + "in group '" + groupId + "' already exists."; } } } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/VersionAlreadyExistsOnBranchException.java b/app/src/main/java/io/apicurio/registry/storage/error/VersionAlreadyExistsOnBranchException.java index 5e101d405c..db0819e638 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/VersionAlreadyExistsOnBranchException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/VersionAlreadyExistsOnBranchException.java @@ -12,8 +12,8 @@ public class VersionAlreadyExistsOnBranchException extends AlreadyExistsExceptio private String version; private String branchId; - - public VersionAlreadyExistsOnBranchException(String 
groupId, String artifactId, String version, String branchId) { + public VersionAlreadyExistsOnBranchException(String groupId, String artifactId, String version, + String branchId) { super(message(groupId, artifactId, version, branchId)); this.groupId = groupId; this.artifactId = artifactId; @@ -21,9 +21,8 @@ public VersionAlreadyExistsOnBranchException(String groupId, String artifactId, this.branchId = branchId; } - private static String message(String groupId, String artifactId, String version, String branchId) { - return "Version '" + version + "' (for artifact ID '" + artifactId + "' " + - "in group '" + groupId + "') already exists in branch '" + branchId + "'."; + return "Version '" + version + "' (for artifact ID '" + artifactId + "' " + "in group '" + groupId + + "') already exists in branch '" + branchId + "'."; } } diff --git a/app/src/main/java/io/apicurio/registry/storage/error/VersionNotFoundException.java b/app/src/main/java/io/apicurio/registry/storage/error/VersionNotFoundException.java index ce05661595..fdec51757c 100644 --- a/app/src/main/java/io/apicurio/registry/storage/error/VersionNotFoundException.java +++ b/app/src/main/java/io/apicurio/registry/storage/error/VersionNotFoundException.java @@ -19,13 +19,11 @@ public class VersionNotFoundException extends NotFoundException { @Getter private Long globalId; - public VersionNotFoundException(long globalId) { super(message(null, null, null, globalId)); this.globalId = globalId; } - public VersionNotFoundException(String groupId, String artifactId, String version) { super(message(groupId, artifactId, version, null)); this.groupId = groupId; @@ -33,7 +31,6 @@ public VersionNotFoundException(String groupId, String artifactId, String versio this.version = version; } - public VersionNotFoundException(String groupId, String artifactId, String version, Throwable cause) { super(message(groupId, artifactId, version, null), cause); this.groupId = groupId; @@ -41,21 +38,20 @@ public 
VersionNotFoundException(String groupId, String artifactId, String versio this.version = version; } - public VersionNotFoundException(GAV gav, Throwable cause) { - super(message(gav.getRawGroupIdWithDefaultString(), gav.getRawArtifactId(), gav.getRawVersionId(), null), cause); + super(message(gav.getRawGroupIdWithDefaultString(), gav.getRawArtifactId(), gav.getRawVersionId(), + null), cause); this.groupId = gav.getRawGroupIdWithDefaultString(); this.artifactId = gav.getRawArtifactId(); this.version = gav.getRawVersionId(); } - private static String message(String groupId, String artifactId, String version, Long globalId) { if (globalId != null) { return "No version with global ID '" + globalId + "' found."; } else { - return "No version '" + version + "' found for artifact with ID '" + artifactId + "' " + - "in group '" + groupId + "'."; + return "No version '" + version + "' found for artifact with ID '" + artifactId + "' " + + "in group '" + groupId + "'."; } } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impexp/EntityInputStream.java b/app/src/main/java/io/apicurio/registry/storage/impexp/EntityInputStream.java index e0d85d2c7c..9b01c290ec 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impexp/EntityInputStream.java +++ b/app/src/main/java/io/apicurio/registry/storage/impexp/EntityInputStream.java @@ -5,7 +5,6 @@ import java.io.Closeable; import java.io.IOException; - public interface EntityInputStream extends Closeable { /** diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/AbstractReadOnlyRegistryStorage.java b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/AbstractReadOnlyRegistryStorage.java index 185c22fea7..6d4a76da3c 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/AbstractReadOnlyRegistryStorage.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/AbstractReadOnlyRegistryStorage.java @@ -22,9 +22,9 @@ import 
io.apicurio.registry.storage.impexp.EntityInputStream; import io.apicurio.registry.types.RuleType; import io.apicurio.registry.utils.impexp.ArtifactEntity; -import io.apicurio.registry.utils.impexp.BranchEntity; import io.apicurio.registry.utils.impexp.ArtifactRuleEntity; import io.apicurio.registry.utils.impexp.ArtifactVersionEntity; +import io.apicurio.registry.utils.impexp.BranchEntity; import io.apicurio.registry.utils.impexp.CommentEntity; import io.apicurio.registry.utils.impexp.ContentEntity; import io.apicurio.registry.utils.impexp.GlobalRuleEntity; @@ -35,260 +35,229 @@ public abstract class AbstractReadOnlyRegistryStorage implements RegistryStorage { - protected void readOnlyViolation() { // This should never happen due to the read-only decorator - throw new UnreachableCodeException("Storage is in read-only mode. ReadOnlyRegistryStorageDecorator should prevent this call."); + throw new UnreachableCodeException( + "Storage is in read-only mode. ReadOnlyRegistryStorageDecorator should prevent this call."); } - @Override public boolean isReadOnly() { return true; } - @Override - public Pair createArtifact(String groupId, String artifactId, String artifactType, EditableArtifactMetaDataDto artifactMetaData, String version, ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, List versionBranches) throws RegistryStorageException { + public Pair createArtifact(String groupId, + String artifactId, String artifactType, EditableArtifactMetaDataDto artifactMetaData, + String version, ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, + List versionBranches) throws RegistryStorageException { readOnlyViolation(); return null; } @Override - public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, String artifactType, ContentWrapperDto content, EditableVersionMetaDataDto metaData, List branches) throws RegistryStorageException { + public ArtifactVersionMetaDataDto 
createArtifactVersion(String groupId, String artifactId, String version, + String artifactType, ContentWrapperDto content, EditableVersionMetaDataDto metaData, + List branches) throws RegistryStorageException { readOnlyViolation(); return null; } - @Override public List deleteArtifact(String groupId, String artifactId) throws RegistryStorageException { readOnlyViolation(); return null; } - @Override public void deleteArtifacts(String groupId) throws RegistryStorageException { readOnlyViolation(); } - @Override - public void updateArtifactMetaData(String groupId, String artifactId, EditableArtifactMetaDataDto metaData) - throws RegistryStorageException { + public void updateArtifactMetaData(String groupId, String artifactId, + EditableArtifactMetaDataDto metaData) throws RegistryStorageException { readOnlyViolation(); } - @Override - public void createArtifactRule(String groupId, String artifactId, RuleType rule, RuleConfigurationDto config) - throws RegistryStorageException { + public void createArtifactRule(String groupId, String artifactId, RuleType rule, + RuleConfigurationDto config) throws RegistryStorageException { readOnlyViolation(); } - @Override public void deleteArtifactRules(String groupId, String artifactId) throws RegistryStorageException { readOnlyViolation(); } - @Override - public void updateArtifactRule(String groupId, String artifactId, RuleType rule, RuleConfigurationDto config) - throws RegistryStorageException { + public void updateArtifactRule(String groupId, String artifactId, RuleType rule, + RuleConfigurationDto config) throws RegistryStorageException { readOnlyViolation(); } - @Override public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) throws RegistryStorageException { readOnlyViolation(); } - @Override public void deleteArtifactVersion(String groupId, String artifactId, String version) throws RegistryStorageException { readOnlyViolation(); } - @Override - public void updateArtifactVersionMetaData(String 
groupId, String artifactId, String version, EditableVersionMetaDataDto metaData) - throws RegistryStorageException { + public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, + EditableVersionMetaDataDto metaData) throws RegistryStorageException { readOnlyViolation(); } - @Override - public void createGlobalRule(RuleType rule, RuleConfigurationDto config) - throws RegistryStorageException { + public void createGlobalRule(RuleType rule, RuleConfigurationDto config) throws RegistryStorageException { readOnlyViolation(); } - @Override public void deleteGlobalRules() throws RegistryStorageException { readOnlyViolation(); } - @Override public void updateGlobalRule(RuleType rule, RuleConfigurationDto config) throws RegistryStorageException { readOnlyViolation(); } - @Override public void deleteGlobalRule(RuleType rule) throws RegistryStorageException { readOnlyViolation(); } - @Override public void createGroup(GroupMetaDataDto group) throws RegistryStorageException { readOnlyViolation(); } - @Override public void updateGroupMetaData(String groupId, EditableGroupMetaDataDto dto) { readOnlyViolation(); } - @Override public void deleteGroup(String groupId) throws RegistryStorageException { readOnlyViolation(); } - @Override public void importData(EntityInputStream entities, boolean preserveGlobalId, boolean preserveContentId) throws RegistryStorageException { readOnlyViolation(); } - @Override - public void createRoleMapping(String principalId, String role, String principalName) throws RegistryStorageException { + public void createRoleMapping(String principalId, String role, String principalName) + throws RegistryStorageException { readOnlyViolation(); } - @Override public void deleteRoleMapping(String principalId) throws RegistryStorageException { readOnlyViolation(); } - @Override public void updateRoleMapping(String principalId, String role) throws RegistryStorageException { readOnlyViolation(); } - @Override public void 
deleteAllUserData() throws RegistryStorageException { readOnlyViolation(); } - @Override public String createDownload(DownloadContextDto context) throws RegistryStorageException { readOnlyViolation(); return null; } - @Override public DownloadContextDto consumeDownload(String downloadId) throws RegistryStorageException { readOnlyViolation(); return null; } - @Override public void deleteAllExpiredDownloads() throws RegistryStorageException { readOnlyViolation(); } - @Override public void setConfigProperty(DynamicConfigPropertyDto propertyDto) { readOnlyViolation(); } - @Override public void deleteConfigProperty(String propertyName) { readOnlyViolation(); } - @Override - public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, String value) { + public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, + String value) { readOnlyViolation(); return null; } - @Override - public void deleteArtifactVersionComment(String groupId, String artifactId, String version, String commentId) { + public void deleteArtifactVersionComment(String groupId, String artifactId, String version, + String commentId) { readOnlyViolation(); } - @Override - public void updateArtifactVersionComment(String groupId, String artifactId, String version, String commentId, String value) { + public void updateArtifactVersionComment(String groupId, String artifactId, String version, + String commentId, String value) { readOnlyViolation(); } - @Override public void resetGlobalId() { readOnlyViolation(); } - @Override public void resetContentId() { readOnlyViolation(); } - @Override public void resetCommentId() { readOnlyViolation(); } - @Override public void importComment(CommentEntity entity) { readOnlyViolation(); } - @Override public void importGroup(GroupEntity entity) { readOnlyViolation(); } - @Override public void importGlobalRule(GlobalRuleEntity entity) { readOnlyViolation(); } - @Override public void 
importContent(ContentEntity entity) { readOnlyViolation(); } - @Override public void importArtifactVersion(ArtifactVersionEntity entity) { readOnlyViolation(); @@ -304,33 +273,28 @@ public void importArtifactRule(ArtifactRuleEntity entity) { readOnlyViolation(); } - @Override public void importBranch(BranchEntity entity) { readOnlyViolation(); } - @Override public void updateContentCanonicalHash(String newCanonicalHash, long contentId, String contentHash) { readOnlyViolation(); } - @Override public long nextContentId() { readOnlyViolation(); return 0; } - @Override public long nextGlobalId() { readOnlyViolation(); return 0; } - @Override public long nextCommentId() { readOnlyViolation(); @@ -343,7 +307,8 @@ public void deleteBranch(GA ga, BranchId branchId) { } @Override - public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, List versions) { + public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, + List versions) { readOnlyViolation(); return null; } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/BlueDatasourceProducer.java b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/BlueDatasourceProducer.java index af84dbb5a1..7b9dbfed92 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/BlueDatasourceProducer.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/BlueDatasourceProducer.java @@ -59,8 +59,6 @@ public AgroalDataSource produceDatasource() throws SQLException { props.put(AgroalPropertiesReader.CREDENTIAL, password); props.put(AgroalPropertiesReader.PROVIDER_CLASS_NAME, databaseKind.getDriverClassName()); - return AgroalDataSource.from(new AgroalPropertiesReader() - .readProperties(props) - .get()); + return AgroalDataSource.from(new AgroalPropertiesReader().readProperties(props).get()); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitManager.java 
b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitManager.java index 11d32bb597..9ee4015b0f 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitManager.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitManager.java @@ -1,6 +1,5 @@ package io.apicurio.registry.storage.impl.gitops; - import io.apicurio.common.apps.config.DynamicConfigPropertyDto; import io.apicurio.registry.content.TypedContent; import io.apicurio.registry.content.util.ContentTypeUtil; @@ -64,12 +63,10 @@ public class GitManager { @Getter private RevCommit previousCommit; - public void start() throws IOException, URISyntaxException, GitAPIException { initRepo(); } - private void initRepo() throws IOException, GitAPIException, URISyntaxException { var workDirPath = Paths.get(config.getWorkDir()); @@ -78,26 +75,16 @@ private void initRepo() throws IOException, GitAPIException, URISyntaxException if (Files.exists(gitPath.resolve("config"))) { git = Git.open(gitPath.toFile()); } else { - git = Git.init() - .setGitDir(gitPath.toFile()) - .setInitialBranch(UUID.randomUUID().toString()) + git = Git.init().setGitDir(gitPath.toFile()).setInitialBranch(UUID.randomUUID().toString()) .call(); } var previousOID = git.getRepository().resolve("refs/heads/empty"); if (previousOID == null) { - git.commit() - .setMessage("empty") - .setAllowEmpty(true) - .call(); + git.commit().setMessage("empty").setAllowEmpty(true).call(); - git.checkout() - .setName("empty") - .setCreateBranch(true) - .setForced(true) - .setOrphan(true) - .call(); + git.checkout().setName("empty").setCreateBranch(true).setForced(true).setOrphan(true).call(); previousOID = git.getRepository().resolve("refs/heads/empty"); } @@ -106,27 +93,19 @@ private void initRepo() throws IOException, GitAPIException, URISyntaxException originRemoteName = ensureRemote(config.getOriginRepoURI()); } - private String ensureRemote(String repoURI) throws GitAPIException, URISyntaxException { var repoURIish = 
new URIish(repoURI); - var remote = git.remoteList() - .call() - .stream() - .filter(r -> r.getURIs().stream().allMatch(u -> u.equals(repoURIish))) - .findAny(); + var remote = git.remoteList().call().stream() + .filter(r -> r.getURIs().stream().allMatch(u -> u.equals(repoURIish))).findAny(); if (remote.isPresent()) { return remote.get().getName(); } else { var name = UUID.randomUUID().toString(); - git.remoteAdd() - .setName(name) - .setUri(repoURIish) - .call(); + git.remoteAdd().setName(name).setUri(repoURIish).call(); return name; } } - /** * Checks the configured origin repo branch and returns the corresponding latest RevCommit */ @@ -136,12 +115,7 @@ public RevCommit poll() throws GitAPIException, IOException { var fetchRef = "refs/heads/" + config.getOriginRepoBranch() + ":" + updatedRef; - git.fetch() - .setRemote(originRemoteName) - .setRefSpecs(fetchRef) - .setDepth(1) - .setForceUpdate(true) - .call(); + git.fetch().setRemote(originRemoteName).setRefSpecs(fetchRef).setDepth(1).setForceUpdate(true).call(); var updatedOID = git.getRepository().resolve(updatedRef); if (updatedOID == null) { @@ -150,12 +124,10 @@ public RevCommit poll() throws GitAPIException, IOException { return git.getRepository().parseCommit(updatedOID); } - public void updateCurrentCommit(RevCommit currentCommit) { previousCommit = currentCommit; } - public void run(ProcessingState state, RevCommit updatedCommit) throws GitAPIException, IOException { if (updatedCommit == null || updatedCommit.equals(previousCommit)) { @@ -185,15 +157,12 @@ public void run(ProcessingState state, RevCommit updatedCommit) throws GitAPIExc log.debug("Processing {} files", state.getPathIndex().size()); process(state); - var unprocessed = state.getPathIndex().values().stream() - .filter(f -> !f.isProcessed()) - .map(GitFile::getPath) - .collect(Collectors.toList()); + var unprocessed = state.getPathIndex().values().stream().filter(f -> !f.isProcessed()) + .map(GitFile::getPath).collect(Collectors.toList()); 
log.debug("The following {} file(s) were not processed: {}", unprocessed.size(), unprocessed); } - private void process(ProcessingState state) { for (GitFile file : state.fromTypeIndex(Type.REGISTRY)) { @@ -220,11 +189,11 @@ private void process(ProcessingState state) { } } else { - log.warn("Git repository does not contain data for this registry (ID = {})", config.getRegistryId()); + log.warn("Git repository does not contain data for this registry (ID = {})", + config.getRegistryId()); } } - private void processSettings(ProcessingState state) { var settings = state.getCurrentRegistry().getSettings(); if (settings != null) { @@ -236,13 +205,13 @@ private void processSettings(ProcessingState state) { log.debug("Importing {}", dto); state.getStorage().setConfigProperty(dto); } catch (Exception ex) { - state.recordError("Could not import configuration property %s: %s", setting.getName(), ex.getMessage()); + state.recordError("Could not import configuration property %s: %s", setting.getName(), + ex.getMessage()); } } } } - private void processGlobalRules(ProcessingState state) { var globalRules = state.getCurrentRegistry().getGlobalRules(); if (globalRules != null) { @@ -254,13 +223,13 @@ private void processGlobalRules(ProcessingState state) { log.debug("Importing {}", e); state.getStorage().importGlobalRule(e); } catch (Exception ex) { - state.recordError("Could not import global rule %s: %s", globalRule.getType(), ex.getMessage()); + state.recordError("Could not import global rule %s: %s", globalRule.getType(), + ex.getMessage()); } } } } - private void processArtifact(ProcessingState state, GitFile artifactFile, Artifact artifact) { boolean artifactImported = false; String artifactType; @@ -306,7 +275,8 @@ private void processArtifact(ProcessingState state, GitFile artifactFile, Artifa } } catch (Exception ex) { state.recordError("Could not import artifact version '%s': %s", - artifact.getGroupId() + ":" + artifact.getId() + ":" + version.getId(), ex.getMessage()); 
+ artifact.getGroupId() + ":" + artifact.getId() + ":" + version.getId(), + ex.getMessage()); } } processArtifactRules(state, artifact); @@ -317,7 +287,6 @@ private void processArtifact(ProcessingState state, GitFile artifactFile, Artifa } } - private void processArtifactRules(ProcessingState state, Artifact artifact) { var rules = artifact.getRules(); if (rules != null) { @@ -331,27 +300,26 @@ private void processArtifactRules(ProcessingState state, Artifact artifact) { log.debug("Importing {}", e); state.getStorage().importArtifactRule(e); } catch (Exception ex) { - state.recordError("Could not import rule %s for artifact '%s': %s", - rule.getType(), artifact.getGroupId() + ":" + artifact.getId(), ex.getMessage()); + state.recordError("Could not import rule %s for artifact '%s': %s", rule.getType(), + artifact.getGroupId() + ":" + artifact.getId(), ex.getMessage()); } } } } - private Group processGroupRef(ProcessingState state, String groupName) { - var groupFiles = state.fromTypeIndex(Type.GROUP).stream() - .filter(f -> { - Group group = f.getEntityUnchecked(); - return state.isCurrentRegistryId(group.getRegistryId()) && groupName.equals(group.getId()); - }) - .collect(Collectors.toList()); + var groupFiles = state.fromTypeIndex(Type.GROUP).stream().filter(f -> { + Group group = f.getEntityUnchecked(); + return state.isCurrentRegistryId(group.getRegistryId()) && groupName.equals(group.getId()); + }).collect(Collectors.toList()); if (groupFiles.isEmpty()) { - state.recordError("Could not find group with ID %s in registry %s", groupName, state.getCurrentRegistry().getId()); + state.recordError("Could not find group with ID %s in registry %s", groupName, + state.getCurrentRegistry().getId()); return null; } else if (groupFiles.size() > 1) { - state.recordError("Multiple groups with ID %s found in registry %s: %s", groupName, state.getCurrentRegistry().getId(), groupFiles); + state.recordError("Multiple groups with ID %s found in registry %s: %s", groupName, + 
state.getCurrentRegistry().getId(), groupFiles); return null; } else { var groupFile = groupFiles.get(0); @@ -373,7 +341,6 @@ private Group processGroupRef(ProcessingState state, String groupName) { } } - private Content processContent(ProcessingState state, GitFile base, String contentRef) { var contentFile = findFileByPathRef(state, base, contentRef); if (contentFile != null) { @@ -390,9 +357,12 @@ private Content processContent(ProcessingState state, GitFile base, String conte try { // FIXME need to better determine the content type? String contentType = ContentTypes.APPLICATION_JSON; - if (dataFile.getPath().toLowerCase().endsWith(".yaml") || dataFile.getPath().toLowerCase().endsWith(".yml")) { + if (dataFile.getPath().toLowerCase().endsWith(".yaml") + || dataFile.getPath().toLowerCase().endsWith(".yml")) { contentType = ContentTypes.APPLICATION_YAML; - } else if (dataFile.getPath().toLowerCase().endsWith(".xml") || dataFile.getPath().toLowerCase().endsWith(".wsdl") || dataFile.getPath().toLowerCase().endsWith(".xsd")) { + } else if (dataFile.getPath().toLowerCase().endsWith(".xml") + || dataFile.getPath().toLowerCase().endsWith(".wsdl") + || dataFile.getPath().toLowerCase().endsWith(".xsd")) { contentType = ContentTypes.APPLICATION_XML; } else if (dataFile.getPath().toLowerCase().endsWith(".proto")) { contentType = ContentTypes.APPLICATION_PROTOBUF; @@ -405,13 +375,18 @@ private Content processContent(ProcessingState state, GitFile base, String conte e.contentId = content.getId(); e.contentHash = content.getContentHash(); e.contentBytes = data.bytes(); - content.setArtifactType(utils.determineArtifactType(typedContent, content.getArtifactType())); - e.canonicalHash = utils.getCanonicalContentHash(typedContent, content.getArtifactType(), null, null); + content.setArtifactType( + utils.determineArtifactType(typedContent, content.getArtifactType())); + e.canonicalHash = utils.getCanonicalContentHash(typedContent, + content.getArtifactType(), null, null); 
e.artifactType = content.getArtifactType(); e.contentType = contentType; - if (contentFile.getPath().toLowerCase().endsWith(".yaml") || contentFile.getPath().toLowerCase().endsWith(".yml")) { + if (contentFile.getPath().toLowerCase().endsWith(".yaml") + || contentFile.getPath().toLowerCase().endsWith(".yml")) { e.contentType = ContentTypes.APPLICATION_YAML; - } else if (contentFile.getPath().toLowerCase().endsWith(".xml") || contentFile.getPath().toLowerCase().endsWith(".wsdl") || contentFile.getPath().toLowerCase().endsWith(".xsd")) { + } else if (contentFile.getPath().toLowerCase().endsWith(".xml") + || contentFile.getPath().toLowerCase().endsWith(".wsdl") + || contentFile.getPath().toLowerCase().endsWith(".xsd")) { e.contentType = ContentTypes.APPLICATION_XML; } else if (contentFile.getPath().toLowerCase().endsWith(".proto")) { e.contentType = ContentTypes.APPLICATION_PROTOBUF; @@ -422,7 +397,8 @@ private Content processContent(ProcessingState state, GitFile base, String conte dataFile.setProcessed(true); return content; } catch (Exception ex) { - state.recordError("Could not import content %s: %s", contentFile.getPath(), ex.getMessage()); + state.recordError("Could not import content %s: %s", contentFile.getPath(), + ex.getMessage()); return null; } } else { @@ -435,7 +411,8 @@ private Content processContent(ProcessingState state, GitFile base, String conte return content; } } else { - state.recordError("Content file %s does not belong to this registry", contentFile.getPath()); + state.recordError("Content file %s does not belong to this registry", + contentFile.getPath()); return null; } } else { @@ -449,13 +426,11 @@ private Content processContent(ProcessingState state, GitFile base, String conte } } - private GitFile findFileByPathRef(ProcessingState state, GitFile base, String path) { path = concat(concat(base.getPath(), ".."), path); return state.getPathIndex().get(path); } - @PreDestroy public void close() { if (git != null) { diff --git 
a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitOpsConfigProperties.java b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitOpsConfigProperties.java index b902ad56e6..98e5beaa16 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitOpsConfigProperties.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitOpsConfigProperties.java @@ -9,8 +9,8 @@ public class GitOpsConfigProperties { @ConfigProperty(name = "apicurio.gitops.id") - @Info(category = "gitops", description = "Identifier of this Registry instance. Only data that references this identifier " + - "will be loaded.", availableSince = "3.0.0") + @Info(category = "gitops", description = "Identifier of this Registry instance. Only data that references this identifier " + + "will be loaded.", availableSince = "3.0.0") @Getter String registryId; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitOpsRegistryStorage.java b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitOpsRegistryStorage.java index 71a766f8ef..d11ac0f549 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitOpsRegistryStorage.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GitOpsRegistryStorage.java @@ -87,7 +87,6 @@ private enum State { READY_TO_WRITE, // Latest data has been published, and we are ready to write to the inactive storage } - @Override public void initialize() { log.info("Using GitOps storage"); @@ -107,7 +106,8 @@ public void initialize() { @Scheduled(concurrentExecution = SKIP, every = "{apicurio.gitops.refresh.every}") void refresh() { if (registryStorageType.equals("gitops")) { - log.debug("Running GitOps refresh. Active database is {} and state is {}.", active == green ? "green" : "blue", state); + log.debug("Running GitOps refresh. Active database is {} and state is {}.", + active == green ? 
"green" : "blue", state); switch (state) { case READY_TO_SWITCH: { try { @@ -126,7 +126,7 @@ void refresh() { throw new RuntimeException(e); } } - break; + break; case READY_TO_WRITE: { try { var updatedCommit = gitManager.poll(); @@ -153,13 +153,13 @@ void refresh() { throw new RuntimeException(e); } } - break; + break; } - log.debug("GitOps refresh finished. Active database is {} and state is {}.", active == green ? "green" : "blue", state); + log.debug("GitOps refresh finished. Active database is {} and state is {}.", + active == green ? "green" : "blue", state); } } - public T proxy(Function operation) { try { if (switchLock.readLock().tryLock(5, TimeUnit.SECONDS)) { @@ -169,14 +169,14 @@ public T proxy(Function operation) { switchLock.readLock().unlock(); } } else { - throw new RegistryStorageException("Could not acquire read lock to get the active storage within 5 seconds"); + throw new RegistryStorageException( + "Could not acquire read lock to get the active storage within 5 seconds"); } } catch (InterruptedException ex) { throw new RegistryStorageException("Could not acquire read lock to get the active storage", ex); } } - public void proxyAction(Consumer action) { try { if (switchLock.readLock().tryLock(5, TimeUnit.SECONDS)) { @@ -186,14 +186,14 @@ public void proxyAction(Consumer action) { switchLock.readLock().unlock(); } } else { - throw new RegistryStorageException("Could not acquire read lock to get the active storage within 5 seconds"); + throw new RegistryStorageException( + "Could not acquire read lock to get the active storage within 5 seconds"); } } catch (InterruptedException ex) { throw new RegistryStorageException("Could not acquire read lock to get the active storage", ex); } } - @Override public String storageName() { return "gitops"; @@ -204,30 +204,25 @@ public boolean isReady() { return true; } - @Override public boolean isAlive() { return true; } - @PreDestroy void onDestroy() { } - @Override public ContentWrapperDto getContentById(long 
contentId) { return proxy(storage -> storage.getContentById(contentId)); } - @Override public ContentWrapperDto getContentByHash(String contentHash) { return proxy(storage -> storage.getContentByHash(contentHash)); } - @Override public List getArtifactVersionsByContentId(long contentId) { return proxy(storage -> storage.getArtifactVersionsByContentId(contentId)); @@ -243,14 +238,15 @@ public Set getArtifactIds(Integer limit) { return proxy(storage -> storage.getArtifactIds(limit)); } - @Override - public ArtifactSearchResultsDto searchArtifacts(Set filters, OrderBy orderBy, OrderDirection orderDirection, int offset, int limit) { + public ArtifactSearchResultsDto searchArtifacts(Set filters, OrderBy orderBy, + OrderDirection orderDirection, int offset, int limit) { return proxy(storage -> storage.searchArtifacts(filters, orderBy, orderDirection, offset, limit)); } @Override - public VersionSearchResultsDto searchVersions(Set filters, OrderBy orderBy, OrderDirection orderDirection, int offset, int limit) throws RegistryStorageException { + public VersionSearchResultsDto searchVersions(Set filters, OrderBy orderBy, + OrderDirection orderDirection, int offset, int limit) throws RegistryStorageException { return proxy(storage -> storage.searchVersions(filters, orderBy, orderDirection, offset, limit)); } @@ -259,25 +255,23 @@ public ArtifactMetaDataDto getArtifactMetaData(String groupId, String artifactId return proxy(storage -> storage.getArtifactMetaData(groupId, artifactId)); } - @Override - public ArtifactVersionMetaDataDto getArtifactVersionMetaDataByContent(String groupId, String artifactId, boolean canonical, TypedContent content, List artifactReferences) { - return proxy(storage -> storage.getArtifactVersionMetaDataByContent(groupId, artifactId, canonical, content, artifactReferences)); + public ArtifactVersionMetaDataDto getArtifactVersionMetaDataByContent(String groupId, String artifactId, + boolean canonical, TypedContent content, List artifactReferences) { + 
return proxy(storage -> storage.getArtifactVersionMetaDataByContent(groupId, artifactId, canonical, + content, artifactReferences)); } - @Override public List getArtifactRules(String groupId, String artifactId) { return proxy(storage -> storage.getArtifactRules(groupId, artifactId)); } - @Override public RuleConfigurationDto getArtifactRule(String groupId, String artifactId, RuleType rule) { return proxy(storage -> storage.getArtifactRule(groupId, artifactId, rule)); } - @Override public List getArtifactVersions(String groupId, String artifactId) { return proxy(storage -> storage.getArtifactVersions(groupId, artifactId)); @@ -288,108 +282,95 @@ public List getArtifactVersions(String groupId, String artifactId, Retri return proxy(storage -> storage.getArtifactVersions(groupId, artifactId, behavior)); } - @Override public StoredArtifactVersionDto getArtifactVersionContent(long globalId) { return proxy(storage -> storage.getArtifactVersionContent(globalId)); } - @Override - public StoredArtifactVersionDto getArtifactVersionContent(String groupId, String artifactId, String version) { + public StoredArtifactVersionDto getArtifactVersionContent(String groupId, String artifactId, + String version) { return proxy(storage -> storage.getArtifactVersionContent(groupId, artifactId, version)); } - @Override - public ArtifactVersionMetaDataDto getArtifactVersionMetaData(String groupId, String artifactId, String version) { + public ArtifactVersionMetaDataDto getArtifactVersionMetaData(String groupId, String artifactId, + String version) { return proxy(storage -> storage.getArtifactVersionMetaData(groupId, artifactId, version)); } - + @Override public ArtifactVersionMetaDataDto getArtifactVersionMetaData(Long globalId) throws VersionNotFoundException, RegistryStorageException { return proxy(storage -> storage.getArtifactVersionMetaData(globalId)); } - @Override public List getGlobalRules() { return proxy(RegistryStorage::getGlobalRules); } - @Override public RuleConfigurationDto 
getGlobalRule(RuleType rule) { return proxy(storage -> storage.getGlobalRule(rule)); } - @Override public List getGroupIds(Integer limit) { return proxy(storage -> storage.getGroupIds(limit)); } - @Override public GroupMetaDataDto getGroupMetaData(String groupId) { return proxy(storage -> storage.getGroupMetaData(groupId)); } - @Override public void exportData(Function handler) { proxyAction(storage -> storage.exportData(handler)); } - @Override public long countArtifacts() { return proxy(RegistryStorage::countArtifacts); } - - + @Override public long countActiveArtifactVersions(String groupId, String artifactId) { return proxy(storage -> storage.countActiveArtifactVersions(groupId, artifactId)); } - @Override public long countArtifactVersions(String groupId, String artifactId) { return proxy(storage -> storage.countArtifactVersions(groupId, artifactId)); } - @Override public long countTotalArtifactVersions() { return proxy(RegistryStorage::countTotalArtifactVersions); } - @Override public List getRoleMappings() { return proxy(RegistryStorage::getRoleMappings); } - + @Override - public RoleMappingSearchResultsDto searchRoleMappings(int offset, int limit) throws RegistryStorageException { + public RoleMappingSearchResultsDto searchRoleMappings(int offset, int limit) + throws RegistryStorageException { return proxy(storage -> storage.searchRoleMappings(offset, limit)); } - @Override public RoleMappingDto getRoleMapping(String principalId) { return proxy(storage -> storage.getRoleMapping(principalId)); } - @Override public String getRoleForPrincipal(String principalId) { return proxy(storage -> storage.getRoleForPrincipal(principalId)); } - @Override public DynamicConfigPropertyDto getRawConfigProperty(String propertyName) { return proxy(storage -> storage.getRawConfigProperty(propertyName)); @@ -400,103 +381,91 @@ public List getStaleConfigProperties(Instant since) { return proxy(storage -> storage.getStaleConfigProperties(since)); } - @Override public boolean 
isContentExists(String contentHash) { return proxy(storage -> storage.isContentExists(contentHash)); } - @Override public boolean isArtifactRuleExists(String groupId, String artifactId, RuleType rule) { return false; } - @Override public boolean isGlobalRuleExists(RuleType rule) { return proxy(storage -> storage.isGlobalRuleExists(rule)); } - @Override public boolean isRoleMappingExists(String principalId) { return proxy(storage -> storage.isRoleMappingExists(principalId)); } - @Override public Map resolveReferences(List references) { return proxy(storage -> storage.resolveReferences(references)); } - @Override public Optional contentIdFromHash(String contentHash) { return proxy(storage -> storage.contentIdFromHash(contentHash)); } - @Override public boolean isArtifactExists(String groupId, String artifactId) { return proxy(storage -> storage.isArtifactExists(groupId, artifactId)); } - @Override public boolean isGroupExists(String groupId) { return proxy(storage -> storage.isGroupExists(groupId)); } - @Override - public List getContentIdsReferencingArtifactVersion(String groupId, String artifactId, String version) { - return proxy(storage -> storage.getContentIdsReferencingArtifactVersion(groupId, artifactId, version)); + public List getContentIdsReferencingArtifactVersion(String groupId, String artifactId, + String version) { + return proxy( + storage -> storage.getContentIdsReferencingArtifactVersion(groupId, artifactId, version)); } - @Override - public List getGlobalIdsReferencingArtifactVersion(String groupId, String artifactId, String version) { + public List getGlobalIdsReferencingArtifactVersion(String groupId, String artifactId, + String version) { return proxy(storage -> storage.getGlobalIdsReferencingArtifactVersion(groupId, artifactId, version)); } - @Override - public List getInboundArtifactReferences(String groupId, String artifactId, String version) { + public List getInboundArtifactReferences(String groupId, String artifactId, + String version) { 
return proxy(storage -> storage.getInboundArtifactReferences(groupId, artifactId, version)); } - @Override public boolean isArtifactVersionExists(String groupId, String artifactId, String version) { return proxy(storage -> storage.isArtifactVersionExists(groupId, artifactId, version)); } - @Override - public GroupSearchResultsDto searchGroups(Set filters, OrderBy orderBy, OrderDirection orderDirection, Integer offset, Integer limit) { + public GroupSearchResultsDto searchGroups(Set filters, OrderBy orderBy, + OrderDirection orderDirection, Integer offset, Integer limit) { return proxy(storage -> storage.searchGroups(filters, orderBy, orderDirection, offset, limit)); } - @Override public List getArtifactVersionComments(String groupId, String artifactId, String version) { return proxy(storage -> storage.getArtifactVersionComments(groupId, artifactId, version)); } - @Override public DynamicConfigPropertyDto getConfigProperty(String propertyName) { return proxy(storage -> storage.getConfigProperty(propertyName)); } - @Override public List getConfigProperties() { return proxy(DynamicConfigStorage::getConfigProperties); } - @Override public BranchMetaDataDto getBranchMetaData(GA ga, BranchId branchId) { return proxy(storage -> storage.getBranchMetaData(ga, branchId)); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GreenDatasourceProducer.java b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GreenDatasourceProducer.java index 096405f606..c8965853a5 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GreenDatasourceProducer.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/GreenDatasourceProducer.java @@ -59,8 +59,6 @@ public AgroalDataSource produceDatasource() throws SQLException { props.put(AgroalPropertiesReader.CREDENTIAL, password); props.put(AgroalPropertiesReader.PROVIDER_CLASS_NAME, databaseKind.getDriverClassName()); - return AgroalDataSource.from(new AgroalPropertiesReader() - 
.readProperties(props) - .get()); + return AgroalDataSource.from(new AgroalPropertiesReader().readProperties(props).get()); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/ProcessingState.java b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/ProcessingState.java index 0f50fc3f1b..3b0e9aaa19 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/ProcessingState.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/ProcessingState.java @@ -38,7 +38,6 @@ public class ProcessingState { private final Map> typeIndex = new HashMap<>(); - public ProcessingState(RegistryStorage storage) { this.storage = storage; } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/Any.java b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/Any.java index d40e49b8c3..b80a84812f 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/Any.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/Any.java @@ -43,15 +43,11 @@ public static Optional from(ProcessingState state, String path, ContentHand if (type.isPresent()) { try { var entity = MAPPER.treeToValue(raw, type.get().getKlass()); - var any = Any.builder() - .raw(raw) - .type(type.get()) - .entity(entity) - .build(); + var any = Any.builder().raw(raw).type(type.get()).entity(entity).build(); return Optional.of(any); } catch (JsonProcessingException ex) { - state.recordError("Could not parse file %s as %s: %s", - path, type.get(), ex.getOriginalMessage()); + state.recordError("Could not parse file %s as %s: %s", path, type.get(), + ex.getOriginalMessage()); return Optional.empty(); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/GitFile.java b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/GitFile.java index 5fb9860da7..33f1bf7823 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/GitFile.java +++ 
b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/GitFile.java @@ -40,11 +40,8 @@ public static GitFile create(ProcessingState state, String path, InputStream str var data = ContentHandle.create(stream); - return GitFile.builder() - .path(FilenameUtils.normalize(path)) - .data(data) - .any(Any.from(state, path, data)) - .build(); + return GitFile.builder().path(FilenameUtils.normalize(path)).data(data) + .any(Any.from(state, path, data)).build(); } public boolean isType(Type type) { diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/Type.java b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/Type.java index 5a024e7f44..1797bb73f3 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/Type.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/gitops/model/Type.java @@ -14,10 +14,8 @@ @ToString(onlyExplicitlyIncluded = true) public enum Type { - REGISTRY("registry-v0", Registry.class), - GROUP("group-v0", Group.class), - ARTIFACT("artifact-v0", Artifact.class), - CONTENT("content-v0", Content.class); + REGISTRY("registry-v0", Registry.class), GROUP("group-v0", Group.class), ARTIFACT("artifact-v0", + Artifact.class), CONTENT("content-v0", Content.class); @ToString.Include private final String type; @@ -25,9 +23,7 @@ public enum Type { private final Class klass; public static Optional from(String type) { - return Arrays.stream(values()) - .filter(t -> t.type != null && t.type.equals(type)) - .findAny(); + return Arrays.stream(values()).filter(t -> t.type != null && t.type.equals(type)).findAny(); } Type(String type, Class klass) { diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/AbstractMessage.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/AbstractMessage.java index c1740b5f69..b68983ef15 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/AbstractMessage.java +++ 
b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/AbstractMessage.java @@ -7,9 +7,7 @@ public abstract class AbstractMessage implements KafkaSqlMessage { */ @Override public KafkaSqlMessageKey getKey() { - return KafkaSqlMessageKey.builder() - .messageType(getClass().getSimpleName()) - .build(); + return KafkaSqlMessageKey.builder().messageType(getClass().getSimpleName()).build(); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlConfiguration.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlConfiguration.java index 56e2b18c75..f31d3261af 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlConfiguration.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlConfiguration.java @@ -4,15 +4,26 @@ public interface KafkaSqlConfiguration { String bootstrapServers(); + String topic(); + String snapshotsTopic(); + String snapshotEvery(); + String snapshotLocation(); + Properties topicProperties(); + boolean isTopicAutoCreate(); + Integer pollTimeout(); + Integer responseTimeout(); + Properties producerProperties(); + Properties consumerProperties(); + Properties adminProperties(); } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlCoordinator.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlCoordinator.java index dc335ece56..7b14b9868c 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlCoordinator.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlCoordinator.java @@ -11,9 +11,9 @@ import java.util.concurrent.TimeUnit; /** - * Coordinates "write" responses across threads in the Kafka-SQL artifactStore implementation. Basically this is used - * to communicate between the Kafka consumer thread and the waiting HTTP/API thread, where the HTTP thread is - * waiting for an operation to be completed by the Kafka consumer thread. 
+ * Coordinates "write" responses across threads in the Kafka-SQL artifactStore implementation. Basically this + * is used to communicate between the Kafka consumer thread and the waiting HTTP/API thread, where the HTTP + * thread is waiting for an operation to be completed by the Kafka consumer thread. */ @ApplicationScoped public class KafkaSqlCoordinator { @@ -35,9 +35,9 @@ public UUID createUUID() { } /** - * Waits for a response to the operation with the given UUID. There is a countdown latch for each operation. The - * caller waiting for the response will wait for the countdown to happen and then proceed. We also remove - * the latch from the Map here since it's not needed anymore. + * Waits for a response to the operation with the given UUID. There is a countdown latch for each + * operation. The caller waiting for the response will wait for the countdown to happen and then proceed. + * We also remove the latch from the Map here since it's not needed anymore. * * @param uuid * @throws InterruptedException @@ -54,33 +54,35 @@ public Object waitForResponse(UUID uuid) { } return rval; } catch (InterruptedException e) { - throw new RegistryException("[KafkaSqlCoordinator] Thread interrupted waiting for a Kafka Sql response.", e); + throw new RegistryException( + "[KafkaSqlCoordinator] Thread interrupted waiting for a Kafka Sql response.", e); } finally { latches.remove(uuid); } } /** - * Countdown the latch for the given UUID. This will wake up the thread waiting for the response - * so that it can proceed. + * Countdown the latch for the given UUID. This will wake up the thread waiting for the response so that + * it can proceed. 
+ * * @param uuid * @param returnValue */ public void notifyResponse(UUID uuid, Object returnValue) { - //we are re-using the topic from a streams based registry instance + // we are re-using the topic from a streams based registry instance if (uuid == null) { return; } // If there is no countdown latch, then there is no HTTP thread waiting for - // a response. This means one of two possible things: - // 1) We're in a cluster and the HTTP thread is on another node - // 2) We're starting up and consuming all the old journal entries + // a response. This means one of two possible things: + // 1) We're in a cluster and the HTTP thread is on another node + // 2) We're starting up and consuming all the old journal entries if (!latches.containsKey(uuid)) { return; } - // Otherwise, put the return value in the Map and countdown the latch. The latch + // Otherwise, put the return value in the Map and countdown the latch. The latch // countdown will notify the HTTP thread that the operation is complete and there is // a return value waiting for it. 
if (returnValue == null) { diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlFactory.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlFactory.java index 85cdd2890c..5f83017e7e 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlFactory.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlFactory.java @@ -75,24 +75,18 @@ public class KafkaSqlFactory { Integer responseTimeout; @Inject - @RegistryProperties( - value = { "apicurio.kafka.common", "apicurio.kafkasql.producer" }, - empties = { "ssl.endpoint.identification.algorithm=" } - ) + @RegistryProperties(value = { "apicurio.kafka.common", "apicurio.kafkasql.producer" }, empties = { + "ssl.endpoint.identification.algorithm=" }) Properties producerProperties; @Inject - @RegistryProperties( - value = { "apicurio.kafka.common", "apicurio.kafkasql.consumer" }, - empties = { "ssl.endpoint.identification.algorithm=" } - ) + @RegistryProperties(value = { "apicurio.kafka.common", "apicurio.kafkasql.consumer" }, empties = { + "ssl.endpoint.identification.algorithm=" }) Properties consumerProperties; @Inject - @RegistryProperties( - value = { "apicurio.kafka.common", "apicurio.kafkasql.admin" }, - empties = { "ssl.endpoint.identification.algorithm=" } - ) + @RegistryProperties(value = { "apicurio.kafka.common", "apicurio.kafkasql.admin" }, empties = { + "ssl.endpoint.identification.algorithm=" }) Properties adminProperties; @ConfigProperty(name = "apicurio.kafkasql.security.sasl.enabled", defaultValue = "false") @@ -319,22 +313,24 @@ public KafkaConsumer createKafkaSnapshotsConsumer() { private void tryToConfigureSecurity(Properties props) { protocol.ifPresent(s -> props.putIfAbsent("security.protocol", s)); - //Try to configure sasl for authentication + // Try to configure sasl for authentication if (saslEnabled) { - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, 
String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" " + - " oauth.client.secret=\"%s\" " + - " oauth.token.endpoint.uri=\"%s\" ;", clientId, clientSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + clientId, clientSecret, tokenEndpoint)); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, saslMechanism); props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, loginCallbackHandler); } - //Try to configure the trustStore, if specified + // Try to configure the trustStore, if specified if (trustStoreLocation.isPresent() && trustStorePassword.isPresent() && trustStoreType.isPresent()) { props.putIfAbsent(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, trustStoreType.get()); props.putIfAbsent(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, trustStoreLocation.get()); props.putIfAbsent(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, trustStorePassword.get()); } - //Finally, try to configure the keystore, if specified + // Finally, try to configure the keystore, if specified if (keyStoreLocation.isPresent() && keyStorePassword.isPresent() && keyStoreType.isPresent()) { props.putIfAbsent(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, keyStoreType.get()); props.putIfAbsent(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, keyStoreLocation.get()); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlMessageKey.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlMessageKey.java index ada7ac8355..ad85467d56 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlMessageKey.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlMessageKey.java @@ -1,7 +1,5 @@ package io.apicurio.registry.storage.impl.kafkasql; 
-import org.apache.kafka.common.Uuid; - import lombok.AllArgsConstructor; import lombok.Builder; import lombok.EqualsAndHashCode; @@ -9,6 +7,7 @@ import lombok.NoArgsConstructor; import lombok.Setter; import lombok.ToString; +import org.apache.kafka.common.Uuid; /** * When the KSQL artifactStore publishes a message to its Kafka topic, the message key will be a class that @@ -19,10 +18,10 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class KafkaSqlMessageKey { - + @Builder.Default private String uuid = Uuid.randomUuid().toString(); private String messageType; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlRegistryStorage.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlRegistryStorage.java index d1b42aafca..c105f01090 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlRegistryStorage.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlRegistryStorage.java @@ -31,9 +31,9 @@ import io.apicurio.registry.types.RuleType; import io.apicurio.registry.utils.ConcurrentUtil; import io.apicurio.registry.utils.impexp.ArtifactEntity; -import io.apicurio.registry.utils.impexp.BranchEntity; import io.apicurio.registry.utils.impexp.ArtifactRuleEntity; import io.apicurio.registry.utils.impexp.ArtifactVersionEntity; +import io.apicurio.registry.utils.impexp.BranchEntity; import io.apicurio.registry.utils.impexp.CommentEntity; import io.apicurio.registry.utils.impexp.ContentEntity; import io.apicurio.registry.utils.impexp.GlobalRuleEntity; @@ -61,9 +61,9 @@ import java.util.*; /** - * An implementation of a registry artifactStore that extends the basic SQL artifactStore but federates 'write' operations - * to other nodes in a cluster using a Kafka topic. As a result, all reads are performed locally but all - * writes are published to a topic for consumption by all nodes. 
+ * An implementation of a registry artifactStore that extends the basic SQL artifactStore but federates + * 'write' operations to other nodes in a cluster using a Kafka topic. As a result, all reads are performed + * locally but all writes are published to a topic for consumption by all nodes. */ @ApplicationScoped @PersistenceExceptionLivenessApply @@ -112,7 +112,7 @@ public class KafkaSqlRegistryStorage extends RegistryStorageDecoratorReadOnlyBas private volatile boolean stopped = true; private volatile boolean snapshotProcessed = false; - //The snapshot id used to determine if this replica must process a snapshot message + // The snapshot id used to determine if this replica must process a snapshot message private volatile String lastTriggeredSnapshot = null; @Override @@ -124,20 +124,20 @@ public String storageName() { public void initialize() { log.info("Using Kafka-SQL artifactStore."); - //First, if needed create the Kafka topics. + // First, if needed create the Kafka topics. if (configuration.isTopicAutoCreate()) { autoCreateTopics(); } - //Try to restore the internal database from a snapshot + // Try to restore the internal database from a snapshot final long bootstrapStart = System.currentTimeMillis(); String snapshotId = consumeSnapshotsTopic(snapshotsConsumer); - //Once the topics are created, and the snapshots processed, initialize the internal SQL Storage. + // Once the topics are created, and the snapshots processed, initialize the internal SQL Storage. sqlStore.initialize(); setDelegate(sqlStore); - //Once the SQL storage has been initialized, start the Kafka consumer thread. + // Once the SQL storage has been initialized, start the Kafka consumer thread. 
log.info("SQL store initialized, starting consumer thread."); startConsumerThread(journalConsumer, snapshotId, bootstrapStart); } @@ -166,38 +166,41 @@ void onDestroy() { private void autoCreateTopics() { Set topicNames = Set.of(configuration.topic(), configuration.snapshotsTopic()); Map topicProperties = new HashMap<>(); - configuration.topicProperties().forEach((key, value) -> topicProperties.put(key.toString(), value.toString())); + configuration.topicProperties() + .forEach((key, value) -> topicProperties.put(key.toString(), value.toString())); Properties adminProperties = configuration.adminProperties(); - adminProperties.putIfAbsent(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, configuration.bootstrapServers()); + adminProperties.putIfAbsent(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, + configuration.bootstrapServers()); try { KafkaUtil.createTopics(adminProperties, topicNames, topicProperties); - } - catch (TopicExistsException e) { + } catch (TopicExistsException e) { log.info("Topic {} already exists, skipping.", configuration.topic()); } } /** - * Consume the snapshots topic, looking for the most recent snapshots in the topic. Once found, it restores the internal h2 database using the snapshot's content. - * WARNING: This has the limitation of processing the first 500 snapshots, which should be enough for most deployments. + * Consume the snapshots topic, looking for the most recent snapshots in the topic. Once found, it + * restores the internal h2 database using the snapshot's content. WARNING: This has the limitation of + * processing the first 500 snapshots, which should be enough for most deployments. 
*/ private String consumeSnapshotsTopic(KafkaConsumer snapshotsConsumer) { // Subscribe to the snapshots topic Collection topics = Collections.singleton(configuration.snapshotsTopic()); snapshotsConsumer.subscribe(topics); - ConsumerRecords records = snapshotsConsumer.poll(Duration.ofMillis(configuration.pollTimeout())); + ConsumerRecords records = snapshotsConsumer + .poll(Duration.ofMillis(configuration.pollTimeout())); List> snapshots = new ArrayList<>(); String snapshotRecordKey = null; if (records != null && !records.isEmpty()) { - //collect all snapshots into a list + // collect all snapshots into a list records.forEach(snapshots::add); - //sort snapshots by timestamp + // sort snapshots by timestamp snapshots.sort(Comparator.comparingLong(ConsumerRecord::timestamp)); Path mostRecentSnapshotPath = null; for (ConsumerRecord snapshotFound : snapshots) { - //Restore database from snapshot + // Restore database from snapshot try { String path = snapshotFound.value(); if (null != path && !path.isBlank() && Files.exists(Path.of(snapshotFound.value()))) { @@ -205,13 +208,15 @@ private String consumeSnapshotsTopic(KafkaConsumer snapshotsCons snapshotRecordKey = snapshotFound.key(); mostRecentSnapshotPath = Path.of(snapshotFound.value()); } - } - catch (IllegalArgumentException ex) { - log.warn("Snapshot with path {} ignored, the snapshot is likely invalid or cannot be found", snapshotFound.value()); + } catch (IllegalArgumentException ex) { + log.warn( + "Snapshot with path {} ignored, the snapshot is likely invalid or cannot be found", + snapshotFound.value()); } } - //Here we have the most recent snapshot that we can find, try to restore the internal database from it. + // Here we have the most recent snapshot that we can find, try to restore the internal database + // from it. 
if (null != mostRecentSnapshotPath) { log.info("Restoring snapshot {} to the internal database...", mostRecentSnapshotPath); sqlStore.restoreFromSnapshot(mostRecentSnapshotPath.toString()); @@ -222,11 +227,12 @@ private String consumeSnapshotsTopic(KafkaConsumer snapshotsCons } /** - * Start the KSQL Kafka consumer thread which is responsible for subscribing to the kafka topic, - * consuming JournalRecord entries found on that topic, and applying those journal entries to - * the internal data model. + * Start the KSQL Kafka consumer thread which is responsible for subscribing to the kafka topic, consuming + * JournalRecord entries found on that topic, and applying those journal entries to the internal data + * model. */ - private void startConsumerThread(final KafkaConsumer consumer, String snapshotId, long bootstrapStart) { + private void startConsumerThread(final KafkaConsumer consumer, + String snapshotId, long bootstrapStart) { log.info("Starting KSQL consumer thread on topic: {}", configuration.topic()); log.info("Bootstrap servers: {}", configuration.bootstrapServers()); @@ -242,37 +248,46 @@ private void startConsumerThread(final KafkaConsumer records = consumer.poll(Duration.ofMillis(configuration.pollTimeout())); + final ConsumerRecords records = consumer + .poll(Duration.ofMillis(configuration.pollTimeout())); if (records != null && !records.isEmpty()) { log.debug("Consuming {} journal records.", records.count()); if (null != snapshotId && !snapshotProcessed) { - //If there is a snapshot key present, we process (and discard) all the messages until we find the snapshot marker that corresponds to the snapshot key. - Iterator> it = records.iterator(); + // If there is a snapshot key present, we process (and discard) all the messages + // until we find the snapshot marker that corresponds to the snapshot key. 
+ Iterator> it = records + .iterator(); while (it.hasNext() && !snapshotProcessed) { ConsumerRecord record = it.next(); if (processSnapshot(snapshotId, record)) { - log.debug("Snapshot marker found {} the new messages will be applied on top of the snapshot data.", record.key()); + log.debug( + "Snapshot marker found {} the new messages will be applied on top of the snapshot data.", + record.key()); snapshotProcessed = true; break; - } - else { - log.debug("Discarding message with key {} as it was sent before a newer snapshot was created.", record.key()); + } else { + log.debug( + "Discarding message with key {} as it was sent before a newer snapshot was created.", + record.key()); } } - //If the snapshot marker has not been found, continue with message skipping until we find it. + // If the snapshot marker has not been found, continue with message skipping until + // we find it. if (snapshotProcessed) { - //Once the snapshot marker message has been found, we can process the rest of the messages as usual, applying the new changes on top of the existing ones in the snapshot. + // Once the snapshot marker message has been found, we can process the rest of + // the messages as usual, applying the new changes on top of the existing ones + // in the snapshot. while (it.hasNext()) { ConsumerRecord record = it.next(); processRecord(record, bootstrapId, bootstrapStart); } } - } - else { - //If there is no snapshot, simply process the existing messages in the kafka topic as usual. + } else { + // If there is no snapshot, simply process the existing messages in the kafka + // topic as usual. 
records.forEach(record -> processRecord(record, bootstrapId, bootstrapStart)); } } @@ -286,33 +301,39 @@ private void startConsumerThread(final KafkaConsumer record) { - return record.value() instanceof CreateSnapshot1Message && snapshotId.equals(((CreateSnapshot1Message) record.value()).getSnapshotId()); + private boolean processSnapshot(String snapshotId, + ConsumerRecord record) { + return record.value() instanceof CreateSnapshot1Message + && snapshotId.equals(((CreateSnapshot1Message) record.value()).getSnapshotId()); } - private void processRecord(ConsumerRecord record, String bootstrapId, long bootstrapStart) { + private void processRecord(ConsumerRecord record, String bootstrapId, + long bootstrapStart) { // If the key is null, we couldn't deserialize the message if (record.key() == null) { log.warn("Discarded an unreadable/unrecognized Kafka message."); return; } - // If the key is a Bootstrap key, then we have processed all messages and can set bootstrapped to 'true' + // If the key is a Bootstrap key, then we have processed all messages and can set bootstrapped to + // 'true' if ("Bootstrap".equals(record.key().getMessageType())) { KafkaSqlMessageKey bkey = (KafkaSqlMessageKey) record.key(); if (bkey.getUuid().equals(bootstrapId)) { this.bootstrapped = true; - storageEvent.fireAsync(StorageEvent.builder() - .type(StorageEventType.READY) - .build()); - log.info("KafkaSQL storage bootstrapped in {} ms.", System.currentTimeMillis() - bootstrapStart); + storageEvent.fireAsync(StorageEvent.builder().type(StorageEventType.READY).build()); + log.info("KafkaSQL storage bootstrapped in {} ms.", + System.currentTimeMillis() - bootstrapStart); } return; } - // If the key is a CreateSnapshotMessage key, but this replica does not have the snapshotId, it means that it wasn't triggered here, so just skip the message. 
- if (record.value() instanceof CreateSnapshot1Message && !((CreateSnapshot1Message) record.value()).getSnapshotId().equals(lastTriggeredSnapshot)) { - log.debug("Snapshot trigger message with id {} being skipped since this replica did not trigger the creation.", + // If the key is a CreateSnapshotMessage key, but this replica does not have the snapshotId, it means + // that it wasn't triggered here, so just skip the message. + if (record.value() instanceof CreateSnapshot1Message + && !((CreateSnapshot1Message) record.value()).getSnapshotId().equals(lastTriggeredSnapshot)) { + log.debug( + "Snapshot trigger message with id {} being skipped since this replica did not trigger the creation.", ((CreateSnapshot1Message) record.value()).getSnapshotId()); return; } @@ -324,7 +345,8 @@ private void processRecord(ConsumerRecord r return; } - // TODO instead of processing the journal record directly on the consumer thread, instead queue them and have *another* thread process the queue + // TODO instead of processing the journal record directly on the consumer thread, instead queue them + // and have *another* thread process the queue kafkaSqlSink.processMessage(record); } @@ -349,14 +371,14 @@ public void deleteConfigProperty(String propertyName) { } @Override - public Pair createArtifact(String groupId, String artifactId, - String artifactType, EditableArtifactMetaDataDto artifactMetaData, String version, - ContentWrapperDto versionContent, - EditableVersionMetaDataDto versionMetaData, List versionBranches) - throws RegistryStorageException { + public Pair createArtifact(String groupId, + String artifactId, String artifactType, EditableArtifactMetaDataDto artifactMetaData, + String version, ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, + List versionBranches) throws RegistryStorageException { String content = versionContent != null ? versionContent.getContent().content() : null; String contentType = versionContent != null ? 
versionContent.getContentType() : null; - List references = versionContent != null ? versionContent.getReferences() : null; + List references = versionContent != null ? versionContent.getReferences() + : null; var message = new CreateArtifact8Message(groupId, artifactId, artifactType, artifactMetaData, version, contentType, content, references, versionMetaData, versionBranches); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); @@ -368,7 +390,8 @@ public Pair createArtifact(Stri */ @SuppressWarnings("unchecked") @Override - public List deleteArtifact(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException { + public List deleteArtifact(String groupId, String artifactId) + throws ArtifactNotFoundException, RegistryStorageException { var message = new DeleteArtifact2Message(groupId, artifactId); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); return (List) coordinator.waitForResponse(uuid); @@ -386,30 +409,32 @@ public void deleteArtifacts(String groupId) throws RegistryStorageException { @Override public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, - String artifactType, ContentWrapperDto contentDto, EditableVersionMetaDataDto metaData, List branches) - throws RegistryStorageException { + String artifactType, ContentWrapperDto contentDto, EditableVersionMetaDataDto metaData, + List branches) throws RegistryStorageException { String content = contentDto != null ? contentDto.getContent().content() : null; String contentType = contentDto != null ? contentDto.getContentType() : null; List references = contentDto != null ? 
contentDto.getReferences() : null; - var message = new CreateArtifactVersion7Message(groupId, artifactId, version, artifactType, contentType, - content, references, metaData, branches); + var message = new CreateArtifactVersion7Message(groupId, artifactId, version, artifactType, + contentType, content, references, metaData, branches); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); return (ArtifactVersionMetaDataDto) coordinator.waitForResponse(uuid); } /** - * @see io.apicurio.registry.storage.RegistryStorage#updateArtifactMetaData(java.lang.String, java.lang.String, io.apicurio.registry.storage.dto.EditableArtifactMetaDataDto) + * @see io.apicurio.registry.storage.RegistryStorage#updateArtifactMetaData(java.lang.String, + * java.lang.String, io.apicurio.registry.storage.dto.EditableArtifactMetaDataDto) */ @Override public void updateArtifactMetaData(String groupId, String artifactId, - EditableArtifactMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException { + EditableArtifactMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException { var message = new UpdateArtifactMetaData3Message(groupId, artifactId, metaData); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); coordinator.waitForResponse(uuid); } /** - * @see io.apicurio.registry.storage.RegistryStorage#deleteArtifactRules(java.lang.String, java.lang.String) + * @see io.apicurio.registry.storage.RegistryStorage#deleteArtifactRules(java.lang.String, + * java.lang.String) */ @Override public void deleteArtifactRules(String groupId, String artifactId) @@ -420,10 +445,13 @@ public void deleteArtifactRules(String groupId, String artifactId) } /** - * @see io.apicurio.registry.storage.RegistryStorage#updateArtifactRule(java.lang.String, java.lang.String, io.apicurio.registry.types.RuleType, io.apicurio.registry.storage.dto.RuleConfigurationDto) + * @see io.apicurio.registry.storage.RegistryStorage#updateArtifactRule(java.lang.String, + * 
java.lang.String, io.apicurio.registry.types.RuleType, + * io.apicurio.registry.storage.dto.RuleConfigurationDto) */ @Override - public void updateArtifactRule(String groupId, String artifactId, RuleType rule, RuleConfigurationDto config) + public void updateArtifactRule(String groupId, String artifactId, RuleType rule, + RuleConfigurationDto config) throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException { var message = new UpdateArtifactRule4Message(groupId, artifactId, rule, config); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); @@ -431,7 +459,8 @@ public void updateArtifactRule(String groupId, String artifactId, RuleType rule, } /** - * @see io.apicurio.registry.storage.RegistryStorage#deleteArtifactRule(java.lang.String, java.lang.String, io.apicurio.registry.types.RuleType) + * @see io.apicurio.registry.storage.RegistryStorage#deleteArtifactRule(java.lang.String, + * java.lang.String, io.apicurio.registry.types.RuleType) */ @Override public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) @@ -442,7 +471,8 @@ public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) } /** - * @see io.apicurio.registry.storage.RegistryStorage#deleteArtifactVersion(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.storage.RegistryStorage#deleteArtifactVersion(java.lang.String, + * java.lang.String, java.lang.String) */ @Override public void deleteArtifactVersion(String groupId, String artifactId, String version) @@ -453,10 +483,12 @@ public void deleteArtifactVersion(String groupId, String artifactId, String vers } /** - * @see io.apicurio.registry.storage.RegistryStorage#updateArtifactVersionMetaData(java.lang.String, java.lang.String, java.lang.String, io.apicurio.registry.storage.dto.EditableVersionMetaDataDto) + * @see io.apicurio.registry.storage.RegistryStorage#updateArtifactVersionMetaData(java.lang.String, + * java.lang.String, java.lang.String, 
io.apicurio.registry.storage.dto.EditableVersionMetaDataDto) */ @Override - public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, EditableVersionMetaDataDto metaData) + public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, + EditableVersionMetaDataDto metaData) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException { var message = new UpdateArtifactVersionMetaData4Message(groupId, artifactId, version, metaData); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); @@ -464,7 +496,8 @@ public void updateArtifactVersionMetaData(String groupId, String artifactId, Str } /** - * @see io.apicurio.registry.storage.RegistryStorage#createGlobalRule(io.apicurio.registry.types.RuleType, io.apicurio.registry.storage.dto.RuleConfigurationDto) + * @see io.apicurio.registry.storage.RegistryStorage#createGlobalRule(io.apicurio.registry.types.RuleType, + * io.apicurio.registry.storage.dto.RuleConfigurationDto) */ @Override public void createGlobalRule(RuleType rule, RuleConfigurationDto config) @@ -485,7 +518,8 @@ public void deleteGlobalRules() throws RegistryStorageException { } /** - * @see io.apicurio.registry.storage.RegistryStorage#updateGlobalRule(io.apicurio.registry.types.RuleType, io.apicurio.registry.storage.dto.RuleConfigurationDto) + * @see io.apicurio.registry.storage.RegistryStorage#updateGlobalRule(io.apicurio.registry.types.RuleType, + * io.apicurio.registry.storage.dto.RuleConfigurationDto) */ @Override public void updateGlobalRule(RuleType rule, RuleConfigurationDto config) @@ -509,7 +543,8 @@ public void deleteGlobalRule(RuleType rule) throws RuleNotFoundException, Regist * @see io.apicurio.registry.storage.RegistryStorage#createGroup(io.apicurio.registry.storage.dto.GroupMetaDataDto) */ @Override - public void createGroup(GroupMetaDataDto group) throws GroupAlreadyExistsException, RegistryStorageException { + public void 
createGroup(GroupMetaDataDto group) + throws GroupAlreadyExistsException, RegistryStorageException { var message = new CreateGroup1Message(group); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); coordinator.waitForResponse(uuid); @@ -526,7 +561,8 @@ public void deleteGroup(String groupId) throws GroupNotFoundException, RegistryS } /** - * @see io.apicurio.registry.storage.RegistryStorage#updateGroupMetaData(java.lang.String, io.apicurio.registry.storage.dto.EditableGroupMetaDataDto) + * @see io.apicurio.registry.storage.RegistryStorage#updateGroupMetaData(java.lang.String, + * io.apicurio.registry.storage.dto.EditableGroupMetaDataDto) */ @Override public void updateGroupMetaData(String groupId, EditableGroupMetaDataDto dto) { @@ -536,30 +572,32 @@ public void updateGroupMetaData(String groupId, EditableGroupMetaDataDto dto) { } /** - * @see io.apicurio.registry.storage.RegistryStorage#importData(io.apicurio.registry.storage.impexp.EntityInputStream, boolean, boolean) + * @see io.apicurio.registry.storage.RegistryStorage#importData(io.apicurio.registry.storage.impexp.EntityInputStream, + * boolean, boolean) */ @Override public void importData(EntityInputStream entities, boolean preserveGlobalId, boolean preserveContentId) throws RegistryStorageException { - DataImporter dataImporter = new SqlDataImporter(log, utils, this, preserveGlobalId, preserveContentId); + DataImporter dataImporter = new SqlDataImporter(log, utils, this, preserveGlobalId, + preserveContentId); dataImporter.importData(entities, () -> { // Because importing just pushes a bunch of Kafka messages, we may need to - // wait for a few seconds before we send the reset messages. Due to partitioning, + // wait for a few seconds before we send the reset messages. Due to partitioning, // we can't guarantee ordering of these next two messages, and we NEED them to // be consumed after all the import messages. 
// TODO We can wait until the last message is read (a specific one), // or create a new message type for this purpose (a sync message). try { Thread.sleep(2000); - } - catch (Exception e) { + } catch (Exception e) { // Noop } }); } /** - * @see io.apicurio.registry.storage.RegistryStorage#createRoleMapping(java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.storage.RegistryStorage#createRoleMapping(java.lang.String, java.lang.String, + * java.lang.String) */ @Override public void createRoleMapping(String principalId, String role, String principalName) @@ -630,31 +668,38 @@ public void deleteAllExpiredDownloads() throws RegistryStorageException { } /** - * @see io.apicurio.registry.storage.RegistryStorage#createArtifactVersionComment(java.lang.String, java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.storage.RegistryStorage#createArtifactVersionComment(java.lang.String, + * java.lang.String, java.lang.String, java.lang.String) */ @Override - public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, String value) { + public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, + String value) { var message = new CreateArtifactVersionComment4Message(groupId, artifactId, version, value); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); return (CommentDto) coordinator.waitForResponse(uuid); } /** - * @see io.apicurio.registry.storage.RegistryStorage#deleteArtifactVersionComment(java.lang.String, java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.storage.RegistryStorage#deleteArtifactVersionComment(java.lang.String, + * java.lang.String, java.lang.String, java.lang.String) */ @Override - public void deleteArtifactVersionComment(String groupId, String artifactId, String version, String commentId) { + public void deleteArtifactVersionComment(String groupId, String artifactId, String 
version, + String commentId) { var message = new DeleteArtifactVersionComment4Message(groupId, artifactId, version, commentId); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); coordinator.waitForResponse(uuid); } /** - * @see io.apicurio.registry.storage.RegistryStorage#updateArtifactVersionComment(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String) + * @see io.apicurio.registry.storage.RegistryStorage#updateArtifactVersionComment(java.lang.String, + * java.lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override - public void updateArtifactVersionComment(String groupId, String artifactId, String version, String commentId, String value) { - var message = new UpdateArtifactVersionComment5Message(groupId, artifactId, version, commentId, value); + public void updateArtifactVersionComment(String groupId, String artifactId, String version, + String commentId, String value) { + var message = new UpdateArtifactVersionComment5Message(groupId, artifactId, version, commentId, + value); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); coordinator.waitForResponse(uuid); } @@ -798,7 +843,8 @@ public void importBranch(BranchEntity entity) { } /** - * @see io.apicurio.registry.storage.RegistryStorage#updateContentCanonicalHash(java.lang.String, long, java.lang.String) + * @see io.apicurio.registry.storage.RegistryStorage#updateContentCanonicalHash(java.lang.String, long, + * java.lang.String) */ @Override public void updateContentCanonicalHash(String newCanonicalHash, long contentId, String contentHash) { @@ -809,46 +855,53 @@ public void updateContentCanonicalHash(String newCanonicalHash, long contentId, @Override public void appendVersionToBranch(GA ga, BranchId branchId, VersionId version) { - var message = new AppendVersionToBranch3Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), branchId.getRawBranchId(), version.getRawVersionId()); + var message = new 
AppendVersionToBranch3Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), + branchId.getRawBranchId(), version.getRawVersionId()); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); coordinator.waitForResponse(uuid); } @Override public void updateBranchMetaData(GA ga, BranchId branchId, EditableBranchMetaDataDto dto) { - var message = new UpdateBranchMetaData3Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), branchId.getRawBranchId(), dto); + var message = new UpdateBranchMetaData3Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), + branchId.getRawBranchId(), dto); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); coordinator.waitForResponse(uuid); } @Override public void replaceBranchVersions(GA ga, BranchId branchId, List versions) { - var message = new ReplaceBranchVersions3Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), branchId.getRawBranchId(), - versions.stream().map(VersionId::getRawVersionId).toList()); + var message = new ReplaceBranchVersions3Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), + branchId.getRawBranchId(), versions.stream().map(VersionId::getRawVersionId).toList()); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); coordinator.waitForResponse(uuid); } @Override - public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, List versions) { - var message = new CreateBranch4Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), branchId.getRawBranchId(), description, versions); + public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, + List versions) { + var message = new CreateBranch4Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), + branchId.getRawBranchId(), description, versions); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); return (BranchMetaDataDto) coordinator.waitForResponse(uuid); } /** - * @see 
io.apicurio.registry.storage.RegistryStorage#deleteBranch(io.apicurio.registry.model.GA, io.apicurio.registry.model.BranchId) + * @see io.apicurio.registry.storage.RegistryStorage#deleteBranch(io.apicurio.registry.model.GA, + * io.apicurio.registry.model.BranchId) */ @Override public void deleteBranch(GA ga, BranchId branchId) { - var message = new DeleteBranch2Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), branchId.getRawBranchId()); + var message = new DeleteBranch2Message(ga.getRawGroupIdWithNull(), ga.getRawArtifactId(), + branchId.getRawBranchId()); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); coordinator.waitForResponse(uuid); } @Override public String triggerSnapshotCreation() throws RegistryStorageException { - //First we generate an identifier for the snapshot, then we send a snapshot marker to the journal topic. + // First we generate an identifier for the snapshot, then we send a snapshot marker to the journal + // topic. String snapshotId = UUID.randomUUID().toString(); Path path = Path.of(configuration.snapshotLocation(), snapshotId + ".sql"); var message = new CreateSnapshot1Message(path.toString(), snapshotId); @@ -856,9 +909,10 @@ public String triggerSnapshotCreation() throws RegistryStorageException { log.debug("Snapshot with id {} triggered.", snapshotId); var uuid = ConcurrentUtil.get(submitter.submitMessage(message)); String snapshotLocation = (String) coordinator.waitForResponse(uuid); - //Then we send a new message to the snapshots topic, using the snapshot id as the key of the snapshot message. - ProducerRecord record = new ProducerRecord<>(configuration.snapshotsTopic(), 0, snapshotId, snapshotLocation, - Collections.emptyList()); + // Then we send a new message to the snapshots topic, using the snapshot id as the key of the snapshot + // message. 
+ ProducerRecord record = new ProducerRecord<>(configuration.snapshotsTopic(), 0, + snapshotId, snapshotLocation, Collections.emptyList()); RecordMetadata recordMetadata = ConcurrentUtil.get(snapshotsProducer.apply(record)); return snapshotLocation; } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlSubmitter.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlSubmitter.java index a32dc43e48..6a8b295c45 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlSubmitter.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlSubmitter.java @@ -1,20 +1,19 @@ package io.apicurio.registry.storage.impl.kafkasql; -import java.nio.charset.StandardCharsets; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.CompletableFuture; - -import jakarta.inject.Named; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.common.header.internals.RecordHeader; - import io.apicurio.common.apps.logging.Logged; import io.apicurio.registry.utils.kafka.ProducerActions; import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.event.Observes; import jakarta.enterprise.event.Shutdown; import jakarta.inject.Inject; +import jakarta.inject.Named; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.header.internals.RecordHeader; + +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; @ApplicationScoped @Logged @@ -36,27 +35,31 @@ public class KafkaSqlSubmitter { public KafkaSqlSubmitter() { } - //Once the application is done, close the producer. + // Once the application is done, close the producer. public void handleShutdown(@Observes Shutdown shutdownEvent) throws Exception { producer.close(); } /** * Sends a message to the Kafka topic. 
+ * * @param key * @param value */ public CompletableFuture send(KafkaSqlMessageKey key, KafkaSqlMessage value) { UUID requestId = coordinator.createUUID(); - RecordHeader requestIdHeader = new RecordHeader("req", requestId.toString().getBytes(StandardCharsets.UTF_8)); - RecordHeader messageTypeHeader = new RecordHeader("mt", key.getMessageType().getBytes(StandardCharsets.UTF_8)); - ProducerRecord record = new ProducerRecord<>(configuration.topic(), 0, key, value, - List.of(requestIdHeader, messageTypeHeader)); + RecordHeader requestIdHeader = new RecordHeader("req", + requestId.toString().getBytes(StandardCharsets.UTF_8)); + RecordHeader messageTypeHeader = new RecordHeader("mt", + key.getMessageType().getBytes(StandardCharsets.UTF_8)); + ProducerRecord record = new ProducerRecord<>( + configuration.topic(), 0, key, value, List.of(requestIdHeader, messageTypeHeader)); return producer.apply(record).thenApply(rm -> requestId); } - + public void submitBootstrap(String bootstrapId) { - KafkaSqlMessageKey key = KafkaSqlMessageKey.builder().messageType("Bootstrap").uuid(bootstrapId).build(); + KafkaSqlMessageKey key = KafkaSqlMessageKey.builder().messageType("Bootstrap").uuid(bootstrapId) + .build(); send(key, null); } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/CreateArtifact8Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/CreateArtifact8Message.java index 1de35c005a..6dfc0ab129 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/CreateArtifact8Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/CreateArtifact8Message.java @@ -44,11 +44,10 @@ public class CreateArtifact8Message extends AbstractMessage { public Object dispatchTo(RegistryStorage storage) { ContentHandle handle = content != null ? ContentHandle.create(content) : null; ContentWrapperDto versionContent = content != null ? 
ContentWrapperDto.builder() - .contentType(contentType) - .content(handle) - .references(references) - .build() : null; - return storage.createArtifact(groupId, artifactId, artifactType, artifactMetaDataDto, version, versionContent, versionMetaData, versionBranches); + .contentType(contentType).content(handle).references(references).build() + : null; + return storage.createArtifact(groupId, artifactId, artifactType, artifactMetaDataDto, version, + versionContent, versionMetaData, versionBranches); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/CreateArtifactVersion7Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/CreateArtifactVersion7Message.java index d77aadb2c1..5e006b7ba2 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/CreateArtifactVersion7Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/CreateArtifactVersion7Message.java @@ -1,7 +1,5 @@ package io.apicurio.registry.storage.impl.kafkasql.messages; -import java.util.List; - import io.apicurio.registry.content.ContentHandle; import io.apicurio.registry.storage.RegistryStorage; import io.apicurio.registry.storage.dto.ArtifactReferenceDto; @@ -16,6 +14,8 @@ import lombok.Setter; import lombok.ToString; +import java.util.List; + @NoArgsConstructor @AllArgsConstructor @Builder @@ -41,12 +41,11 @@ public class CreateArtifactVersion7Message extends AbstractMessage { @Override public Object dispatchTo(RegistryStorage storage) { ContentHandle handle = content != null ? ContentHandle.create(content) : null; - ContentWrapperDto contentDto = content != null ? ContentWrapperDto.builder() - .contentType(contentType) - .content(handle) - .references(references) - .build() : null; - return storage.createArtifactVersion(groupId, artifactId, version, artifactType, contentDto, metaData, branches); + ContentWrapperDto contentDto = content != null ? 
ContentWrapperDto.builder().contentType(contentType) + .content(handle).references(references).build() + : null; + return storage.createArtifactVersion(groupId, artifactId, version, artifactType, contentDto, metaData, + branches); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifact2Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifact2Message.java index af42dd81d7..404dc530d8 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifact2Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifact2Message.java @@ -15,10 +15,10 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class DeleteArtifact2Message extends AbstractMessage { - + private String groupId; private String artifactId; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactRule3Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactRule3Message.java index ffd3c420c7..f164924c28 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactRule3Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactRule3Message.java @@ -16,7 +16,7 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class DeleteArtifactRule3Message extends AbstractMessage { diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactRules2Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactRules2Message.java index 3f20fad98c..b0c8669c51 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactRules2Message.java +++ 
b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactRules2Message.java @@ -15,10 +15,10 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class DeleteArtifactRules2Message extends AbstractMessage { - + private String groupId; private String artifactId; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactVersion3Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactVersion3Message.java index 7b3b7e9ce6..e9fa8292f9 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactVersion3Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifactVersion3Message.java @@ -15,10 +15,10 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class DeleteArtifactVersion3Message extends AbstractMessage { - + private String groupId; private String artifactId; private String version; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifacts1Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifacts1Message.java index 3569215d22..fbef282ab8 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifacts1Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteArtifacts1Message.java @@ -15,10 +15,10 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class DeleteArtifacts1Message extends AbstractMessage { - + private String groupId; /** diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteConfigProperty1Message.java 
b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteConfigProperty1Message.java index 36d1d9ed33..dc9a9b985a 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteConfigProperty1Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteConfigProperty1Message.java @@ -15,10 +15,10 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class DeleteConfigProperty1Message extends AbstractMessage { - + private String propertyName; /** diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteGlobalRule1Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteGlobalRule1Message.java index 89d14ee900..18861942e9 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteGlobalRule1Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/DeleteGlobalRule1Message.java @@ -16,10 +16,10 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class DeleteGlobalRule1Message extends AbstractMessage { - + private RuleType rule; /** diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/ImportGlobalRule1Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/ImportGlobalRule1Message.java index 219cbad07f..61c77f4152 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/ImportGlobalRule1Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/ImportGlobalRule1Message.java @@ -16,10 +16,10 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class ImportGlobalRule1Message extends AbstractMessage { - + private GlobalRuleEntity entity; /** diff --git 
a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/ReplaceBranchVersions3Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/ReplaceBranchVersions3Message.java index 41655a3fe3..83ddc3e3ca 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/ReplaceBranchVersions3Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/ReplaceBranchVersions3Message.java @@ -35,7 +35,8 @@ public class ReplaceBranchVersions3Message extends AbstractMessage { @Override public Object dispatchTo(RegistryStorage storage) { GA ga = new GA(groupId, artifactId); - storage.replaceBranchVersions(ga, new BranchId(branchId), versions.stream().map(VersionId::new).toList()); + storage.replaceBranchVersions(ga, new BranchId(branchId), + versions.stream().map(VersionId::new).toList()); return null; } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/SetConfigProperty1Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/SetConfigProperty1Message.java index 99936c7e03..a0bcff0f9c 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/SetConfigProperty1Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/SetConfigProperty1Message.java @@ -16,12 +16,12 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class SetConfigProperty1Message extends AbstractMessage { - + private DynamicConfigPropertyDto propertyDto; - + /** * @see io.apicurio.registry.storage.impl.kafkasql.KafkaSqlMessage#dispatchTo(io.apicurio.registry.storage.RegistryStorage) */ diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/UpdateGlobalRule2Message.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/UpdateGlobalRule2Message.java index 16721a90be..97f3cb9586 100644 --- 
a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/UpdateGlobalRule2Message.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/messages/UpdateGlobalRule2Message.java @@ -17,12 +17,12 @@ @Builder @Getter @Setter -@EqualsAndHashCode(callSuper=false) +@EqualsAndHashCode(callSuper = false) @ToString public class UpdateGlobalRule2Message extends AbstractMessage { private RuleType rule; private RuleConfigurationDto config; - + /** * @see io.apicurio.registry.storage.impl.kafkasql.KafkaSqlMessage#dispatchTo(io.apicurio.registry.storage.RegistryStorage) */ diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlKeyDeserializer.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlKeyDeserializer.java index 90b89d5b09..fec89c7148 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlKeyDeserializer.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlKeyDeserializer.java @@ -1,15 +1,13 @@ package io.apicurio.registry.storage.impl.kafkasql.serde; -import java.io.IOException; - +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.apicurio.registry.storage.impl.kafkasql.KafkaSqlMessageKey; import org.apache.kafka.common.serialization.Deserializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; - -import io.apicurio.registry.storage.impl.kafkasql.KafkaSqlMessageKey; +import java.io.IOException; /** * Kafka deserializer responsible for deserializing the key of a KSQL Kafka message. 
diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlKeySerializer.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlKeySerializer.java index 9c62a965d4..43b4aeb0a8 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlKeySerializer.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlKeySerializer.java @@ -1,21 +1,19 @@ package io.apicurio.registry.storage.impl.kafkasql.serde; -import java.io.IOException; -import java.io.UncheckedIOException; - -import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream; -import org.apache.kafka.common.serialization.Serializer; - import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; - import io.apicurio.registry.storage.impl.kafkasql.KafkaSqlMessageKey; +import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream; +import org.apache.kafka.common.serialization.Serializer; + +import java.io.IOException; +import java.io.UncheckedIOException; /** * Responsible for serializing the message key to bytes. 
*/ public class KafkaSqlKeySerializer implements Serializer { - + private static final ObjectMapper mapper = new ObjectMapper(); static { mapper.setSerializationInclusion(Include.NON_NULL); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlMessageIndex.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlMessageIndex.java index c41977694a..a3496aa792 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlMessageIndex.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlMessageIndex.java @@ -58,72 +58,45 @@ @ApplicationScoped public class KafkaSqlMessageIndex { - + private static Map> index = new HashMap<>(); + private static void indexMessageClass(Class mclass) { index.put(mclass.getSimpleName(), mclass); } + @SafeVarargs - private static void indexMessageClasses(Class ... mclass) { + private static void indexMessageClasses(Class... mclass) { for (Class class1 : mclass) { indexMessageClass(class1); } } + static { - indexMessageClasses( - AppendVersionToBranch3Message.class, - ConsumeDownload1Message.class, - CreateArtifact8Message.class, - CreateArtifactVersion7Message.class, - CreateArtifactVersionComment4Message.class, - CreateBranch4Message.class, - CreateDownload1Message.class, - CreateGlobalRule2Message.class, - CreateGroup1Message.class, - CreateRoleMapping3Message.class, - CreateSnapshot1Message.class, - DeleteAllExpiredDownloads0Message.class, - DeleteAllUserData0Message.class, - DeleteArtifact2Message.class, - DeleteArtifactRule3Message.class, - DeleteArtifactRules2Message.class, - DeleteArtifacts1Message.class, - DeleteArtifactVersion3Message.class, - DeleteArtifactVersionComment4Message.class, - DeleteBranch2Message.class, - DeleteConfigProperty1Message.class, - DeleteGlobalRule1Message.class, - DeleteGlobalRules0Message.class, - DeleteGroup1Message.class, - DeleteRoleMapping1Message.class, - ImportArtifact1Message.class, - 
ImportArtifactRule1Message.class, - ImportArtifactVersion1Message.class, - ImportBranch1Message.class, - ImportComment1Message.class, - ImportContent1Message.class, - ImportGlobalRule1Message.class, - ImportGroup1Message.class, - NextCommentId0Message.class, - NextContentId0Message.class, - NextGlobalId0Message.class, - ReplaceBranchVersions3Message.class, - ResetCommentId0Message.class, - ResetContentId0Message.class, - ResetGlobalId0Message.class, - SetConfigProperty1Message.class, - UpdateArtifactMetaData3Message.class, - UpdateArtifactRule4Message.class, - UpdateArtifactVersionComment5Message.class, - UpdateArtifactVersionMetaData4Message.class, - UpdateBranchMetaData3Message.class, - UpdateContentCanonicalHash3Message.class, - UpdateGlobalRule2Message.class, - UpdateGroupMetaData2Message.class, - UpdateRoleMapping2Message.class - ); + indexMessageClasses(AppendVersionToBranch3Message.class, ConsumeDownload1Message.class, + CreateArtifact8Message.class, CreateArtifactVersion7Message.class, + CreateArtifactVersionComment4Message.class, CreateBranch4Message.class, + CreateDownload1Message.class, CreateGlobalRule2Message.class, CreateGroup1Message.class, + CreateRoleMapping3Message.class, CreateSnapshot1Message.class, + DeleteAllExpiredDownloads0Message.class, DeleteAllUserData0Message.class, + DeleteArtifact2Message.class, DeleteArtifactRule3Message.class, + DeleteArtifactRules2Message.class, DeleteArtifacts1Message.class, + DeleteArtifactVersion3Message.class, DeleteArtifactVersionComment4Message.class, + DeleteBranch2Message.class, DeleteConfigProperty1Message.class, + DeleteGlobalRule1Message.class, DeleteGlobalRules0Message.class, DeleteGroup1Message.class, + DeleteRoleMapping1Message.class, ImportArtifact1Message.class, + ImportArtifactRule1Message.class, ImportArtifactVersion1Message.class, + ImportBranch1Message.class, ImportComment1Message.class, ImportContent1Message.class, + ImportGlobalRule1Message.class, ImportGroup1Message.class, 
NextCommentId0Message.class, + NextContentId0Message.class, NextGlobalId0Message.class, ReplaceBranchVersions3Message.class, + ResetCommentId0Message.class, ResetContentId0Message.class, ResetGlobalId0Message.class, + SetConfigProperty1Message.class, UpdateArtifactMetaData3Message.class, + UpdateArtifactRule4Message.class, UpdateArtifactVersionComment5Message.class, + UpdateArtifactVersionMetaData4Message.class, UpdateBranchMetaData3Message.class, + UpdateContentCanonicalHash3Message.class, UpdateGlobalRule2Message.class, + UpdateGroupMetaData2Message.class, UpdateRoleMapping2Message.class); } - + public static Class lookup(String name) { return index.get(name); } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlPartitioner.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlPartitioner.java index 9cf60f6dc7..878d8b773c 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlPartitioner.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlPartitioner.java @@ -1,33 +1,30 @@ package io.apicurio.registry.storage.impl.kafkasql.serde; +import io.apicurio.registry.storage.impl.kafkasql.KafkaSqlMessageKey; import org.apache.kafka.clients.producer.Partitioner; import org.apache.kafka.clients.producer.internals.StickyPartitionCache; import org.apache.kafka.common.Cluster; import org.apache.kafka.common.PartitionInfo; import org.apache.kafka.common.utils.Utils; -import io.apicurio.registry.storage.impl.kafkasql.KafkaSqlMessageKey; - import java.util.List; import java.util.Map; /** - * A custom Kafka partitioner that uses the ArtifactId (when available) as the key to proper - * partitioning. The ArtifactId is extractable from the key in most cases. For some keys - * (e.g. global rule related messages) no ArtifactId is available. 
In those cases, a constant - * unique string is used instead, which ensures that those messages are all put on the same - * partition. - * + * A custom Kafka partitioner that uses the ArtifactId (when available) as the key to proper partitioning. The + * ArtifactId is extractable from the key in most cases. For some keys (e.g. global rule related messages) no + * ArtifactId is available. In those cases, a constant unique string is used instead, which ensures that those + * messages are all put on the same partition. */ public class KafkaSqlPartitioner implements Partitioner { private final StickyPartitionCache stickyPartitionCache = new StickyPartitionCache(); - public void configure(Map configs) {} + public void configure(Map configs) { + } /** - * Compute the partition for the given record. Do this by extracting the ArtifactId from the - * key object. + * Compute the partition for the given record. Do this by extracting the ArtifactId from the key object. * * @param topic The topic name * @param key The key to partition on (or null if no key) @@ -36,25 +33,27 @@ public void configure(Map configs) {} * @param valueBytes serialized value to partition on or null * @param cluster The current cluster metadata */ - public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { + public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, + Cluster cluster) { if (keyBytes == null) { return stickyPartitionCache.partition(topic, cluster); } - + List partitions = cluster.partitionsForTopic(topic); int numPartitions = partitions.size(); - + // hash the partition key to choose a partition KafkaSqlMessageKey msgKey = (KafkaSqlMessageKey) key; String partitionKey = msgKey.getPartitionKey(); return Utils.toPositive(Utils.murmur2(partitionKey.getBytes())) % numPartitions; } - public void close() {} - + public void close() { + } + /** - * If a batch completed for the current sticky 
partition, change the sticky partition. - * Alternately, if no sticky partition has been determined, set one. + * If a batch completed for the current sticky partition, change the sticky partition. Alternately, if no + * sticky partition has been determined, set one. */ public void onNewBatch(String topic, Cluster cluster, int prevPartition) { stickyPartitionCache.nextPartition(topic, cluster, prevPartition); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlValueDeserializer.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlValueDeserializer.java index 7215195528..05fbd888a4 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlValueDeserializer.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlValueDeserializer.java @@ -34,7 +34,8 @@ public KafkaSqlMessage deserialize(String topic, byte[] data) { } /** - * @see org.apache.kafka.common.serialization.Deserializer#deserialize(java.lang.String, org.apache.kafka.common.header.Headers, byte[]) + * @see org.apache.kafka.common.serialization.Deserializer#deserialize(java.lang.String, + * org.apache.kafka.common.header.Headers, byte[]) */ @Override public KafkaSqlMessage deserialize(String topic, Headers headers, byte[] data) { @@ -63,17 +64,12 @@ public KafkaSqlMessage deserialize(String topic, Headers headers, byte[] data) { } /** - * Extracts the UUID from the message. The UUID should be found in a message header. + * Extracts the UUID from the message. The UUID should be found in a message header. */ private static String extractMessageType(Headers headers) { - return Optional.ofNullable(headers.headers("mt")) - .map(Iterable::iterator) - .map(it -> { - return it.hasNext() ? it.next() : null; - }) - .map(Header::value) - .map(String::new) - .orElse(null); + return Optional.ofNullable(headers.headers("mt")).map(Iterable::iterator).map(it -> { + return it.hasNext() ? 
it.next() : null; + }).map(Header::value).map(String::new).orElse(null); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlValueSerializer.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlValueSerializer.java index c8aee9f3e8..cc05039214 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlValueSerializer.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/serde/KafkaSqlValueSerializer.java @@ -1,15 +1,13 @@ package io.apicurio.registry.storage.impl.kafkasql.serde; -import java.io.IOException; -import java.io.UncheckedIOException; - -import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream; -import org.apache.kafka.common.serialization.Serializer; - import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; - import io.apicurio.registry.storage.impl.kafkasql.KafkaSqlMessage; +import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream; +import org.apache.kafka.common.serialization.Serializer; + +import java.io.IOException; +import java.io.UncheckedIOException; /** * Responsible for serializing the message key to bytes. diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/sql/KafkaSqlSink.java b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/sql/KafkaSqlSink.java index 113b87f394..8f05c578c6 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/sql/KafkaSqlSink.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/kafkasql/sql/KafkaSqlSink.java @@ -31,13 +31,13 @@ public class KafkaSqlSink { SqlRegistryStorage sqlStore; /** - * Called by the {@link KafkaSqlRegistryStorage} main Kafka consumer loop to process a single - * message in the topic. Each message represents some attempt to modify the registry data. So - * each message much be consumed and applied to the in-memory SQL data store. 
+ * Called by the {@link KafkaSqlRegistryStorage} main Kafka consumer loop to process a single message in + * the topic. Each message represents some attempt to modify the registry data. So each message must be + * consumed and applied to the in-memory SQL data store. *

- * This method extracts the UUID from the message headers, delegates the message processing - * to doProcessMessage(), and handles any exceptions that might occur. Finally - * it will report the result to any local threads that may be waiting (via the coordinator). + * This method extracts the UUID from the message headers, delegates the message processing to + * doProcessMessage(), and handles any exceptions that might occur. Finally it will report + * the result to any local threads that may be waiting (via the coordinator). * * @param record */ @@ -48,7 +48,9 @@ public void processMessage(ConsumerRecord r try { Object result = doProcessMessage(record); - log.trace("Processed message key: {} value: {} result: {}", record.key().getMessageType(), record.value() != null ? record.value().toString() : "", result != null ? result.toString() : ""); + log.trace("Processed message key: {} value: {} result: {}", record.key().getMessageType(), + record.value() != null ? record.value().toString() : "", + result != null ? result.toString() : ""); log.debug("Kafka message successfully processed. Notifying listeners of response."); coordinator.notifyResponse(requestId, result); } catch (RegistryException e) { @@ -56,30 +58,25 @@ public void processMessage(ConsumerRecord r coordinator.notifyResponse(requestId, e); } catch (Throwable e) { log.debug("Unexpected exception detected: {}", e.getMessage()); - coordinator.notifyResponse(requestId, new RegistryException(e)); // TODO: Any exception (no wrapping) + coordinator.notifyResponse(requestId, new RegistryException(e)); // TODO: Any exception (no + // wrapping) } } /** - * Extracts the UUID from the message. The UUID should be found in a message header. + * Extracts the UUID from the message. The UUID should be found in a message header. 
* * @param record */ private UUID extractUuid(ConsumerRecord record) { - return Optional.ofNullable(record.headers().headers("req")) - .map(Iterable::iterator) - .map(it -> { - return it.hasNext() ? it.next() : null; - }) - .map(Header::value) - .map(String::new) - .map(UUID::fromString) - .orElse(null); + return Optional.ofNullable(record.headers().headers("req")).map(Iterable::iterator).map(it -> { + return it.hasNext() ? it.next() : null; + }).map(Header::value).map(String::new).map(UUID::fromString).orElse(null); } /** - * Process the message and return a result. This method may also throw an exception if something - * goes wrong. + * Process the message and return a result. This method may also throw an exception if something goes + * wrong. * * @param record */ diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/AbstractHandleFactory.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/AbstractHandleFactory.java index b6031510a8..2fbcbf0919 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/AbstractHandleFactory.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/AbstractHandleFactory.java @@ -33,9 +33,9 @@ protected void initialize(AgroalDataSource dataSource, String dataSourceId, Logg @Override public R withHandle(HandleCallback callback) throws X { /* - * Handles are cached and reused if calls to this method are nested. - * Make sure that all nested uses of a handle are either within a transaction context, - * or without one. Starting a transaction with a nested handle will cause an exception. + * Handles are cached and reused if calls to this method are nested. Make sure that all nested uses of + * a handle are either within a transaction context, or without one. Starting a transaction with a + * nested handle will cause an exception. 
*/ try { if (get().handle == null) { @@ -65,7 +65,6 @@ public R withHandle(HandleCallback callback) thro } } - @Override public R withHandleNoException(HandleCallback callback) { try { @@ -77,7 +76,6 @@ public R withHandleNoException(HandleCallback cal } } - @Override public void withHandleNoException(HandleAction action) { withHandleNoException(handle -> { @@ -86,12 +84,10 @@ public void withHandleNoException(HandleAction action) }); } - private LocalState get() { return local.get().computeIfAbsent(dataSourceId, k -> new LocalState()); } - private static class LocalState { Handle handle; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/AbstractSqlRegistryStorage.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/AbstractSqlRegistryStorage.java index 52b940d02c..56c7c94dc6 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/AbstractSqlRegistryStorage.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/AbstractSqlRegistryStorage.java @@ -141,15 +141,15 @@ import static io.apicurio.registry.utils.StringUtil.limitStr; import static java.util.stream.Collectors.toList; - /** - * A SQL implementation of the {@link RegistryStorage} interface. This impl does not - * use any ORM technology - it simply uses native SQL for all operations. + * A SQL implementation of the {@link RegistryStorage} interface. This impl does not use any ORM technology - + * it simply uses native SQL for all operations. 
*/ public abstract class AbstractSqlRegistryStorage implements RegistryStorage { - private static int DB_VERSION = Integer.valueOf( - IoUtil.toString(AbstractSqlRegistryStorage.class.getResourceAsStream("db-version"))).intValue(); + private static int DB_VERSION = Integer + .valueOf(IoUtil.toString(AbstractSqlRegistryStorage.class.getResourceAsStream("db-version"))) + .intValue(); private static final Object inmemorySequencesMutex = new Object(); private static final ObjectMapper mapper = new ObjectMapper(); @@ -206,10 +206,11 @@ protected SqlStatements sqlStatements() { /** * @param emitStorageReadyEvent The concrete implementation needs to tell AbstractSqlRegistryStorage - * whether it should fire {@see io.apicurio.registry.storage.StorageEvent} in addition to - * {@see io.apicurio.registry.storage.impl.sql.SqlStorageEvent}. Multiple storage implementations - * may be present at the same time (in particular when using KafkaSQL persistence), - * but only the single {@see io.apicurio.registry.types.Current} one may fire the former event. + * whether it should fire {@see io.apicurio.registry.storage.StorageEvent} in addition to + * {@see io.apicurio.registry.storage.impl.sql.SqlStorageEvent}. Multiple storage + * implementations may be present at the same time (in particular when using KafkaSQL + * persistence), but only the single {@see io.apicurio.registry.types.Current} one may fire the + * former event. */ @Transactional protected void initialize(HandleFactory handleFactory, boolean emitStorageReadyEvent) { @@ -232,12 +233,14 @@ protected void initialize(HandleFactory handleFactory, boolean emitStorageReadyE } } else { if (!isDatabaseInitialized(handle)) { - log.error("Database not initialized. Please use the DDL scripts to initialize the database before starting the application."); + log.error( + "Database not initialized. 
Please use the DDL scripts to initialize the database before starting the application."); throw new RuntimeException("Database not initialized."); } if (!isDatabaseCurrent(handle)) { - log.error("Detected an old version of the database. Please use the DDL upgrade scripts to bring your database up to date."); + log.error( + "Detected an old version of the database. Please use the DDL upgrade scripts to bring your database up to date."); throw new RuntimeException("Database not upgraded."); } } @@ -249,19 +252,15 @@ protected void initialize(HandleFactory handleFactory, boolean emitStorageReadyE initializeEvent.setType(SqlStorageEventType.READY); sqlStorageEvent.fire(initializeEvent); if (emitStorageReadyEvent) { - /* In cases where the observer of the event also injects the source bean, - * such as the io.apicurio.registry.ImportLifecycleBean, - * a kind of recursion may happen. - * This is because the event is fired in the @PostConstruct method, - * and is being processed in the same thread. - * We avoid this by processing the event asynchronously. - * Note that this requires the jakarta.enterprise.event.ObservesAsync - * annotation on the receiving side. If this becomes cumbersome, - * try using ManagedExecutor. + /* + * In cases where the observer of the event also injects the source bean, such as the + * io.apicurio.registry.ImportLifecycleBean, a kind of recursion may happen. This is because the + * event is fired in the @PostConstruct method, and is being processed in the same thread. We + * avoid this by processing the event asynchronously. Note that this requires the + * jakarta.enterprise.event.ObservesAsync annotation on the receiving side. If this becomes + * cumbersome, try using ManagedExecutor. 
*/ - storageEvent.fireAsync(StorageEvent.builder() - .type(StorageEventType.READY) - .build()); + storageEvent.fireAsync(StorageEvent.builder().type(StorageEventType.READY).build()); } } @@ -309,8 +308,7 @@ private void initializeDatabase(Handle handle) { } /** - * Upgrades the database by executing a number of DDL statements found in DB-specific - * DDL upgrade scripts. + * Upgrades the database by executing a number of DDL statements found in DB-specific DDL upgrade scripts. */ private void upgradeDatabase(Handle handle) { log.info("Upgrading the Apicurio Hub API database."); @@ -338,9 +336,8 @@ private void upgradeDatabase(Handle handle) { } /** - * Instantiates an instance of the given upgrader class and then invokes it. Used to perform - * advanced upgrade logic when upgrading the DB (logic that cannot be handled in simple SQL - * statements). + * Instantiates an instance of the given upgrader class and then invokes it. Used to perform advanced + * upgrade logic when upgrading the DB (logic that cannot be handled in simple SQL statements). 
* * @param handle * @param cname @@ -361,10 +358,8 @@ private void applyUpgrader(Handle handle, String cname) { */ private int getDatabaseVersion(Handle handle) { try { - int version = handle.createQuery(this.sqlStatements.getDatabaseVersion()) - .bind(0, "db_version") - .mapTo(Integer.class) - .one(); + int version = handle.createQuery(this.sqlStatements.getDatabaseVersion()).bind(0, "db_version") + .mapTo(Integer.class).one(); return version; } catch (Exception e) { log.error("Error getting DB version.", e); @@ -382,7 +377,9 @@ public boolean isAlive() { if (!isReady) { return false; } - if (Instant.now().isAfter(isAliveLastCheck.plus(Duration.ofSeconds(2)))) { // Tradeoff between reducing load on the DB and responsiveness: 2s + if (Instant.now().isAfter(isAliveLastCheck.plus(Duration.ofSeconds(2)))) { // Tradeoff between + // reducing load on the DB + // and responsiveness: 2s isAliveLastCheck = Instant.now(); try { getGlobalRules(); @@ -394,53 +391,45 @@ public boolean isAlive() { return isAliveCached; } - @Override public boolean isReadOnly() { return false; } - @Override public String storageName() { return "sql"; } - @Override @Transactional - public ContentWrapperDto getContentById(long contentId) throws ContentNotFoundException, RegistryStorageException { + public ContentWrapperDto getContentById(long contentId) + throws ContentNotFoundException, RegistryStorageException { return handles.withHandleNoException(handle -> { Optional res = handle.createQuery(sqlStatements().selectContentById()) - .bind(0, contentId) - .map(ContentMapper.instance) - .findFirst(); + .bind(0, contentId).map(ContentMapper.instance).findFirst(); return res.orElseThrow(() -> new ContentNotFoundException(contentId)); }); } - @Override @Transactional - public ContentWrapperDto getContentByHash(String contentHash) throws ContentNotFoundException, RegistryStorageException { + public ContentWrapperDto getContentByHash(String contentHash) + throws ContentNotFoundException, 
RegistryStorageException { return handles.withHandleNoException(handle -> { Optional res = handle.createQuery(sqlStatements().selectContentByContentHash()) - .bind(0, contentHash) - .map(ContentMapper.instance) - .findFirst(); + .bind(0, contentHash).map(ContentMapper.instance).findFirst(); return res.orElseThrow(() -> new ContentNotFoundException(contentHash)); }); } - @Override @Transactional public List getArtifactVersionsByContentId(long contentId) { return handles.withHandleNoException(handle -> { - List dtos = handle.createQuery(sqlStatements().selectArtifactVersionMetaDataByContentId()) - .bind(0, contentId) - .map(ArtifactVersionMetaDataDtoMapper.instance) - .list(); + List dtos = handle + .createQuery(sqlStatements().selectArtifactVersionMetaDataByContentId()) + .bind(0, contentId).map(ArtifactVersionMetaDataDtoMapper.instance).list(); if (dtos.isEmpty()) { throw new ContentNotFoundException(contentId); } @@ -456,37 +445,30 @@ public List getEnabledArtifactContentIds(String groupId, String artifactId return handles.withHandleNoException(handle -> { String sql = sqlStatements().selectArtifactContentIds(); - return handle.createQuery(sql) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .mapTo(Long.class) - .list(); + return handle.createQuery(sql).bind(0, normalizeGroupId(groupId)).bind(1, artifactId) + .mapTo(Long.class).list(); }); } - @Override @Transactional - public Pair createArtifact(String groupId, String artifactId, String artifactType, - EditableArtifactMetaDataDto artifactMetaData, String version, ContentWrapperDto versionContent, - EditableVersionMetaDataDto versionMetaData, List versionBranches) throws RegistryStorageException { + public Pair createArtifact(String groupId, + String artifactId, String artifactType, EditableArtifactMetaDataDto artifactMetaData, + String version, ContentWrapperDto versionContent, EditableVersionMetaDataDto versionMetaData, + List versionBranches) throws RegistryStorageException { log.debug("Inserting 
an artifact row for: {} {}", groupId, artifactId); String owner = securityIdentity.getPrincipal().getName(); Date createdOn = new Date(); - EditableArtifactMetaDataDto amd = artifactMetaData == null ? EditableArtifactMetaDataDto.builder().build() : artifactMetaData; + EditableArtifactMetaDataDto amd = artifactMetaData == null + ? EditableArtifactMetaDataDto.builder().build() : artifactMetaData; // Create the group if it doesn't exist yet. if (groupId != null && !isGroupExists(groupId)) { - //Only create group metadata for non-default groups. - createGroup(GroupMetaDataDto.builder() - .groupId(groupId) - .createdOn(createdOn.getTime()) - .modifiedOn(createdOn.getTime()) - .owner(owner) - .modifiedBy(owner) - .build()); + // Only create group metadata for non-default groups. + createGroup(GroupMetaDataDto.builder().groupId(groupId).createdOn(createdOn.getTime()) + .modifiedOn(createdOn.getTime()).owner(owner).modifiedBy(owner).build()); } try { @@ -495,52 +477,37 @@ public Pair createArtifact(Stri String labelsStr = SqlUtil.serializeLabels(labels); // Create a row in the artifacts table. 
- handle.createUpdate(sqlStatements.insertArtifact()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, artifactType) - .bind(3, owner) - .bind(4, createdOn) + handle.createUpdate(sqlStatements.insertArtifact()).bind(0, normalizeGroupId(groupId)) + .bind(1, artifactId).bind(2, artifactType).bind(3, owner).bind(4, createdOn) .bind(5, owner) // modifiedBy .bind(6, createdOn) // modifiedOn .bind(7, limitStr(amd.getName(), 512)) - .bind(8, limitStr(amd.getDescription(), 1024, true)) - .bind(9, labelsStr) - .execute(); + .bind(8, limitStr(amd.getDescription(), 1024, true)).bind(9, labelsStr).execute(); // Insert labels into the "artifact_labels" table if (labels != null && !labels.isEmpty()) { labels.forEach((k, v) -> { handle.createUpdate(sqlStatements.insertArtifactLabel()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId) .bind(2, limitStr(k.toLowerCase(), 256)) - .bind(3, limitStr(v.toLowerCase(), 512)) - .execute(); + .bind(3, limitStr(v.toLowerCase(), 512)).execute(); }); } // Return an artifact metadata dto - ArtifactMetaDataDto amdDto = ArtifactMetaDataDto.builder() - .groupId(groupId) - .artifactId(artifactId) - .name(amd.getName()) - .description(amd.getDescription()) - .createdOn(createdOn.getTime()) - .owner(owner) - .modifiedOn(createdOn.getTime()) - .modifiedBy(owner) - .artifactType(artifactType) - .labels(labels) - .build(); - - // The artifact was successfully created! Create the version as well, if one was included. + ArtifactMetaDataDto amdDto = ArtifactMetaDataDto.builder().groupId(groupId) + .artifactId(artifactId).name(amd.getName()).description(amd.getDescription()) + .createdOn(createdOn.getTime()).owner(owner).modifiedOn(createdOn.getTime()) + .modifiedBy(owner).artifactType(artifactType).labels(labels).build(); + + // The artifact was successfully created! Create the version as well, if one was included. 
if (versionContent != null) { // Put the content in the DB and get the unique content ID back. long contentId = getOrCreateContent(handle, artifactType, versionContent); - ArtifactVersionMetaDataDto vmdDto = createArtifactVersionRaw(handle, true, groupId, artifactId, version, - versionMetaData, owner, createdOn, contentId, versionBranches); + ArtifactVersionMetaDataDto vmdDto = createArtifactVersionRaw(handle, true, groupId, + artifactId, version, versionMetaData, owner, createdOn, contentId, + versionBranches); return ImmutablePair.of(amdDto, vmdDto); } else { @@ -555,13 +522,12 @@ public Pair createArtifact(Stri } } - /** * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. */ - private ArtifactVersionMetaDataDto createArtifactVersionRaw(Handle handle, boolean firstVersion, String groupId, - String artifactId, String version, EditableVersionMetaDataDto metaData, String owner, Date createdOn, Long contentId, - List branches) { + private ArtifactVersionMetaDataDto createArtifactVersionRaw(Handle handle, boolean firstVersion, + String groupId, String artifactId, String version, EditableVersionMetaDataDto metaData, + String owner, Date createdOn, Long contentId, List branches) { if (metaData == null) { metaData = EditableVersionMetaDataDto.builder().build(); } @@ -578,48 +544,28 @@ private ArtifactVersionMetaDataDto createArtifactVersionRaw(Handle handle, boole version = "1"; } final String finalVersion1 = version; // Lambda requirement - handle.createUpdate(sqlStatements.insertVersion(true)) - .bind(0, globalId) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) - .bind(3, finalVersion1) - .bind(4, state) - .bind(5, limitStr(metaData.getName(), 512)) - .bind(6, limitStr(metaData.getDescription(), 1024, true)) - .bind(7, owner) - .bind(8, createdOn) - .bind(9, owner) - .bind(10, createdOn) - .bind(11, labelsStr) - .bind(12, contentId) - .execute(); + handle.createUpdate(sqlStatements.insertVersion(true)).bind(0, 
globalId) + .bind(1, normalizeGroupId(groupId)).bind(2, artifactId).bind(3, finalVersion1) + .bind(4, state).bind(5, limitStr(metaData.getName(), 512)) + .bind(6, limitStr(metaData.getDescription(), 1024, true)).bind(7, owner) + .bind(8, createdOn).bind(9, owner).bind(10, createdOn).bind(11, labelsStr) + .bind(12, contentId).execute(); gav = new GAV(groupId, artifactId, finalVersion1); createOrUpdateBranchRaw(handle, gav, BranchId.LATEST, true); } else { - handle.createUpdate(sqlStatements.insertVersion(false)) - .bind(0, globalId) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) - .bind(3, version) - .bind(4, normalizeGroupId(groupId)) - .bind(5, artifactId) - .bind(6, state) + handle.createUpdate(sqlStatements.insertVersion(false)).bind(0, globalId) + .bind(1, normalizeGroupId(groupId)).bind(2, artifactId).bind(3, version) + .bind(4, normalizeGroupId(groupId)).bind(5, artifactId).bind(6, state) .bind(7, limitStr(metaData.getName(), 512)) - .bind(8, limitStr(metaData.getDescription(), 1024, true)) - .bind(9, owner) - .bind(10, createdOn) - .bind(11, owner) - .bind(12, createdOn) - .bind(13, labelsStr) - .bind(14, contentId) - .execute(); + .bind(8, limitStr(metaData.getDescription(), 1024, true)).bind(9, owner) + .bind(10, createdOn).bind(11, owner).bind(12, createdOn).bind(13, labelsStr) + .bind(14, contentId).execute(); - // If version is null, update the row we just inserted to set the version to the generated versionOrder + // If version is null, update the row we just inserted to set the version to the generated + // versionOrder if (version == null) { - handle.createUpdate(sqlStatements.autoUpdateVersionForGlobalId()) - .bind(0, globalId) - .execute(); + handle.createUpdate(sqlStatements.autoUpdateVersionForGlobalId()).bind(0, globalId).execute(); } gav = getGAVByGlobalId(globalId); @@ -629,10 +575,8 @@ private ArtifactVersionMetaDataDto createArtifactVersionRaw(Handle handle, boole // Insert labels into the "version_labels" table if 
(metaData.getLabels() != null && !metaData.getLabels().isEmpty()) { metaData.getLabels().forEach((k, v) -> { - handle.createUpdate(sqlStatements.insertVersionLabel()) - .bind(0, globalId) - .bind(1, limitStr(k.toLowerCase(), 256)) - .bind(2, limitStr(v.toLowerCase(), 512)) + handle.createUpdate(sqlStatements.insertVersionLabel()).bind(0, globalId) + .bind(1, limitStr(k.toLowerCase(), 256)).bind(2, limitStr(v.toLowerCase(), 512)) .execute(); }); } @@ -645,16 +589,13 @@ private ArtifactVersionMetaDataDto createArtifactVersionRaw(Handle handle, boole }); } - return handle.createQuery(sqlStatements.selectArtifactVersionMetaDataByGlobalId()) - .bind(0, globalId) - .map(ArtifactVersionMetaDataDtoMapper.instance) - .one(); + return handle.createQuery(sqlStatements.selectArtifactVersionMetaDataByGlobalId()).bind(0, globalId) + .map(ArtifactVersionMetaDataDtoMapper.instance).one(); } - /** - * Store the content in the database and return the content ID of the new row. - * If the content already exists, just return the content ID of the existing row. + * Store the content in the database and return the content ID of the new row. If the content already + * exists, just return the content ID of the existing row. *

* IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. */ @@ -663,21 +604,18 @@ private Long getOrCreateContent(Handle handle, String artifactType, ContentWrapp TypedContent content = TypedContent.create(contentDto.getContent(), contentDto.getContentType()); if (notEmpty(references)) { - return getOrCreateContentRaw(handle, content, - utils.getContentHash(content, references), + return getOrCreateContentRaw(handle, content, utils.getContentHash(content, references), utils.getCanonicalContentHash(content, artifactType, references, this::resolveReferences), references, SqlUtil.serializeReferences(references)); } else { - return getOrCreateContentRaw(handle, content, - utils.getContentHash(content, null), - utils.getCanonicalContentHash(content, artifactType, null, null), - null, null); + return getOrCreateContentRaw(handle, content, utils.getContentHash(content, null), + utils.getCanonicalContentHash(content, artifactType, null, null), null, null); } } /** - * Store the content in the database and return the content ID of the new row. - * If the content already exists, just return the content ID of the existing row. + * Store the content in the database and return the content ID of the new row. If the content already + * exists, just return the content ID of the existing row. *

* IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. */ @@ -685,21 +623,16 @@ private Long getOrCreateContentRaw(Handle handle, TypedContent content, String c String canonicalContentHash, List references, String referencesSerialized) { byte[] contentBytes = content.getContent().bytes(); - // Upsert a row in the "content" table. This will insert a row for the content - // if a row doesn't already exist. We use the content hash to determine whether - // a row for this content already exists. If we find a row we return its content ID. + // Upsert a row in the "content" table. This will insert a row for the content + // if a row doesn't already exist. We use the content hash to determine whether + // a row for this content already exists. If we find a row we return its content ID. // If we don't find a row, we insert one and then return its content ID. Long contentId; boolean insertReferences = true; if (Set.of("mssql", "postgresql").contains(sqlStatements.dbType())) { - handle.createUpdate(sqlStatements.upsertContent()) - .bind(0, nextContentId()) - .bind(1, canonicalContentHash) - .bind(2, contentHash) - .bind(3, content.getContentType()) - .bind(4, contentBytes) - .bind(5, referencesSerialized) - .execute(); + handle.createUpdate(sqlStatements.upsertContent()).bind(0, nextContentId()) + .bind(1, canonicalContentHash).bind(2, contentHash).bind(3, content.getContentType()) + .bind(4, contentBytes).bind(5, referencesSerialized).execute(); contentId = contentIdFromHash(contentHash) .orElseThrow(() -> new RegistryStorageException("Content hash not found.")); @@ -708,17 +641,12 @@ private Long getOrCreateContentRaw(Handle handle, TypedContent content, String c if (contentIdOptional.isPresent()) { contentId = contentIdOptional.get(); - //If the content is already present there's no need to create the references. + // If the content is already present there's no need to create the references. 
insertReferences = false; } else { - handle.createUpdate(sqlStatements.upsertContent()) - .bind(0, nextContentId()) - .bind(1, canonicalContentHash) - .bind(2, contentHash) - .bind(3, content.getContentType()) - .bind(4, contentBytes) - .bind(5, referencesSerialized) - .execute(); + handle.createUpdate(sqlStatements.upsertContent()).bind(0, nextContentId()) + .bind(1, canonicalContentHash).bind(2, contentHash).bind(3, content.getContentType()) + .bind(4, contentBytes).bind(5, referencesSerialized).execute(); contentId = contentIdFromHash(contentHash) .orElseThrow(() -> new RegistryStorageException("Content hash not found.")); @@ -728,13 +656,13 @@ private Long getOrCreateContentRaw(Handle handle, TypedContent content, String c } if (insertReferences) { - //Finally, insert references into the "content_references" table if the content wasn't present yet. + // Finally, insert references into the "content_references" table if the content wasn't present + // yet. insertReferences(handle, contentId, references); } return contentId; } - /** * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. 
*/ @@ -742,16 +670,13 @@ private void insertReferences(Handle handle, Long contentId, List { try { - handle.createUpdate(sqlStatements.upsertContentReference()) - .bind(0, contentId) + handle.createUpdate(sqlStatements.upsertContentReference()).bind(0, contentId) .bind(1, normalizeGroupId(reference.getGroupId())) - .bind(2, reference.getArtifactId()) - .bind(3, reference.getVersion()) - .bind(4, reference.getName()) - .execute(); + .bind(2, reference.getArtifactId()).bind(3, reference.getVersion()) + .bind(4, reference.getName()).execute(); } catch (Exception e) { if (sqlStatements.isPrimaryKeyViolation(e)) { - //Do nothing, the reference already exist, only needed for H2 + // Do nothing, the reference already exist, only needed for H2 } else { throw e; } @@ -760,7 +685,6 @@ private void insertReferences(Handle handle, Long contentId, List deleteArtifact(String groupId, String artifactId) @@ -769,23 +693,16 @@ public List deleteArtifact(String groupId, String artifactId) return handles.withHandle(handle -> { // Get the list of versions of the artifact (will be deleted) List versions = handle.createQuery(sqlStatements.selectArtifactVersions()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .mapTo(String.class) - .list(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).mapTo(String.class).list(); - // Note: delete artifact rules as well. Artifact rules are not set to cascade on delete + // Note: delete artifact rules as well. Artifact rules are not set to cascade on delete // because the Confluent API allows users to configure rules for artifacts that don't exist. 
:( - handle.createUpdate(sqlStatements.deleteArtifactRules()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .execute(); + handle.createUpdate(sqlStatements.deleteArtifactRules()).bind(0, normalizeGroupId(groupId)) + .bind(1, artifactId).execute(); // Delete artifact row (should be just one) int rowCount = handle.createUpdate(sqlStatements.deleteArtifact()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .execute(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).execute(); if (rowCount == 0) { throw new ArtifactNotFoundException(groupId, artifactId); @@ -797,22 +714,19 @@ public List deleteArtifact(String groupId, String artifactId) }); } - @Override @Transactional public void deleteArtifacts(String groupId) throws RegistryStorageException { log.debug("Deleting all artifacts in group: {}", groupId); handles.withHandle(handle -> { - // Note: delete artifact rules separately. Artifact rules are not set to cascade on delete + // Note: delete artifact rules separately. Artifact rules are not set to cascade on delete // because the Confluent API allows users to configure rules for artifacts that don't exist. 
:( handle.createUpdate(sqlStatements.deleteArtifactRulesByGroupId()) - .bind(0, normalizeGroupId(groupId)) - .execute(); + .bind(0, normalizeGroupId(groupId)).execute(); // Delete all artifacts in the group int rowCount = handle.createUpdate(sqlStatements.deleteArtifactsByGroupId()) - .bind(0, normalizeGroupId(groupId)) - .execute(); + .bind(0, normalizeGroupId(groupId)).execute(); if (rowCount == 0) { throw new ArtifactNotFoundException(groupId, null); @@ -828,8 +742,7 @@ public void deleteArtifacts(String groupId) throws RegistryStorageException { @Transactional public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String artifactId, String version, String artifactType, ContentWrapperDto content, EditableVersionMetaDataDto metaData, - List branches) throws VersionAlreadyExistsException, RegistryStorageException - { + List branches) throws VersionAlreadyExistsException, RegistryStorageException { log.debug("Creating new artifact version for {} {} (version {}).", groupId, artifactId, version); String owner = securityIdentity.getPrincipal().getName(); @@ -844,8 +757,10 @@ public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String a boolean isFirstVersion = countArtifactVersionsRaw(handle, groupId, artifactId) == 0; // Now create the version and return the new version metadata. - ArtifactVersionMetaDataDto versionDto = createArtifactVersionRaw(handle, isFirstVersion, groupId, artifactId, version, - metaData == null ? EditableVersionMetaDataDto.builder().build() : metaData, owner, createdOn, contentId, branches); + ArtifactVersionMetaDataDto versionDto = createArtifactVersionRaw(handle, isFirstVersion, + groupId, artifactId, version, + metaData == null ? 
EditableVersionMetaDataDto.builder().build() : metaData, owner, + createdOn, contentId, branches); return versionDto; }); } catch (Exception ex) { @@ -858,14 +773,12 @@ public ArtifactVersionMetaDataDto createArtifactVersion(String groupId, String a @Override @Transactional - public long countActiveArtifactVersions(String groupId, String artifactId) throws RegistryStorageException { + public long countActiveArtifactVersions(String groupId, String artifactId) + throws RegistryStorageException { log.debug("Searching for versions of artifact {} {}", groupId, artifactId); return handles.withHandleNoException(handle -> { Integer count = handle.createQuery(sqlStatements.selectActiveArtifactVersionsCount()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .mapTo(Integer.class) - .one(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).mapTo(Integer.class).one(); return count.longValue(); }); @@ -874,7 +787,7 @@ public long countActiveArtifactVersions(String groupId, String artifactId) throw @Override @Transactional public Set getArtifactIds(Integer limit) { // TODO Paging and order by - //Set limit to max integer in case limit is null (not allowed) + // Set limit to max integer in case limit is null (not allowed) final Integer adjustedLimit = limit == null ? 
Integer.MAX_VALUE : limit; log.debug("Getting the set of all artifact IDs"); return handles.withHandleNoException(handle -> { @@ -884,11 +797,10 @@ public Set getArtifactIds(Integer limit) { // TODO Paging and order by }); } - @Override @Transactional - public ArtifactSearchResultsDto searchArtifacts(Set filters, OrderBy orderBy, OrderDirection orderDirection, - int offset, int limit) { + public ArtifactSearchResultsDto searchArtifacts(Set filters, OrderBy orderBy, + OrderDirection orderDirection, int offset, int limit) { return handles.withHandleNoException(handle -> { List binders = new LinkedList<>(); @@ -939,7 +851,8 @@ public ArtifactSearchResultsDto searchArtifacts(Set filters, Order break; case contentHash: op = filter.isNot() ? "!=" : "="; - where.append("EXISTS(SELECT c.* FROM content c JOIN versions v ON c.contentId = v.contentId WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); + where.append( + "EXISTS(SELECT c.* FROM content c JOIN versions v ON c.contentId = v.contentId WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); where.append("c.contentHash " + op + " ?"); binders.add((query, idx) -> { query.bind(idx, filter.getStringValue()); @@ -948,7 +861,8 @@ public ArtifactSearchResultsDto searchArtifacts(Set filters, Order break; case canonicalHash: op = filter.isNot() ? "!=" : "="; - where.append("EXISTS(SELECT c.* FROM content c JOIN versions v ON c.contentId = v.contentId WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); + where.append( + "EXISTS(SELECT c.* FROM content c JOIN versions v ON c.contentId = v.contentId WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); where.append("c.canonicalHash " + op + " ?"); binders.add((query, idx) -> { query.bind(idx, filter.getStringValue()); @@ -958,9 +872,11 @@ public ArtifactSearchResultsDto searchArtifacts(Set filters, Order case labels: op = filter.isNot() ? 
"!=" : "="; Pair label = filter.getLabelFilterValue(); - // Note: convert search to lowercase when searching for labels (case-insensitivity support). + // Note: convert search to lowercase when searching for labels (case-insensitivity + // support). String labelKey = label.getKey().toLowerCase(); - where.append("EXISTS(SELECT l.* FROM artifact_labels l WHERE l.labelKey " + op + " ?"); + where.append( + "EXISTS(SELECT l.* FROM artifact_labels l WHERE l.labelKey " + op + " ?"); binders.add((query, idx) -> { query.bind(idx, labelKey); }); @@ -975,7 +891,8 @@ public ArtifactSearchResultsDto searchArtifacts(Set filters, Order break; case globalId: op = filter.isNot() ? "!=" : "="; - where.append("EXISTS(SELECT v.* FROM versions v WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); + where.append( + "EXISTS(SELECT v.* FROM versions v WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); where.append("v.globalId " + op + " ?"); binders.add((query, idx) -> { query.bind(idx, filter.getNumberValue().longValue()); @@ -984,7 +901,8 @@ public ArtifactSearchResultsDto searchArtifacts(Set filters, Order break; case contentId: op = filter.isNot() ? "!=" : "="; - where.append("EXISTS(SELECT c.* FROM content c JOIN versions v ON c.contentId = v.contentId WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); + where.append( + "EXISTS(SELECT c.* FROM content c JOIN versions v ON c.contentId = v.contentId WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); where.append("v.contentId " + op + " ?"); binders.add((query, idx) -> { query.bind(idx, filter.getNumberValue().longValue()); @@ -993,7 +911,8 @@ public ArtifactSearchResultsDto searchArtifacts(Set filters, Order break; case state: op = filter.isNot() ? 
"!=" : "="; - where.append("EXISTS(SELECT v.* FROM versions v WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); + where.append( + "EXISTS(SELECT v.* FROM versions v WHERE v.groupId = a.groupId AND v.artifactId = a.artifactId AND "); where.append("v.state " + op + " ?"); binders.add((query, idx) -> { query.bind(idx, filter.getStringValue()); @@ -1034,16 +953,10 @@ public ArtifactSearchResultsDto searchArtifacts(Set filters, Order } // Query for the artifacts - String artifactsQuerySql = new StringBuilder(selectTemplate) - .append(where) - .append(orderByQuery) - .append(limitOffset) - .toString() - .replace("{{selectColumns}}", "a.*"); + String artifactsQuerySql = new StringBuilder(selectTemplate).append(where).append(orderByQuery) + .append(limitOffset).toString().replace("{{selectColumns}}", "a.*"); Query artifactsQuery = handle.createQuery(artifactsQuerySql); - String countQuerySql = new StringBuilder(selectTemplate) - .append(where) - .toString() + String countQuerySql = new StringBuilder(selectTemplate).append(where).toString() .replace("{{selectColumns}}", "count(a.artifactId)"); Query countQuery = handle.createQuery(countQuerySql); @@ -1076,18 +989,18 @@ public ArtifactSearchResultsDto searchArtifacts(Set filters, Order } @Override - public ArtifactMetaDataDto getArtifactMetaData(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException { + public ArtifactMetaDataDto getArtifactMetaData(String groupId, String artifactId) + throws ArtifactNotFoundException, RegistryStorageException { log.debug("Selecting artifact meta-data: {} {}", groupId, artifactId); return handles.withHandle(handle -> getArtifactMetaDataRaw(handle, groupId, artifactId)); } - protected ArtifactMetaDataDto getArtifactMetaDataRaw(Handle handle, String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException { + protected ArtifactMetaDataDto getArtifactMetaDataRaw(Handle handle, String groupId, String 
artifactId) + throws ArtifactNotFoundException, RegistryStorageException { Optional res = handle.createQuery(sqlStatements.selectArtifactMetaData()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .map(ArtifactMetaDataDtoMapper.instance) - .findOne(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId) + .map(ArtifactMetaDataDtoMapper.instance).findOne(); return res.orElseThrow(() -> new ArtifactNotFoundException(groupId, artifactId)); } @@ -1096,25 +1009,24 @@ protected ArtifactMetaDataDto getArtifactMetaDataRaw(Handle handle, String group * * @param references may be null */ - private String getContentHash(String groupId, String artifactId, boolean canonical, - TypedContent content, List references) { + private String getContentHash(String groupId, String artifactId, boolean canonical, TypedContent content, + List references) { if (canonical) { var artifactMetaData = getArtifactMetaData(groupId, artifactId); - return utils.getCanonicalContentHash(content, artifactMetaData.getArtifactType(), - references, this::resolveReferences); + return utils.getCanonicalContentHash(content, artifactMetaData.getArtifactType(), references, + this::resolveReferences); } else { return utils.getContentHash(content, references); } } - /** * @param references may be null */ @Override @Transactional - public ArtifactVersionMetaDataDto getArtifactVersionMetaDataByContent(String groupId, String artifactId, boolean canonical, - TypedContent content, List references) + public ArtifactVersionMetaDataDto getArtifactVersionMetaDataByContent(String groupId, String artifactId, + boolean canonical, TypedContent content, List references) throws ArtifactNotFoundException, RegistryStorageException { String hash = getContentHash(groupId, artifactId, canonical, content, references); @@ -1127,19 +1039,16 @@ public ArtifactVersionMetaDataDto getArtifactVersionMetaDataByContent(String gro sql = sqlStatements.selectArtifactVersionMetaDataByContentHash(); } Optional res = 
handle.createQuery(sql) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, hash) - .map(ArtifactVersionMetaDataDtoMapper.instance) - .findFirst(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, hash) + .map(ArtifactVersionMetaDataDtoMapper.instance).findFirst(); return res.orElseThrow(() -> new ArtifactNotFoundException(groupId, artifactId)); }); } @Override @Transactional - public void updateArtifactMetaData(String groupId, String artifactId, EditableArtifactMetaDataDto metaData) - throws ArtifactNotFoundException, RegistryStorageException { + public void updateArtifactMetaData(String groupId, String artifactId, + EditableArtifactMetaDataDto metaData) throws ArtifactNotFoundException, RegistryStorageException { log.debug("Updating meta-data for an artifact: {} {}", groupId, artifactId); handles.withHandle(handle -> { @@ -1148,10 +1057,8 @@ public void updateArtifactMetaData(String groupId, String artifactId, EditableAr // Update name if (metaData.getName() != null) { int rowCount = handle.createUpdate(sqlStatements.updateArtifactName()) - .bind(0, limitStr(metaData.getName(), 512)) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) - .execute(); + .bind(0, limitStr(metaData.getName(), 512)).bind(1, normalizeGroupId(groupId)) + .bind(2, artifactId).execute(); modified = true; if (rowCount == 0) { throw new ArtifactNotFoundException(groupId, artifactId); @@ -1161,10 +1068,8 @@ public void updateArtifactMetaData(String groupId, String artifactId, EditableAr // Update description if (metaData.getDescription() != null) { int rowCount = handle.createUpdate(sqlStatements.updateArtifactDescription()) - .bind(0, limitStr(metaData.getDescription(), 1024)) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) - .execute(); + .bind(0, limitStr(metaData.getDescription(), 1024)).bind(1, normalizeGroupId(groupId)) + .bind(2, artifactId).execute(); modified = true; if (rowCount == 0) { throw new 
ArtifactNotFoundException(groupId, artifactId); @@ -1174,9 +1079,7 @@ public void updateArtifactMetaData(String groupId, String artifactId, EditableAr // TODO versions shouldn't have owners, only groups and artifacts? if (metaData.getOwner() != null && !metaData.getOwner().trim().isEmpty()) { int rowCount = handle.createUpdate(sqlStatements.updateArtifactOwner()) - .bind(0, metaData.getOwner()) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) + .bind(0, metaData.getOwner()).bind(1, normalizeGroupId(groupId)).bind(2, artifactId) .execute(); modified = true; if (rowCount == 0) { @@ -1188,31 +1091,24 @@ public void updateArtifactMetaData(String groupId, String artifactId, EditableAr if (metaData.getLabels() != null) { int rowCount = handle.createUpdate(sqlStatements.updateArtifactLabels()) .bind(0, SqlUtil.serializeLabels(metaData.getLabels())) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) - .execute(); + .bind(1, normalizeGroupId(groupId)).bind(2, artifactId).execute(); modified = true; if (rowCount == 0) { throw new ArtifactNotFoundException(groupId, artifactId); } // Delete all appropriate rows in the "artifact_labels" table - handle.createUpdate(sqlStatements.deleteArtifactLabels()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .execute(); + handle.createUpdate(sqlStatements.deleteArtifactLabels()).bind(0, normalizeGroupId(groupId)) + .bind(1, artifactId).execute(); // Insert new labels into the "artifact_labels" table Map labels = metaData.getLabels(); if (labels != null && !labels.isEmpty()) { labels.forEach((k, v) -> { String sqli = sqlStatements.insertArtifactLabel(); - handle.createUpdate(sqli) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) + handle.createUpdate(sqli).bind(0, normalizeGroupId(groupId)).bind(1, artifactId) .bind(2, limitStr(k.toLowerCase(), 256)) - .bind(3, limitStr(asLowerCase(v), 512)) - .execute(); + .bind(3, limitStr(asLowerCase(v), 512)).execute(); }); } } @@ -1221,11 +1117,8 @@ 
public void updateArtifactMetaData(String groupId, String artifactId, EditableAr String modifiedBy = securityIdentity.getPrincipal().getName(); Date modifiedOn = new Date(); int rowCount = handle.createUpdate(sqlStatements.updateArtifactModifiedByOn()) - .bind(0, modifiedBy) - .bind(1, modifiedOn) - .bind(2, normalizeGroupId(groupId)) - .bind(3, artifactId) - .execute(); + .bind(0, modifiedBy).bind(1, modifiedOn).bind(2, normalizeGroupId(groupId)) + .bind(3, artifactId).execute(); modified = true; if (rowCount == 0) { throw new ArtifactNotFoundException(groupId, artifactId); @@ -1237,7 +1130,6 @@ public void updateArtifactMetaData(String groupId, String artifactId, EditableAr }); } - @Override @Transactional public List getArtifactRules(String groupId, String artifactId) @@ -1245,15 +1137,12 @@ public List getArtifactRules(String groupId, String artifactId) log.debug("Getting a list of all artifact rules for: {} {}", groupId, artifactId); return handles.withHandle(handle -> { List rules = handle.createQuery(sqlStatements.selectArtifactRules()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .map(new RowMapper() { + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).map(new RowMapper() { @Override public RuleType map(ResultSet rs) throws SQLException { return RuleType.fromValue(rs.getString("type")); } - }) - .list(); + }).list(); if (rules.isEmpty()) { if (!isArtifactExists(groupId, artifactId)) { throw new ArtifactNotFoundException(groupId, artifactId); @@ -1263,19 +1152,17 @@ public RuleType map(ResultSet rs) throws SQLException { }); } - @Override @Transactional - public void createArtifactRule(String groupId, String artifactId, RuleType rule, RuleConfigurationDto config) + public void createArtifactRule(String groupId, String artifactId, RuleType rule, + RuleConfigurationDto config) throws ArtifactNotFoundException, RuleAlreadyExistsException, RegistryStorageException { - log.debug("Inserting an artifact rule row for artifact: {} {} rule: 
{}", groupId, artifactId, rule.name()); + log.debug("Inserting an artifact rule row for artifact: {} {} rule: {}", groupId, artifactId, + rule.name()); try { handles.withHandle(handle -> { - handle.createUpdate(sqlStatements.insertArtifactRule()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, rule.name()) - .bind(3, config.getConfiguration()) + handle.createUpdate(sqlStatements.insertArtifactRule()).bind(0, normalizeGroupId(groupId)) + .bind(1, artifactId).bind(2, rule.name()).bind(3, config.getConfiguration()) .execute(); return null; }); @@ -1291,7 +1178,6 @@ public void createArtifactRule(String groupId, String artifactId, RuleType rule, log.debug("Artifact rule row successfully inserted."); } - @Override @Transactional public void deleteArtifactRules(String groupId, String artifactId) @@ -1299,9 +1185,7 @@ public void deleteArtifactRules(String groupId, String artifactId) log.debug("Deleting all artifact rules for artifact: {} {}", groupId, artifactId); handles.withHandle(handle -> { int count = handle.createUpdate(sqlStatements.deleteArtifactRules()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .execute(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).execute(); if (count == 0) { if (!isArtifactExists(groupId, artifactId)) { throw new ArtifactNotFoundException(groupId, artifactId); @@ -1311,19 +1195,16 @@ public void deleteArtifactRules(String groupId, String artifactId) }); } - @Override @Transactional public RuleConfigurationDto getArtifactRule(String groupId, String artifactId, RuleType rule) throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException { - log.debug("Selecting a single artifact rule for artifact: {} {} and rule: {}", groupId, artifactId, rule.name()); + log.debug("Selecting a single artifact rule for artifact: {} {} and rule: {}", groupId, artifactId, + rule.name()); return handles.withHandle(handle -> { Optional res = 
handle.createQuery(sqlStatements.selectArtifactRuleByType()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, rule.name()) - .map(RuleConfigurationDtoMapper.instance) - .findOne(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, rule.name()) + .map(RuleConfigurationDtoMapper.instance).findOne(); return res.orElseThrow(() -> { if (!isArtifactExists(groupId, artifactId)) { return new ArtifactNotFoundException(groupId, artifactId); @@ -1333,19 +1214,17 @@ public RuleConfigurationDto getArtifactRule(String groupId, String artifactId, R }); } - @Override @Transactional - public void updateArtifactRule(String groupId, String artifactId, RuleType rule, RuleConfigurationDto config) + public void updateArtifactRule(String groupId, String artifactId, RuleType rule, + RuleConfigurationDto config) throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException { - log.debug("Updating an artifact rule for artifact: {} {} and rule: {}::{}", groupId, artifactId, rule.name(), config.getConfiguration()); + log.debug("Updating an artifact rule for artifact: {} {} and rule: {}::{}", groupId, artifactId, + rule.name(), config.getConfiguration()); handles.withHandle(handle -> { int rowCount = handle.createUpdate(sqlStatements.updateArtifactRule()) - .bind(0, config.getConfiguration()) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) - .bind(3, rule.name()) - .execute(); + .bind(0, config.getConfiguration()).bind(1, normalizeGroupId(groupId)).bind(2, artifactId) + .bind(3, rule.name()).execute(); if (rowCount == 0) { if (!isArtifactExists(groupId, artifactId)) { throw new ArtifactNotFoundException(groupId, artifactId); @@ -1356,18 +1235,15 @@ public void updateArtifactRule(String groupId, String artifactId, RuleType rule, }); } - @Override @Transactional public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) throws ArtifactNotFoundException, RuleNotFoundException, RegistryStorageException { 
- log.debug("Deleting an artifact rule for artifact: {} {} and rule: {}", groupId, artifactId, rule.name()); + log.debug("Deleting an artifact rule for artifact: {} {} and rule: {}", groupId, artifactId, + rule.name()); handles.withHandle(handle -> { int rowCount = handle.createUpdate(sqlStatements.deleteArtifactRule()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, rule.name()) - .execute(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, rule.name()).execute(); if (rowCount == 0) { if (!isArtifactExists(groupId, artifactId)) { throw new ArtifactNotFoundException(groupId, artifactId); @@ -1378,15 +1254,14 @@ public void deleteArtifactRule(String groupId, String artifactId, RuleType rule) }); } - @Override @Transactional public List getArtifactVersions(String groupId, String artifactId) throws ArtifactNotFoundException, RegistryStorageException { - return getArtifactVersions(groupId, artifactId, storageBehaviorProps.getDefaultArtifactRetrievalBehavior()); + return getArtifactVersions(groupId, artifactId, + storageBehaviorProps.getDefaultArtifactRetrievalBehavior()); } - @Override public List getArtifactVersions(String groupId, String artifactId, RetrievalBehavior behavior) throws ArtifactNotFoundException, RegistryStorageException { @@ -1398,7 +1273,8 @@ public List getArtifactVersions(String groupId, String artifactId, Retri return getArtifactVersions(groupId, artifactId, sqlStatements.selectArtifactVersions()); } case SKIP_DISABLED_LATEST -> { - return getArtifactVersions(groupId, artifactId, sqlStatements.selectArtifactVersionsNotDisabled()); + return getArtifactVersions(groupId, artifactId, + sqlStatements.selectArtifactVersionsNotDisabled()); } } } catch (BranchNotFoundException ex) { @@ -1411,11 +1287,8 @@ public List getArtifactVersions(String groupId, String artifactId, Strin throws ArtifactNotFoundException, RegistryStorageException { log.debug("Getting a list of versions for artifact: {} {}", groupId, 
artifactId); return handles.withHandle(handle -> { - List versions = handle.createQuery(sqlStatement) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .map(StringMapper.instance) - .list(); + List versions = handle.createQuery(sqlStatement).bind(0, normalizeGroupId(groupId)) + .bind(1, artifactId).map(StringMapper.instance).list(); // If there aren't any versions, it might be because the artifact does not exist if (versions.isEmpty()) { @@ -1443,7 +1316,8 @@ public VersionSearchResultsDto searchVersions(Set filters, OrderBy StringBuilder limitOffset = new StringBuilder(); // Formulate the SELECT clause for the query - selectTemplate.append("SELECT {{selectColumns}} FROM versions v JOIN artifacts a ON v.groupId = a.groupId AND v.artifactId = a.artifactId"); + selectTemplate.append( + "SELECT {{selectColumns}} FROM versions v JOIN artifacts a ON v.groupId = a.groupId AND v.artifactId = a.artifactId"); // Formulate the WHERE clause for both queries where.append(" WHERE (1 = 1)"); @@ -1486,7 +1360,8 @@ public VersionSearchResultsDto searchVersions(Set filters, OrderBy case labels: op = filter.isNot() ? "!=" : "="; Pair label = filter.getLabelFilterValue(); - // Note: convert search to lowercase when searching for labels (case-insensitivity support). + // Note: convert search to lowercase when searching for labels (case-insensitivity + // support). 
String labelKey = label.getKey().toLowerCase(); where.append("EXISTS(SELECT l.* FROM version_labels l WHERE l.labelKey " + op + " ?"); binders.add((query, idx) -> { @@ -1555,17 +1430,11 @@ public VersionSearchResultsDto searchVersions(Set filters, OrderBy } // Query for the versions - String versionsQuerySql = new StringBuilder(selectTemplate) - .append(where) - .append(orderByQuery) - .append(limitOffset) - .toString() - .replace("{{selectColumns}}", "v.*, a.type"); + String versionsQuerySql = new StringBuilder(selectTemplate).append(where).append(orderByQuery) + .append(limitOffset).toString().replace("{{selectColumns}}", "v.*, a.type"); Query versionsQuery = handle.createQuery(versionsQuerySql); // Query for the total row count - String countQuerySql = new StringBuilder(selectTemplate) - .append(where) - .toString() + String countQuerySql = new StringBuilder(selectTemplate).append(where).toString() .replace("{{selectColumns}}", "count(v.globalId)"); Query countQuery = handle.createQuery(countQuerySql); @@ -1591,39 +1460,35 @@ public VersionSearchResultsDto searchVersions(Set filters, OrderBy }); } - @Override @Transactional public StoredArtifactVersionDto getArtifactVersionContent(long globalId) throws ArtifactNotFoundException, RegistryStorageException { log.debug("Selecting a single artifact version by globalId: {}", globalId); return handles.withHandle(handle -> { - Optional res = handle.createQuery(sqlStatements.selectArtifactVersionContentByGlobalId()) - .bind(0, globalId) - .map(StoredArtifactMapper.instance) - .findOne(); + Optional res = handle + .createQuery(sqlStatements.selectArtifactVersionContentByGlobalId()).bind(0, globalId) + .map(StoredArtifactMapper.instance).findOne(); return res.orElseThrow(() -> new ArtifactNotFoundException(null, "gid-" + globalId)); }); } - @Override @Transactional - public StoredArtifactVersionDto getArtifactVersionContent(String groupId, String artifactId, String version) + public StoredArtifactVersionDto 
getArtifactVersionContent(String groupId, String artifactId, + String version) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException { - log.debug("Selecting a single artifact version by artifactId: {} {} and version {}", groupId, artifactId, version); + log.debug("Selecting a single artifact version by artifactId: {} {} and version {}", groupId, + artifactId, version); return handles.withHandle(handle -> { - Optional res = handle.createQuery(sqlStatements.selectArtifactVersionContent()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, version) - .map(StoredArtifactMapper.instance) - .findOne(); + Optional res = handle + .createQuery(sqlStatements.selectArtifactVersionContent()) + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, version) + .map(StoredArtifactMapper.instance).findOne(); return res.orElseThrow(() -> new ArtifactNotFoundException(groupId, artifactId)); }); } - @Override @Transactional public void deleteArtifactVersion(String groupId, String artifactId, String version) @@ -1632,11 +1497,8 @@ public void deleteArtifactVersion(String groupId, String artifactId, String vers handles.withHandle(handle -> { // Delete version - int rows = handle.createUpdate(sqlStatements.deleteVersion()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, version) - .execute(); + int rows = handle.createUpdate(sqlStatements.deleteVersion()).bind(0, normalizeGroupId(groupId)) + .bind(1, artifactId).bind(2, version).execute(); if (rows == 0) { throw new VersionNotFoundException(groupId, artifactId, version); @@ -1657,32 +1519,30 @@ public void deleteArtifactVersion(String groupId, String artifactId, String vers public ArtifactVersionMetaDataDto getArtifactVersionMetaData(Long globalId) throws VersionNotFoundException, RegistryStorageException { return handles.withHandle(handle -> { - Optional res = handle.createQuery(sqlStatements.selectArtifactVersionMetaDataByGlobalId()) - .bind(0, 
globalId) - .map(ArtifactVersionMetaDataDtoMapper.instance) - .findOne(); + Optional res = handle + .createQuery(sqlStatements.selectArtifactVersionMetaDataByGlobalId()).bind(0, globalId) + .map(ArtifactVersionMetaDataDtoMapper.instance).findOne(); return res.orElseThrow(() -> new VersionNotFoundException(globalId)); }); } @Override @Transactional - public ArtifactVersionMetaDataDto getArtifactVersionMetaData(String groupId, String artifactId, String version) { + public ArtifactVersionMetaDataDto getArtifactVersionMetaData(String groupId, String artifactId, + String version) { return handles.withHandle(handle -> { - Optional res = handle.createQuery(sqlStatements.selectArtifactVersionMetaData()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, version) - .map(ArtifactVersionMetaDataDtoMapper.instance) - .findOne(); + Optional res = handle + .createQuery(sqlStatements.selectArtifactVersionMetaData()) + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, version) + .map(ArtifactVersionMetaDataDtoMapper.instance).findOne(); return res.orElseThrow(() -> new VersionNotFoundException(groupId, artifactId, version)); }); } - @Override @Transactional - public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, EditableVersionMetaDataDto editableMetadata) + public void updateArtifactVersionMetaData(String groupId, String artifactId, String version, + EditableVersionMetaDataDto editableMetadata) throws ArtifactNotFoundException, VersionNotFoundException, RegistryStorageException { log.debug("Updating meta-data for an artifact version: {} {}", groupId, artifactId); @@ -1691,11 +1551,8 @@ public void updateArtifactVersionMetaData(String groupId, String artifactId, Str handles.withHandle(handle -> { if (editableMetadata.getName() != null) { int rowCount = handle.createUpdate(sqlStatements.updateArtifactVersionNameByGAV()) - .bind(0, limitStr(editableMetadata.getName(), 512)) - .bind(1, 
normalizeGroupId(groupId)) - .bind(2, artifactId) - .bind(3, version) - .execute(); + .bind(0, limitStr(editableMetadata.getName(), 512)).bind(1, normalizeGroupId(groupId)) + .bind(2, artifactId).bind(3, version).execute(); if (rowCount == 0) { throw new VersionNotFoundException(groupId, artifactId, version); } @@ -1704,10 +1561,7 @@ public void updateArtifactVersionMetaData(String groupId, String artifactId, Str if (editableMetadata.getDescription() != null) { int rowCount = handle.createUpdate(sqlStatements.updateArtifactVersionDescriptionByGAV()) .bind(0, limitStr(editableMetadata.getDescription(), 1024)) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) - .bind(3, version) - .execute(); + .bind(1, normalizeGroupId(groupId)).bind(2, artifactId).bind(3, version).execute(); if (rowCount == 0) { throw new VersionNotFoundException(groupId, artifactId, version); } @@ -1715,11 +1569,8 @@ public void updateArtifactVersionMetaData(String groupId, String artifactId, Str if (editableMetadata.getState() != null) { int rowCount = handle.createUpdate(sqlStatements.updateArtifactVersionStateByGAV()) - .bind(0, editableMetadata.getState().name()) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) - .bind(3, version) - .execute(); + .bind(0, editableMetadata.getState().name()).bind(1, normalizeGroupId(groupId)) + .bind(2, artifactId).bind(3, version).execute(); if (rowCount == 0) { throw new VersionNotFoundException(groupId, artifactId, version); } @@ -1728,17 +1579,13 @@ public void updateArtifactVersionMetaData(String groupId, String artifactId, Str if (editableMetadata.getLabels() != null) { int rowCount = handle.createUpdate(sqlStatements.updateArtifactVersionLabelsByGAV()) .bind(0, SqlUtil.serializeLabels(editableMetadata.getLabels())) - .bind(1, normalizeGroupId(groupId)) - .bind(2, artifactId) - .bind(3, version) - .execute(); + .bind(1, normalizeGroupId(groupId)).bind(2, artifactId).bind(3, version).execute(); if (rowCount == 0) { throw new 
VersionNotFoundException(groupId, artifactId, version); } // Delete all appropriate rows in the "version_labels" table - handle.createUpdate(sqlStatements.deleteVersionLabelsByGlobalId()) - .bind(0, globalId) + handle.createUpdate(sqlStatements.deleteVersionLabelsByGlobalId()).bind(0, globalId) .execute(); // Insert new labels into the "version_labels" table @@ -1746,11 +1593,8 @@ public void updateArtifactVersionMetaData(String groupId, String artifactId, Str if (labels != null && !labels.isEmpty()) { labels.forEach((k, v) -> { String sqli = sqlStatements.insertVersionLabel(); - handle.createUpdate(sqli) - .bind(0, globalId) - .bind(1, limitStr(k.toLowerCase(), 256)) - .bind(2, limitStr(asLowerCase(v), 512)) - .execute(); + handle.createUpdate(sqli).bind(0, globalId).bind(1, limitStr(k.toLowerCase(), 256)) + .bind(2, limitStr(asLowerCase(v), 512)).execute(); }); } } @@ -1759,11 +1603,12 @@ public void updateArtifactVersionMetaData(String groupId, String artifactId, Str }); } - @Override @Transactional - public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, String value) { - log.debug("Inserting an artifact comment row for artifact: {} {} version: {}", groupId, artifactId, version); + public CommentDto createArtifactVersionComment(String groupId, String artifactId, String version, + String value) { + log.debug("Inserting an artifact comment row for artifact: {} {} version: {}", groupId, artifactId, + version); String owner = securityIdentity.getPrincipal().getName(); Date createdOn = new Date(); @@ -1771,24 +1616,16 @@ public CommentDto createArtifactVersionComment(String groupId, String artifactId try { var metadata = getArtifactVersionMetaData(groupId, artifactId, version); - var entity = CommentEntity.builder() - .commentId(String.valueOf(nextCommentId())) - .globalId(metadata.getGlobalId()) - .owner(owner) - .createdOn(createdOn.getTime()) - .value(value) + var entity = 
CommentEntity.builder().commentId(String.valueOf(nextCommentId())) + .globalId(metadata.getGlobalId()).owner(owner).createdOn(createdOn.getTime()).value(value) .build(); importComment(entity); log.debug("Comment row successfully inserted."); - return CommentDto.builder() - .commentId(entity.commentId) - .owner(owner) - .createdOn(createdOn.getTime()) - .value(value) - .build(); + return CommentDto.builder().commentId(entity.commentId).owner(owner) + .createdOn(createdOn.getTime()).value(value).build(); } catch (VersionNotFoundException ex) { throw ex; } catch (Exception ex) { @@ -1799,20 +1636,17 @@ public CommentDto createArtifactVersionComment(String groupId, String artifactId } } - @Override @Transactional public List getArtifactVersionComments(String groupId, String artifactId, String version) { - log.debug("Getting a list of all artifact version comments for: {} {} @ {}", groupId, artifactId, version); + log.debug("Getting a list of all artifact version comments for: {} {} @ {}", groupId, artifactId, + version); try { return handles.withHandle(handle -> { return handle.createQuery(sqlStatements.selectVersionComments()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, version) - .map(CommentDtoMapper.instance) - .list(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, version) + .map(CommentDtoMapper.instance).list(); }); } catch (ArtifactNotFoundException ex) { throw ex; @@ -1821,27 +1655,23 @@ public List getArtifactVersionComments(String groupId, String artifa } } - @Override @Transactional - public void deleteArtifactVersionComment(String groupId, String artifactId, String version, String commentId) { + public void deleteArtifactVersionComment(String groupId, String artifactId, String version, + String commentId) { log.debug("Deleting a version comment for artifact: {} {} @ {}", groupId, artifactId, version); String deletedBy = securityIdentity.getPrincipal().getName(); handles.withHandle(handle -> { - Optional res = 
handle.createQuery(sqlStatements.selectArtifactVersionMetaData()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, version) - .map(ArtifactVersionMetaDataDtoMapper.instance) - .findOne(); - ArtifactVersionMetaDataDto avmdd = res.orElseThrow(() -> new VersionNotFoundException(groupId, artifactId, version)); + Optional res = handle + .createQuery(sqlStatements.selectArtifactVersionMetaData()) + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, version) + .map(ArtifactVersionMetaDataDtoMapper.instance).findOne(); + ArtifactVersionMetaDataDto avmdd = res + .orElseThrow(() -> new VersionNotFoundException(groupId, artifactId, version)); int rowCount = handle.createUpdate(sqlStatements.deleteVersionComment()) - .bind(0, avmdd.getGlobalId()) - .bind(1, commentId) - .bind(2, deletedBy) - .execute(); + .bind(0, avmdd.getGlobalId()).bind(1, commentId).bind(2, deletedBy).execute(); if (rowCount == 0) { throw new CommentNotFoundException(commentId); } @@ -1849,28 +1679,23 @@ public void deleteArtifactVersionComment(String groupId, String artifactId, Stri }); } - @Override @Transactional - public void updateArtifactVersionComment(String groupId, String artifactId, String version, String commentId, String value) { + public void updateArtifactVersionComment(String groupId, String artifactId, String version, + String commentId, String value) { log.debug("Updating a comment for artifact: {} {} @ {}", groupId, artifactId, version); String modifiedBy = securityIdentity.getPrincipal().getName(); handles.withHandle(handle -> { - Optional res = handle.createQuery(sqlStatements.selectArtifactVersionMetaData()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, version) - .map(ArtifactVersionMetaDataDtoMapper.instance) - .findOne(); - ArtifactVersionMetaDataDto avmdd = res.orElseThrow(() -> new VersionNotFoundException(groupId, artifactId, version)); - - int rowCount = handle.createUpdate(sqlStatements.updateVersionComment()) - 
.bind(0, value) - .bind(1, avmdd.getGlobalId()) - .bind(2, commentId) - .bind(3, modifiedBy) - .execute(); + Optional res = handle + .createQuery(sqlStatements.selectArtifactVersionMetaData()) + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, version) + .map(ArtifactVersionMetaDataDtoMapper.instance).findOne(); + ArtifactVersionMetaDataDto avmdd = res + .orElseThrow(() -> new VersionNotFoundException(groupId, artifactId, version)); + + int rowCount = handle.createUpdate(sqlStatements.updateVersionComment()).bind(0, value) + .bind(1, avmdd.getGlobalId()).bind(2, commentId).bind(3, modifiedBy).execute(); if (rowCount == 0) { throw new CommentNotFoundException(commentId); } @@ -1878,18 +1703,15 @@ public void updateArtifactVersionComment(String groupId, String artifactId, Stri }); } - @Override @Transactional public List getGlobalRules() throws RegistryStorageException { return handles.withHandleNoException(handle -> { return handle.createQuery(sqlStatements.selectGlobalRules()) - .map(rs -> RuleType.fromValue(rs.getString("type"))) - .list(); + .map(rs -> RuleType.fromValue(rs.getString("type"))).list(); }); } - @Override @Transactional public void createGlobalRule(RuleType rule, RuleConfigurationDto config) @@ -1897,10 +1719,8 @@ public void createGlobalRule(RuleType rule, RuleConfigurationDto config) log.debug("Inserting a global rule row for: {}", rule.name()); try { handles.withHandle(handle -> { - handle.createUpdate(sqlStatements.insertGlobalRule()) - .bind(0, rule.name()) - .bind(1, config.getConfiguration()) - .execute(); + handle.createUpdate(sqlStatements.insertGlobalRule()).bind(0, rule.name()) + .bind(1, config.getConfiguration()).execute(); return null; }); } catch (Exception ex) { @@ -1911,19 +1731,16 @@ public void createGlobalRule(RuleType rule, RuleConfigurationDto config) } } - @Override @Transactional public void deleteGlobalRules() throws RegistryStorageException { log.debug("Deleting all Global Rules"); 
handles.withHandleNoException(handle -> { - handle.createUpdate(sqlStatements.deleteGlobalRules()) - .execute(); + handle.createUpdate(sqlStatements.deleteGlobalRules()).execute(); return null; }); } - @Override @Transactional public RuleConfigurationDto getGlobalRule(RuleType rule) @@ -1931,14 +1748,11 @@ public RuleConfigurationDto getGlobalRule(RuleType rule) log.debug("Selecting a single global rule: {}", rule.name()); return handles.withHandle(handle -> { Optional res = handle.createQuery(sqlStatements.selectGlobalRuleByType()) - .bind(0, rule.name()) - .map(RuleConfigurationDtoMapper.instance) - .findOne(); + .bind(0, rule.name()).map(RuleConfigurationDtoMapper.instance).findOne(); return res.orElseThrow(() -> new RuleNotFoundException(rule)); }); } - @Override @Transactional public void updateGlobalRule(RuleType rule, RuleConfigurationDto config) @@ -1946,9 +1760,7 @@ public void updateGlobalRule(RuleType rule, RuleConfigurationDto config) log.debug("Updating a global rule: {}::{}", rule.name(), config.getConfiguration()); handles.withHandle(handle -> { int rowCount = handle.createUpdate(sqlStatements.updateGlobalRule()) - .bind(0, config.getConfiguration()) - .bind(1, rule.name()) - .execute(); + .bind(0, config.getConfiguration()).bind(1, rule.name()).execute(); if (rowCount == 0) { throw new RuleNotFoundException(rule); } @@ -1956,14 +1768,12 @@ public void updateGlobalRule(RuleType rule, RuleConfigurationDto config) }); } - @Override @Transactional public void deleteGlobalRule(RuleType rule) throws RuleNotFoundException, RegistryStorageException { log.debug("Deleting a global rule: {}", rule.name()); handles.withHandle(handle -> { - int rowCount = handle.createUpdate(sqlStatements.deleteGlobalRule()) - .bind(0, rule.name()) + int rowCount = handle.createUpdate(sqlStatements.deleteGlobalRule()).bind(0, rule.name()) .execute(); if (rowCount == 0) { throw new RuleNotFoundException(rule); @@ -1972,96 +1782,77 @@ public void deleteGlobalRule(RuleType rule) 
throws RuleNotFoundException, Regist }); } - @Override @Transactional public List getConfigProperties() throws RegistryStorageException { log.debug("Getting all config properties."); return handles.withHandleNoException(handle -> { String sql = sqlStatements.selectConfigProperties(); - return handle.createQuery(sql) - .map(DynamicConfigPropertyDtoMapper.instance) - .list() - .stream() + return handle.createQuery(sql).map(DynamicConfigPropertyDtoMapper.instance).list().stream() // Filter out possible null values. - .filter(Objects::nonNull) - .collect(toList()); + .filter(Objects::nonNull).collect(toList()); }); } - @Override public DynamicConfigPropertyDto getConfigProperty(String propertyName) throws RegistryStorageException { return getRawConfigProperty(propertyName); // TODO Replace this? } - @Override @Transactional public DynamicConfigPropertyDto getRawConfigProperty(String propertyName) { log.debug("Selecting a single config property: {}", propertyName); return handles.withHandle(handle -> { final String normalizedPropertyName = DtoUtil.appAuthPropertyToRegistry(propertyName); - Optional res = handle.createQuery(sqlStatements.selectConfigPropertyByName()) - .bind(0, normalizedPropertyName) - .map(DynamicConfigPropertyDtoMapper.instance) - .findOne(); + Optional res = handle + .createQuery(sqlStatements.selectConfigPropertyByName()).bind(0, normalizedPropertyName) + .map(DynamicConfigPropertyDtoMapper.instance).findOne(); return res.orElse(null); }); } - @Override @Transactional public void setConfigProperty(DynamicConfigPropertyDto propertyDto) throws RegistryStorageException { - log.debug("Setting a config property with name: {} and value: {}", propertyDto.getName(), propertyDto.getValue()); + log.debug("Setting a config property with name: {} and value: {}", propertyDto.getName(), + propertyDto.getValue()); handles.withHandleNoException(handle -> { String propertyName = propertyDto.getName(); String propertyValue = propertyDto.getValue(); // First delete 
the property row from the table // TODO Use deleteConfigProperty - handle.createUpdate(sqlStatements.deleteConfigProperty()) - .bind(0, propertyName) - .execute(); + handle.createUpdate(sqlStatements.deleteConfigProperty()).bind(0, propertyName).execute(); // Then create the row again with the new value - handle.createUpdate(sqlStatements.insertConfigProperty()) - .bind(0, propertyName) - .bind(1, propertyValue) - .bind(2, java.lang.System.currentTimeMillis()) - .execute(); + handle.createUpdate(sqlStatements.insertConfigProperty()).bind(0, propertyName) + .bind(1, propertyValue).bind(2, java.lang.System.currentTimeMillis()).execute(); return null; }); } - @Override @Transactional public void deleteConfigProperty(String propertyName) throws RegistryStorageException { handles.withHandle(handle -> { - handle.createUpdate(sqlStatements.deleteConfigProperty()) - .bind(0, propertyName) - .execute(); + handle.createUpdate(sqlStatements.deleteConfigProperty()).bind(0, propertyName).execute(); return null; }); } - @Override @Transactional - public List getStaleConfigProperties(Instant lastRefresh) throws RegistryStorageException { + public List getStaleConfigProperties(Instant lastRefresh) + throws RegistryStorageException { log.debug("Getting all stale config properties."); return handles.withHandleNoException(handle -> { return handle.createQuery(sqlStatements.selectStaleConfigProperties()) - .bind(0, lastRefresh.toEpochMilli()) - .map(DynamicConfigPropertyDtoMapper.instance) - .list() + .bind(0, lastRefresh.toEpochMilli()).map(DynamicConfigPropertyDtoMapper.instance).list() .stream() // Filter out possible null values. 
- .filter(Objects::nonNull) - .collect(toList()); + .filter(Objects::nonNull).collect(toList()); }); } @@ -2070,35 +1861,32 @@ public List getStaleConfigProperties(Instant lastRefre */ @Override @Transactional - public void createGroup(GroupMetaDataDto group) throws GroupAlreadyExistsException, RegistryStorageException { + public void createGroup(GroupMetaDataDto group) + throws GroupAlreadyExistsException, RegistryStorageException { try { handles.withHandle(handle -> { // Insert a row into the groups table - handle.createUpdate(sqlStatements.insertGroup()) - .bind(0, group.getGroupId()) - .bind(1, group.getDescription()) - .bind(2, group.getArtifactsType()) + handle.createUpdate(sqlStatements.insertGroup()).bind(0, group.getGroupId()) + .bind(1, group.getDescription()).bind(2, group.getArtifactsType()) .bind(3, group.getOwner()) - // TODO io.apicurio.registry.storage.dto.GroupMetaDataDto should not use raw numeric timestamps + // TODO io.apicurio.registry.storage.dto.GroupMetaDataDto should not use raw numeric + // timestamps .bind(4, group.getCreatedOn() == 0 ? new Date() : new Date(group.getCreatedOn())) .bind(5, group.getModifiedBy()) .bind(6, group.getModifiedOn() == 0 ? 
new Date() : new Date(group.getModifiedOn())) - .bind(7, SqlUtil.serializeLabels(group.getLabels())) - .execute(); - + .bind(7, SqlUtil.serializeLabels(group.getLabels())).execute(); + // Insert new labels into the "group_labels" table Map labels = group.getLabels(); if (labels != null && !labels.isEmpty()) { labels.forEach((k, v) -> { String sqli = sqlStatements.insertGroupLabel(); - handle.createUpdate(sqli) - .bind(0, group.getGroupId()) + handle.createUpdate(sqli).bind(0, group.getGroupId()) .bind(1, limitStr(k.toLowerCase(), 256)) - .bind(2, limitStr(asLowerCase(v), 512)) - .execute(); + .bind(2, limitStr(asLowerCase(v), 512)).execute(); }); } - + return null; }); } catch (Exception ex) { @@ -2109,39 +1897,36 @@ public void createGroup(GroupMetaDataDto group) throws GroupAlreadyExistsExcepti } } - /** * Deletes a group and all artifacts in that group. + * * @see io.apicurio.registry.storage.RegistryStorage#deleteGroup(java.lang.String) */ @Override @Transactional public void deleteGroup(String groupId) throws GroupNotFoundException, RegistryStorageException { handles.withHandleNoException(handle -> { - // Note: delete artifact rules separately. Artifact rules are not set to cascade on delete + // Note: delete artifact rules separately. Artifact rules are not set to cascade on delete // because the Confluent API allows users to configure rules for artifacts that don't exist. 
:( handle.createUpdate(sqlStatements.deleteArtifactRulesByGroupId()) - .bind(0, normalizeGroupId(groupId)) - .execute(); + .bind(0, normalizeGroupId(groupId)).execute(); // Delete all artifacts in the group (TODO there is currently no FK from artifacts to groups) - handle.createUpdate(sqlStatements.deleteArtifactsByGroupId()) - .bind(0, normalizeGroupId(groupId)) + handle.createUpdate(sqlStatements.deleteArtifactsByGroupId()).bind(0, normalizeGroupId(groupId)) .execute(); // Now delete the group (labels and rules etc will cascade) - int rows = handle.createUpdate(sqlStatements.deleteGroup()) - .bind(0, groupId) - .execute(); + int rows = handle.createUpdate(sqlStatements.deleteGroup()).bind(0, groupId).execute(); if (rows == 0) { throw new GroupNotFoundException(groupId); } return null; }); } - + /** - * @see io.apicurio.registry.storage.RegistryStorage#updateGroupMetaData(java.lang.String, io.apicurio.registry.storage.dto.EditableGroupMetaDataDto) + * @see io.apicurio.registry.storage.RegistryStorage#updateGroupMetaData(java.lang.String, + * io.apicurio.registry.storage.dto.EditableGroupMetaDataDto) */ @Override @Transactional @@ -2152,34 +1937,25 @@ public void updateGroupMetaData(String groupId, EditableGroupMetaDataDto dto) { handles.withHandleNoException(handle -> { // Update the row in the groups table - int rows = handle.createUpdate(sqlStatements.updateGroup()) - .bind(0, dto.getDescription()) - .bind(1, modifiedBy) - .bind(2, modifiedOn) - .bind(3, SqlUtil.serializeLabels(dto.getLabels())) - .bind(4, groupId) - .execute(); + int rows = handle.createUpdate(sqlStatements.updateGroup()).bind(0, dto.getDescription()) + .bind(1, modifiedBy).bind(2, modifiedOn).bind(3, SqlUtil.serializeLabels(dto.getLabels())) + .bind(4, groupId).execute(); if (rows == 0) { throw new GroupNotFoundException(groupId); } - + // Delete all appropriate rows in the "group_labels" table - handle.createUpdate(sqlStatements.deleteGroupLabelsByGroupId()) - .bind(0, groupId) - .execute(); 
+ handle.createUpdate(sqlStatements.deleteGroupLabelsByGroupId()).bind(0, groupId).execute(); // Insert new labels into the "group_labels" table if (dto.getLabels() != null && !dto.getLabels().isEmpty()) { dto.getLabels().forEach((k, v) -> { String sqli = sqlStatements.insertGroupLabel(); - handle.createUpdate(sqli) - .bind(0, groupId) - .bind(1, limitStr(k.toLowerCase(), 256)) - .bind(2, limitStr(asLowerCase(v), 512)) - .execute(); + handle.createUpdate(sqli).bind(0, groupId).bind(1, limitStr(k.toLowerCase(), 256)) + .bind(2, limitStr(asLowerCase(v), 512)).execute(); }); } - + return null; }); } @@ -2190,26 +1966,21 @@ public List getGroupIds(Integer limit) throws RegistryStorageException { return handles.withHandleNoException(handle -> { Query query = handle.createQuery(sqlStatements.selectGroups()); query.bind(0, limit); - return query - .map(rs -> rs.getString("groupId")) - .list(); + return query.map(rs -> rs.getString("groupId")).list(); }); } - @Override @Transactional - public GroupMetaDataDto getGroupMetaData(String groupId) throws GroupNotFoundException, RegistryStorageException { + public GroupMetaDataDto getGroupMetaData(String groupId) + throws GroupNotFoundException, RegistryStorageException { return handles.withHandle(handle -> { Optional res = handle.createQuery(sqlStatements.selectGroupByGroupId()) - .bind(0, groupId) - .map(GroupMetaDataDtoMapper.instance) - .findOne(); + .bind(0, groupId).map(GroupMetaDataDtoMapper.instance).findOne(); return res.orElseThrow(() -> new GroupNotFoundException(groupId)); }); } - /** * NOTE: Does not export the manifest file TODO */ @@ -2230,10 +2001,8 @@ public void exportData(Function handler) throws RegistryStorageExc // Export all content ///////////////////////////////// handles.withHandle(handle -> { - Stream stream = handle.createQuery(sqlStatements.exportContent()) - .setFetchSize(50) - .map(ContentEntityMapper.instance) - .stream(); + Stream stream = 
handle.createQuery(sqlStatements.exportContent()).setFetchSize(50) + .map(ContentEntityMapper.instance).stream(); // Process and then close the stream. try (stream) { stream.forEach(handler::apply); @@ -2244,10 +2013,8 @@ public void exportData(Function handler) throws RegistryStorageExc // Export all groups ///////////////////////////////// handles.withHandle(handle -> { - Stream stream = handle.createQuery(sqlStatements.exportGroups()) - .setFetchSize(50) - .map(GroupEntityMapper.instance) - .stream(); + Stream stream = handle.createQuery(sqlStatements.exportGroups()).setFetchSize(50) + .map(GroupEntityMapper.instance).stream(); // Process and then close the stream. try (stream) { stream.forEach(handler::apply); @@ -2259,9 +2026,7 @@ public void exportData(Function handler) throws RegistryStorageExc ///////////////////////////////// handles.withHandle(handle -> { Stream stream = handle.createQuery(sqlStatements.exportArtifacts()) - .setFetchSize(50) - .map(ArtifactEntityMapper.instance) - .stream(); + .setFetchSize(50).map(ArtifactEntityMapper.instance).stream(); // Process and then close the stream. try (stream) { stream.forEach(handler::apply); @@ -2273,9 +2038,7 @@ public void exportData(Function handler) throws RegistryStorageExc ///////////////////////////////// handles.withHandle(handle -> { Stream stream = handle.createQuery(sqlStatements.exportArtifactVersions()) - .setFetchSize(50) - .map(ArtifactVersionEntityMapper.instance) - .stream(); + .setFetchSize(50).map(ArtifactVersionEntityMapper.instance).stream(); // Process and then close the stream. 
try (stream) { stream.forEach(handler::apply); @@ -2287,9 +2050,7 @@ public void exportData(Function handler) throws RegistryStorageExc ///////////////////////////////// handles.withHandle(handle -> { Stream stream = handle.createQuery(sqlStatements.exportVersionComments()) - .setFetchSize(50) - .map(CommentEntityMapper.instance) - .stream(); + .setFetchSize(50).map(CommentEntityMapper.instance).stream(); // Process and then close the stream. try (stream) { stream.forEach(handler::apply); @@ -2300,10 +2061,8 @@ public void exportData(Function handler) throws RegistryStorageExc // Export all branches ///////////////////////////////// handles.withHandle(handle -> { - Stream stream = handle.createQuery(sqlStatements.exportBranches()) - .setFetchSize(50) - .map(BranchEntityMapper.instance) - .stream(); + Stream stream = handle.createQuery(sqlStatements.exportBranches()).setFetchSize(50) + .map(BranchEntityMapper.instance).stream(); // Process and then close the stream. try (stream) { stream.forEach(branch -> { @@ -2318,9 +2077,7 @@ public void exportData(Function handler) throws RegistryStorageExc ///////////////////////////////// handles.withHandle(handle -> { Stream stream = handle.createQuery(sqlStatements.exportArtifactRules()) - .setFetchSize(50) - .map(ArtifactRuleEntityMapper.instance) - .stream(); + .setFetchSize(50).map(ArtifactRuleEntityMapper.instance).stream(); // Process and then close the stream. try (stream) { stream.forEach(handler::apply); @@ -2332,9 +2089,7 @@ public void exportData(Function handler) throws RegistryStorageExc ///////////////////////////////// handles.withHandle(handle -> { Stream stream = handle.createQuery(sqlStatements.exportGlobalRules()) - .setFetchSize(50) - .map(GlobalRuleEntityMapper.instance) - .stream(); + .setFetchSize(50).map(GlobalRuleEntityMapper.instance).stream(); // Process and then close the stream. 
try (stream) { stream.forEach(handler::apply); @@ -2343,25 +2098,22 @@ public void exportData(Function handler) throws RegistryStorageExc }); } - @Override public void importData(EntityInputStream entities, boolean preserveGlobalId, boolean preserveContentId) { - DataImporter dataImporter = new SqlDataImporter(log, utils, this, preserveGlobalId, preserveContentId); - dataImporter.importData(entities, () -> {}); + DataImporter dataImporter = new SqlDataImporter(log, utils, this, preserveGlobalId, + preserveContentId); + dataImporter.importData(entities, () -> { + }); } - @Override @Transactional public long countArtifacts() throws RegistryStorageException { return handles.withHandle(handle -> { - return handle.createQuery(sqlStatements.selectAllArtifactCount()) - .mapTo(Long.class) - .one(); + return handle.createQuery(sqlStatements.selectAllArtifactCount()).mapTo(Long.class).one(); }); } - @Override @Transactional public long countArtifactVersions(String groupId, String artifactId) throws RegistryStorageException { @@ -2372,36 +2124,30 @@ public long countArtifactVersions(String groupId, String artifactId) throws Regi return handles.withHandle(handle -> countArtifactVersionsRaw(handle, groupId, artifactId)); } - protected long countArtifactVersionsRaw(Handle handle, String groupId, String artifactId) throws RegistryStorageException { + protected long countArtifactVersionsRaw(Handle handle, String groupId, String artifactId) + throws RegistryStorageException { return handle.createQuery(sqlStatements.selectAllArtifactVersionsCount()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .mapTo(Long.class) - .one(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).mapTo(Long.class).one(); } @Override @Transactional public long countTotalArtifactVersions() throws RegistryStorageException { return handles.withHandle(handle -> { - return handle.createQuery(sqlStatements.selectTotalArtifactVersionsCount()) - .mapTo(Long.class) + return 
handle.createQuery(sqlStatements.selectTotalArtifactVersionsCount()).mapTo(Long.class) .one(); }); } - @Override @Transactional - public void createRoleMapping(String principalId, String role, String principalName) throws RegistryStorageException { + public void createRoleMapping(String principalId, String role, String principalName) + throws RegistryStorageException { log.debug("Inserting a role mapping row for: {}", principalId); try { handles.withHandle(handle -> { - handle.createUpdate(sqlStatements.insertRoleMapping()) - .bind(0, principalId) - .bind(1, role) - .bind(2, principalName) - .execute(); + handle.createUpdate(sqlStatements.insertRoleMapping()).bind(0, principalId).bind(1, role) + .bind(2, principalName).execute(); return null; }); } catch (Exception ex) { @@ -2412,14 +2158,12 @@ public void createRoleMapping(String principalId, String role, String principalN } } - @Override @Transactional public void deleteRoleMapping(String principalId) throws RegistryStorageException { log.debug("Deleting a role mapping row for: {}", principalId); handles.withHandle(handle -> { - int rowCount = handle.createUpdate(sqlStatements.deleteRoleMapping()) - .bind(0, principalId) + int rowCount = handle.createUpdate(sqlStatements.deleteRoleMapping()).bind(0, principalId) .execute(); if (rowCount == 0) { throw new RoleMappingNotFoundException(principalId); @@ -2428,65 +2172,50 @@ public void deleteRoleMapping(String principalId) throws RegistryStorageExceptio }); } - @Override @Transactional public RoleMappingDto getRoleMapping(String principalId) throws RegistryStorageException { log.debug("Selecting a single role mapping for: {}", principalId); return handles.withHandle(handle -> { Optional res = handle.createQuery(sqlStatements.selectRoleMappingByPrincipalId()) - .bind(0, principalId) - .map(RoleMappingDtoMapper.instance) - .findOne(); + .bind(0, principalId).map(RoleMappingDtoMapper.instance).findOne(); return res.orElseThrow(() -> new 
RoleMappingNotFoundException(principalId)); }); } - @Override @Transactional public String getRoleForPrincipal(String principalId) throws RegistryStorageException { log.debug("Selecting the role for: {}", principalId); return handles.withHandle(handle -> { Optional res = handle.createQuery(sqlStatements.selectRoleByPrincipalId()) - .bind(0, principalId) - .mapTo(String.class) - .findOne(); + .bind(0, principalId).mapTo(String.class).findOne(); return res.orElse(null); }); } - @Override @Transactional public List getRoleMappings() throws RegistryStorageException { log.debug("Getting a list of all role mappings."); return handles.withHandleNoException(handle -> { - return handle.createQuery(sqlStatements.selectRoleMappings()) - .map(RoleMappingDtoMapper.instance) + return handle.createQuery(sqlStatements.selectRoleMappings()).map(RoleMappingDtoMapper.instance) .list(); }); } @Override @Transactional - public RoleMappingSearchResultsDto searchRoleMappings(int offset, int limit) throws RegistryStorageException { + public RoleMappingSearchResultsDto searchRoleMappings(int offset, int limit) + throws RegistryStorageException { log.debug("Searching role mappings."); return handles.withHandleNoException(handle -> { String query = sqlStatements.selectRoleMappings() + " LIMIT ? 
OFFSET ?"; String countQuery = sqlStatements.countRoleMappings(); - List mappings = handle.createQuery(query) - .bind(0, limit) - .bind(1, offset) - .map(RoleMappingDtoMapper.instance) - .list(); - Integer count = handle.createQuery(countQuery) - .mapTo(Integer.class) - .one(); - return RoleMappingSearchResultsDto.builder() - .count(count) - .roleMappings(mappings) - .build(); + List mappings = handle.createQuery(query).bind(0, limit).bind(1, offset) + .map(RoleMappingDtoMapper.instance).list(); + Integer count = handle.createQuery(countQuery).mapTo(Integer.class).one(); + return RoleMappingSearchResultsDto.builder().count(count).roleMappings(mappings).build(); }); } @@ -2495,10 +2224,8 @@ public RoleMappingSearchResultsDto searchRoleMappings(int offset, int limit) thr public void updateRoleMapping(String principalId, String role) throws RegistryStorageException { log.debug("Updating a role mapping: {}::{}", principalId, role); handles.withHandle(handle -> { - int rowCount = handle.createUpdate(sqlStatements.updateRoleMapping()) - .bind(0, role) - .bind(1, principalId) - .execute(); + int rowCount = handle.createUpdate(sqlStatements.updateRoleMapping()).bind(0, role) + .bind(1, principalId).execute(); if (rowCount == 0) { throw new RoleMappingNotFoundException(principalId, role); } @@ -2506,23 +2233,18 @@ public void updateRoleMapping(String principalId, String role) throws RegistrySt }); } - @Override @Transactional public String createDownload(DownloadContextDto context) throws RegistryStorageException { log.debug("Inserting a download."); String downloadId = UUID.randomUUID().toString(); return handles.withHandleNoException(handle -> { - handle.createUpdate(sqlStatements.insertDownload()) - .bind(0, downloadId) - .bind(1, context.getExpires()) - .bind(2, mapper.writeValueAsString(context)) - .execute(); + handle.createUpdate(sqlStatements.insertDownload()).bind(0, downloadId) + .bind(1, context.getExpires()).bind(2, mapper.writeValueAsString(context)).execute(); 
return downloadId; }); } - @Override @Transactional public DownloadContextDto consumeDownload(String downloadId) throws RegistryStorageException { @@ -2533,16 +2255,11 @@ public DownloadContextDto consumeDownload(String downloadId) throws RegistryStor // Select the download context. Optional res = handle.createQuery(sqlStatements.selectDownloadContext()) - .bind(0, downloadId) - .bind(1, now) - .mapTo(String.class) - .findOne(); + .bind(0, downloadId).bind(1, now).mapTo(String.class).findOne(); String downloadContext = res.orElseThrow(DownloadNotFoundException::new); // Attempt to delete the row. - int rowCount = handle.createUpdate(sqlStatements.deleteDownload()) - .bind(0, downloadId) - .execute(); + int rowCount = handle.createUpdate(sqlStatements.deleteDownload()).bind(0, downloadId).execute(); if (rowCount == 0) { throw new DownloadNotFoundException(); } @@ -2552,21 +2269,17 @@ public DownloadContextDto consumeDownload(String downloadId) throws RegistryStor }); } - @Override @Transactional public void deleteAllExpiredDownloads() throws RegistryStorageException { log.debug("Deleting all expired downloads"); long now = java.lang.System.currentTimeMillis(); handles.withHandleNoException(handle -> { - handle.createUpdate(sqlStatements.deleteExpiredDownloads()) - .bind(0, now) - .execute(); + handle.createUpdate(sqlStatements.deleteExpiredDownloads()).bind(0, now).execute(); return null; }); } - @Override @Transactional public void deleteAllUserData() { @@ -2577,42 +2290,31 @@ public void deleteAllUserData() { handles.withHandleNoException(handle -> { // Delete all artifacts and related data - handle.createUpdate(sqlStatements.deleteAllContentReferences()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllContentReferences()).execute(); - handle.createUpdate(sqlStatements.deleteVersionLabelsByAll()) - .execute(); + handle.createUpdate(sqlStatements.deleteVersionLabelsByAll()).execute(); - handle.createUpdate(sqlStatements.deleteAllVersionComments()) - 
.execute(); + handle.createUpdate(sqlStatements.deleteAllVersionComments()).execute(); - handle.createUpdate(sqlStatements.deleteAllBranches()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllBranches()).execute(); - handle.createUpdate(sqlStatements.deleteAllVersions()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllVersions()).execute(); - handle.createUpdate(sqlStatements.deleteAllArtifactRules()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllArtifactRules()).execute(); - handle.createUpdate(sqlStatements.deleteAllArtifacts()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllArtifacts()).execute(); // Delete all groups - handle.createUpdate(sqlStatements.deleteAllGroups()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllGroups()).execute(); // Delete all role mappings - handle.createUpdate(sqlStatements.deleteAllRoleMappings()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllRoleMappings()).execute(); // Delete all content - handle.createUpdate(sqlStatements.deleteAllContent()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllContent()).execute(); // Delete all config properties - handle.createUpdate(sqlStatements.deleteAllConfigProperties()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllConfigProperties()).execute(); // TODO Do we need to delete comments? 
@@ -2621,7 +2323,6 @@ public void deleteAllUserData() { } - @Override @Transactional public Map resolveReferences(List references) { @@ -2634,88 +2335,73 @@ public Map resolveReferences(List re } } - @Override @Transactional public boolean isArtifactExists(String groupId, String artifactId) throws RegistryStorageException { return handles.withHandleNoException(handle -> { return handle.createQuery(sqlStatements().selectArtifactCountById()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .mapTo(Integer.class) - .one() > 0; + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).mapTo(Integer.class).one() > 0; }); } - @Override @Transactional public boolean isGroupExists(String groupId) throws RegistryStorageException { return handles.withHandleNoException(handle -> { return handle.createQuery(sqlStatements().selectGroupCountById()) - .bind(0, normalizeGroupId(groupId)) - .mapTo(Integer.class) - .one() > 0; + .bind(0, normalizeGroupId(groupId)).mapTo(Integer.class).one() > 0; }); } - @Override @Transactional - public List getContentIdsReferencingArtifactVersion(String groupId, String artifactId, String version) { + public List getContentIdsReferencingArtifactVersion(String groupId, String artifactId, + String version) { return handles.withHandleNoException(handle -> { return handle.createQuery(sqlStatements().selectContentIdsReferencingArtifactBy()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, version) - .mapTo(Long.class) + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, version).mapTo(Long.class) .list(); }); } - @Override @Transactional - public List getGlobalIdsReferencingArtifactVersion(String groupId, String artifactId, String version) { + public List getGlobalIdsReferencingArtifactVersion(String groupId, String artifactId, + String version) { return handles.withHandleNoException(handle -> { return handle.createQuery(sqlStatements().selectGlobalIdsReferencingArtifactBy()) - .bind(0, 
normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, version) - .mapTo(Long.class) + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, version).mapTo(Long.class) .list(); }); } - @Override @Transactional - public List getInboundArtifactReferences(String groupId, String artifactId, String version) { + public List getInboundArtifactReferences(String groupId, String artifactId, + String version) { return handles.withHandleNoException(handle -> { return handle.createQuery(sqlStatements().selectInboundContentReferencesByGAV()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, version) - .map(ArtifactReferenceDtoMapper.instance) - .list(); + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, version) + .map(ArtifactReferenceDtoMapper.instance).list(); }); } - @Override - public boolean isArtifactVersionExists(String groupId, String artifactId, String version) throws RegistryStorageException { + public boolean isArtifactVersionExists(String groupId, String artifactId, String version) + throws RegistryStorageException { try { getArtifactVersionMetaData(groupId, artifactId, version); return true; } catch (VersionNotFoundException ignored) { - return false; // TODO Similar exception is thrown in some method callers, do we need this? Or use a different query. + return false; // TODO Similar exception is thrown in some method callers, do we need this? Or use + // a different query. } } - @Override @Transactional - public GroupSearchResultsDto searchGroups(Set filters, OrderBy orderBy, OrderDirection orderDirection, Integer offset, Integer limit) { + public GroupSearchResultsDto searchGroups(Set filters, OrderBy orderBy, + OrderDirection orderDirection, Integer offset, Integer limit) { return handles.withHandleNoException(handle -> { List binders = new LinkedList<>(); String op; @@ -2754,7 +2440,8 @@ public GroupSearchResultsDto searchGroups(Set filters, OrderBy ord case labels: op = filter.isNot() ? 
"!=" : "="; Pair label = filter.getLabelFilterValue(); - // Note: convert search to lowercase when searching for labels (case-insensitivity support). + // Note: convert search to lowercase when searching for labels (case-insensitivity + // support). String labelKey = label.getKey().toLowerCase(); where.append("EXISTS(SELECT l.* FROM group_labels l WHERE l.labelKey " + op + " ?"); binders.add((query, idx) -> { @@ -2796,17 +2483,11 @@ public GroupSearchResultsDto searchGroups(Set filters, OrderBy ord } // Query for the group - String groupsQuerySql = new StringBuilder(selectTemplate) - .append(where) - .append(orderByQuery) - .append(limitOffset) - .toString() - .replace("{{selectColumns}}", "*"); + String groupsQuerySql = new StringBuilder(selectTemplate).append(where).append(orderByQuery) + .append(limitOffset).toString().replace("{{selectColumns}}", "*"); Query groupsQuery = handle.createQuery(groupsQuerySql); // Query for the total row count - String countQuerySql = new StringBuilder(selectTemplate) - .append(where) - .toString() + String countQuerySql = new StringBuilder(selectTemplate).append(where).toString() .replace("{{selectColumns}}", "count(g.groupId)"); Query countQuery = handle.createQuery(countQuerySql); @@ -2838,23 +2519,28 @@ public GroupSearchResultsDto searchGroups(Set filters, OrderBy ord }); } - /** * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. 
*/ - private void resolveReferences(Map resolvedReferences, List references) { + private void resolveReferences(Map resolvedReferences, + List references) { if (references != null && !references.isEmpty()) { for (ArtifactReferenceDto reference : references) { - if (reference.getArtifactId() == null || reference.getName() == null || reference.getVersion() == null) { + if (reference.getArtifactId() == null || reference.getName() == null + || reference.getVersion() == null) { throw new IllegalStateException("Invalid reference: " + reference); } else { if (!resolvedReferences.containsKey(reference.getName())) { - //TODO improve exception handling + // TODO improve exception handling try { - final ArtifactVersionMetaDataDto referencedArtifactMetaData = getArtifactVersionMetaData(reference.getGroupId(), reference.getArtifactId(), reference.getVersion()); - final ContentWrapperDto referencedContent = getContentById(referencedArtifactMetaData.getContentId()); + final ArtifactVersionMetaDataDto referencedArtifactMetaData = getArtifactVersionMetaData( + reference.getGroupId(), reference.getArtifactId(), + reference.getVersion()); + final ContentWrapperDto referencedContent = getContentById( + referencedArtifactMetaData.getContentId()); resolveReferences(resolvedReferences, referencedContent.getReferences()); - TypedContent typedContent = TypedContent.create(referencedContent.getContent(), referencedContent.getContentType()); + TypedContent typedContent = TypedContent.create(referencedContent.getContent(), + referencedContent.getContentType()); resolvedReferences.put(reference.getName(), typedContent); } catch (VersionNotFoundException ex) { // Ignored @@ -2865,7 +2551,6 @@ private void resolveReferences(Map resolvedReferences, Lis } } - /** * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. 
*/ @@ -2875,81 +2560,64 @@ private void deleteAllOrphanedContent() { handles.withHandleNoException(handle -> { // Delete orphaned references - handle.createUpdate(sqlStatements.deleteOrphanedContentReferences()) - .execute(); + handle.createUpdate(sqlStatements.deleteOrphanedContentReferences()).execute(); // Delete orphaned content - handle.createUpdate(sqlStatements.deleteAllOrphanedContent()) - .execute(); + handle.createUpdate(sqlStatements.deleteAllOrphanedContent()).execute(); return null; }); } - @Override @Transactional public void resetGlobalId() { resetSequence(GLOBAL_ID_SEQUENCE, sqlStatements.selectMaxGlobalId()); } - @Override @Transactional public void resetContentId() { resetSequence(CONTENT_ID_SEQUENCE, sqlStatements.selectMaxContentId()); } - @Override @Transactional public void resetCommentId() { resetSequence(COMMENT_ID_SEQUENCE, sqlStatements.selectMaxVersionCommentId()); } - /** * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. 
*/ private void resetSequence(String sequenceName, String sqlMaxIdFromTable) { handles.withHandleNoException(handle -> { - Optional maxIdTable = handle.createQuery(sqlMaxIdFromTable) - .mapTo(Long.class) - .findOne(); + Optional maxIdTable = handle.createQuery(sqlMaxIdFromTable).mapTo(Long.class).findOne(); Optional currentIdSeq = handle.createQuery(sqlStatements.selectCurrentSequenceValue()) - .bind(0, sequenceName) - .mapTo(Long.class) - .findOne(); - - //TODO maybe do this in one query - Optional maxId = maxIdTable - .map(maxIdTableValue -> { - if (currentIdSeq.isPresent()) { - if (currentIdSeq.get() > maxIdTableValue) { - //id in sequence is bigger than max value in table - return currentIdSeq.get(); - } - } - //max value in table is bigger that id in sequence - return maxIdTableValue; - }); - + .bind(0, sequenceName).mapTo(Long.class).findOne(); + + // TODO maybe do this in one query + Optional maxId = maxIdTable.map(maxIdTableValue -> { + if (currentIdSeq.isPresent()) { + if (currentIdSeq.get() > maxIdTableValue) { + // id in sequence is bigger than max value in table + return currentIdSeq.get(); + } + } + // max value in table is bigger that id in sequence + return maxIdTableValue; + }); if (maxId.isPresent()) { log.info("Resetting {} sequence", sequenceName); long id = maxId.get(); if ("postgresql".equals(sqlStatements.dbType())) { - handle.createUpdate(sqlStatements.resetSequenceValue()) - .bind(0, sequenceName) - .bind(1, id) - .bind(2, id) - .execute(); + handle.createUpdate(sqlStatements.resetSequenceValue()).bind(0, sequenceName).bind(1, id) + .bind(2, id).execute(); } else { - handle.createUpdate(sqlStatements.resetSequenceValue()) - .bind(0, sequenceName) - .bind(1, id) + handle.createUpdate(sqlStatements.resetSequenceValue()).bind(0, sequenceName).bind(1, id) .execute(); } @@ -2959,18 +2627,14 @@ private void resetSequence(String sequenceName, String sqlMaxIdFromTable) { }); } - @Override @Transactional public void 
importArtifactRule(ArtifactRuleEntity entity) { handles.withHandleNoException(handle -> { if (isArtifactExists(entity.groupId, entity.artifactId)) { handle.createUpdate(sqlStatements.importArtifactRule()) - .bind(0, normalizeGroupId(entity.groupId)) - .bind(1, entity.artifactId) - .bind(2, entity.type.name()) - .bind(3, entity.configuration) - .execute(); + .bind(0, normalizeGroupId(entity.groupId)).bind(1, entity.artifactId) + .bind(2, entity.type.name()).bind(3, entity.configuration).execute(); } else { throw new ArtifactNotFoundException(entity.groupId, entity.artifactId); } @@ -2983,28 +2647,18 @@ public void importArtifact(ArtifactEntity entity) { handles.withHandleNoException(handle -> { if (!isArtifactExists(entity.groupId, entity.artifactId)) { String labelsStr = SqlUtil.serializeLabels(entity.labels); - handle.createUpdate(sqlStatements.insertArtifact()) - .bind(0, normalizeGroupId(entity.groupId)) - .bind(1, entity.artifactId) - .bind(2, entity.artifactType) - .bind(3, entity.owner) - .bind(4, new Date(entity.createdOn)) - .bind(5, entity.modifiedBy) - .bind(6, new Date(entity.modifiedOn)) - .bind(7, entity.name) - .bind(8, entity.description) - .bind(9, labelsStr) - .execute(); + handle.createUpdate(sqlStatements.insertArtifact()).bind(0, normalizeGroupId(entity.groupId)) + .bind(1, entity.artifactId).bind(2, entity.artifactType).bind(3, entity.owner) + .bind(4, new Date(entity.createdOn)).bind(5, entity.modifiedBy) + .bind(6, new Date(entity.modifiedOn)).bind(7, entity.name).bind(8, entity.description) + .bind(9, labelsStr).execute(); // Insert labels into the "artifact_labels" table if (entity.labels != null && !entity.labels.isEmpty()) { entity.labels.forEach((k, v) -> { handle.createUpdate(sqlStatements.insertArtifactLabel()) - .bind(0, normalizeGroupId(entity.groupId)) - .bind(1, entity.artifactId) - .bind(2, k.toLowerCase()) - .bind(3, v.toLowerCase()) - .execute(); + .bind(0, normalizeGroupId(entity.groupId)).bind(1, entity.artifactId) + .bind(2, 
k.toLowerCase()).bind(3, v.toLowerCase()).execute(); }); } } else { @@ -3025,31 +2679,20 @@ public void importArtifactVersion(ArtifactVersionEntity entity) { throw new VersionAlreadyExistsException(entity.globalId); } if (!isGlobalIdExists(entity.globalId)) { - handle.createUpdate(sqlStatements.importArtifactVersion()) - .bind(0, entity.globalId) - .bind(1, normalizeGroupId(entity.groupId)) - .bind(2, entity.artifactId) - .bind(3, entity.version) - .bind(4, entity.versionOrder) - .bind(5, entity.state) - .bind(6, entity.name) - .bind(7, entity.description) - .bind(8, entity.owner) - .bind(9, new Date(entity.createdOn)) - .bind(10, entity.modifiedBy) + handle.createUpdate(sqlStatements.importArtifactVersion()).bind(0, entity.globalId) + .bind(1, normalizeGroupId(entity.groupId)).bind(2, entity.artifactId) + .bind(3, entity.version).bind(4, entity.versionOrder).bind(5, entity.state) + .bind(6, entity.name).bind(7, entity.description).bind(8, entity.owner) + .bind(9, new Date(entity.createdOn)).bind(10, entity.modifiedBy) .bind(11, new Date(entity.modifiedOn)) - .bind(12, SqlUtil.serializeLabels(entity.labels)) - .bind(13, entity.contentId) + .bind(12, SqlUtil.serializeLabels(entity.labels)).bind(13, entity.contentId) .execute(); // Insert labels into the "version_labels" table if (entity.labels != null && !entity.labels.isEmpty()) { entity.labels.forEach((k, v) -> { - handle.createUpdate(sqlStatements.insertVersionLabel()) - .bind(0, entity.globalId) - .bind(1, k.toLowerCase()) - .bind(2, v.toLowerCase()) - .execute(); + handle.createUpdate(sqlStatements.insertVersionLabel()).bind(0, entity.globalId) + .bind(1, k.toLowerCase()).bind(2, v.toLowerCase()).execute(); }); } @@ -3061,22 +2704,17 @@ public void importArtifactVersion(ArtifactVersionEntity entity) { }); } - @Override @Transactional public void importContent(ContentEntity entity) { handles.withHandleNoException(handle -> { if (!isContentExists(handle, entity.contentId)) { - 
handle.createUpdate(sqlStatements.importContent()) - .bind(0, entity.contentId) - .bind(1, entity.canonicalHash) - .bind(2, entity.contentHash) - .bind(3, entity.contentType) - .bind(4, entity.contentBytes) - .bind(5, entity.serializedReferences) - .execute(); + handle.createUpdate(sqlStatements.importContent()).bind(0, entity.contentId) + .bind(1, entity.canonicalHash).bind(2, entity.contentHash).bind(3, entity.contentType) + .bind(4, entity.contentBytes).bind(5, entity.serializedReferences).execute(); - insertReferences(handle, entity.contentId, SqlUtil.deserializeReferences(entity.serializedReferences)); + insertReferences(handle, entity.contentId, + SqlUtil.deserializeReferences(entity.serializedReferences)); } else { throw new ContentAlreadyExistsException(entity.contentId); } @@ -3084,44 +2722,34 @@ public void importContent(ContentEntity entity) { }); } - @Override @Transactional public void importGlobalRule(GlobalRuleEntity entity) { handles.withHandleNoException(handle -> { handle.createUpdate(sqlStatements.importGlobalRule()) // TODO Duplicated SQL query - .bind(0, entity.ruleType.name()) - .bind(1, entity.configuration) - .execute(); + .bind(0, entity.ruleType.name()).bind(1, entity.configuration).execute(); return null; }); } - @Override @Transactional public void importGroup(GroupEntity entity) { if (!isGroupExists(entity.groupId)) { handles.withHandleNoException(handle -> { handle.createUpdate(sqlStatements.importGroup()) - .bind(0, SqlUtil.normalizeGroupId(entity.groupId)) - .bind(1, entity.description) - .bind(2, entity.artifactsType) - .bind(3, entity.owner) - .bind(4, new Date(entity.createdOn)) - .bind(5, entity.modifiedBy) - .bind(6, new Date(entity.modifiedOn)) - .bind(7, SqlUtil.serializeLabels(entity.labels)) + .bind(0, SqlUtil.normalizeGroupId(entity.groupId)).bind(1, entity.description) + .bind(2, entity.artifactsType).bind(3, entity.owner) + .bind(4, new Date(entity.createdOn)).bind(5, entity.modifiedBy) + .bind(6, new 
Date(entity.modifiedOn)).bind(7, SqlUtil.serializeLabels(entity.labels)) .execute(); // Insert labels into the "group_labels" table if (entity.labels != null && !entity.labels.isEmpty()) { entity.labels.forEach((k, v) -> { handle.createUpdate(sqlStatements.insertGroupLabel()) - .bind(0, normalizeGroupId(entity.groupId)) - .bind(1, k.toLowerCase()) - .bind(2, v.toLowerCase()) - .execute(); + .bind(0, normalizeGroupId(entity.groupId)).bind(1, k.toLowerCase()) + .bind(2, v.toLowerCase()).execute(); }); } @@ -3132,78 +2760,61 @@ public void importGroup(GroupEntity entity) { } } - @Override @Transactional public void importComment(CommentEntity entity) { handles.withHandleNoException(handle -> { - handle.createUpdate(sqlStatements.insertVersionComment()) - .bind(0, entity.commentId) - .bind(1, entity.globalId) - .bind(2, entity.owner) - .bind(3, new Date(entity.createdOn)) - .bind(4, entity.value) - .execute(); + handle.createUpdate(sqlStatements.insertVersionComment()).bind(0, entity.commentId) + .bind(1, entity.globalId).bind(2, entity.owner).bind(3, new Date(entity.createdOn)) + .bind(4, entity.value).execute(); return null; }); } - /** * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. */ private boolean isContentExists(Handle handle, long contentId) { - return handle.createQuery(sqlStatements().selectContentExists()) - .bind(0, contentId) - .mapTo(Integer.class) - .one() > 0; + return handle.createQuery(sqlStatements().selectContentExists()).bind(0, contentId) + .mapTo(Integer.class).one() > 0; } - /** * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. 
*/ private boolean isGlobalIdExists(long globalId) { return handles.withHandleNoException(handle -> { - return handle.createQuery(sqlStatements().selectGlobalIdExists()) - .bind(0, globalId) - .mapTo(Integer.class) - .one() > 0; + return handle.createQuery(sqlStatements().selectGlobalIdExists()).bind(0, globalId) + .mapTo(Integer.class).one() > 0; }); } - @Override @Transactional public long nextContentId() { return nextSequenceValue(CONTENT_ID_SEQUENCE); } - @Override @Transactional public long nextGlobalId() { return nextSequenceValue(GLOBAL_ID_SEQUENCE); } - @Override @Transactional public long nextCommentId() { return nextSequenceValue(COMMENT_ID_SEQUENCE); } - /** * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. */ private long nextSequenceValue(String sequenceName) { return handles.withHandleNoException(handle -> { if (Set.of("mssql", "postgresql").contains(sqlStatements.dbType())) { - return handle.createQuery(sqlStatements.getNextSequenceValue()) - .bind(0, sequenceName) - .mapTo(Long.class) - .one(); // TODO Handle non-existing sequence (see resetSequence) + return handle.createQuery(sqlStatements.getNextSequenceValue()).bind(0, sequenceName) + .mapTo(Long.class).one(); // TODO Handle non-existing sequence (see resetSequence) } else { // no way to automatically increment the sequence in h2 with just one query // we are increasing the sequence value in a way that it's not safe for concurrent executions @@ -3213,23 +2824,17 @@ private long nextSequenceValue(String sequenceName) { // caveat emptor , consider yourself as warned synchronized (inmemorySequencesMutex) { // TODO Use implementation from common app components Optional seqExists = handle.createQuery(sqlStatements.selectCurrentSequenceValue()) - .bind(0, sequenceName) - .mapTo(Long.class) - .findOne(); + .bind(0, sequenceName).mapTo(Long.class).findOne(); if (seqExists.isPresent()) { // Long newValue = seqExists.get() + 1; - 
handle.createUpdate(sqlStatements.resetSequenceValue()) - .bind(0, sequenceName) - .bind(1, newValue) - .execute(); + handle.createUpdate(sqlStatements.resetSequenceValue()).bind(0, sequenceName) + .bind(1, newValue).execute(); return newValue; } else { - handle.createUpdate(sqlStatements.insertSequenceValue()) - .bind(0, sequenceName) - .bind(1, 1) - .execute(); + handle.createUpdate(sqlStatements.insertSequenceValue()).bind(0, sequenceName) + .bind(1, 1).execute(); return 1L; } } @@ -3237,106 +2842,81 @@ private long nextSequenceValue(String sequenceName) { }); } - @Override @Transactional public boolean isContentExists(String contentHash) throws RegistryStorageException { return handles.withHandleNoException(handle -> { - return handle.createQuery(sqlStatements().selectContentCountByHash()) - .bind(0, contentHash) - .mapTo(Integer.class) - .one() > 0; + return handle.createQuery(sqlStatements().selectContentCountByHash()).bind(0, contentHash) + .mapTo(Integer.class).one() > 0; }); } - @Override @Transactional - public boolean isArtifactRuleExists(String groupId, String artifactId, RuleType rule) throws RegistryStorageException { + public boolean isArtifactRuleExists(String groupId, String artifactId, RuleType rule) + throws RegistryStorageException { return handles.withHandleNoException(handle -> { return handle.createQuery(sqlStatements().selectArtifactRuleCountByType()) - .bind(0, normalizeGroupId(groupId)) - .bind(1, artifactId) - .bind(2, rule.name()) - .mapTo(Integer.class) - .one() > 0; + .bind(0, normalizeGroupId(groupId)).bind(1, artifactId).bind(2, rule.name()) + .mapTo(Integer.class).one() > 0; }); } - @Override @Transactional public boolean isGlobalRuleExists(RuleType rule) throws RegistryStorageException { return handles.withHandleNoException(handle -> { - return handle.createQuery(sqlStatements().selectGlobalRuleCountByType()) - .bind(0, rule.name()) - .mapTo(Integer.class) - .one() > 0; + return 
handle.createQuery(sqlStatements().selectGlobalRuleCountByType()).bind(0, rule.name()) + .mapTo(Integer.class).one() > 0; }); } - @Override @Transactional public boolean isRoleMappingExists(String principalId) { return handles.withHandleNoException(handle -> { return handle.createQuery(sqlStatements().selectRoleMappingCountByPrincipal()) - .bind(0, principalId) - .mapTo(Integer.class) - .one() > 0; + .bind(0, principalId).mapTo(Integer.class).one() > 0; }); } - @Override @Transactional public void updateContentCanonicalHash(String newCanonicalHash, long contentId, String contentHash) { handles.withHandleNoException(handle -> { int rowCount = handle.createUpdate(sqlStatements().updateContentCanonicalHash()) - .bind(0, newCanonicalHash) - .bind(1, contentId) - .bind(2, contentHash) - .execute(); + .bind(0, newCanonicalHash).bind(1, contentId).bind(2, contentHash).execute(); if (rowCount == 0) { - log.warn("update content canonicalHash, no row match contentId {} contentHash {}", contentId, contentHash); + log.warn("update content canonicalHash, no row match contentId {} contentHash {}", contentId, + contentHash); } return null; }); } - @Override @Transactional public Optional contentIdFromHash(String contentHash) { return handles.withHandleNoException(handle -> { - return handle.createQuery(sqlStatements().selectContentIdByHash()) - .bind(0, contentHash) - .mapTo(Long.class) - .findOne(); + return handle.createQuery(sqlStatements().selectContentIdByHash()).bind(0, contentHash) + .mapTo(Long.class).findOne(); }); } - @Override @Transactional - public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, List versions) { + public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String description, + List versions) { try { String user = securityIdentity.getPrincipal().getName(); Date now = new Date(); handles.withHandle(handle -> { // Insert a row into the groups table - handle.createUpdate(sqlStatements.insertBranch()) - .bind(0, 
ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .bind(3, description) - .bind(4, false) - .bind(5, user) - .bind(6, now) - .bind(7, user) - .bind(8, now) - .execute(); + handle.createUpdate(sqlStatements.insertBranch()).bind(0, ga.getRawGroupId()) + .bind(1, ga.getRawArtifactId()).bind(2, branchId.getRawBranchId()) + .bind(3, description).bind(4, false).bind(5, user).bind(6, now).bind(7, user) + .bind(8, now).execute(); // Append each of the versions onto the branch if (versions != null) { @@ -3348,19 +2928,13 @@ public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String descripti return null; }); - return BranchMetaDataDto.builder() - .groupId(ga.getRawGroupId()) - .artifactId(ga.getRawArtifactId()) - .branchId(branchId.getRawBranchId()) - .description(description) - .owner(user) - .createdOn(now.getTime()) - .modifiedBy(user) - .modifiedOn(now.getTime()) - .build(); + return BranchMetaDataDto.builder().groupId(ga.getRawGroupId()).artifactId(ga.getRawArtifactId()) + .branchId(branchId.getRawBranchId()).description(description).owner(user) + .createdOn(now.getTime()).modifiedBy(user).modifiedOn(now.getTime()).build(); } catch (Exception ex) { if (sqlStatements.isPrimaryKeyViolation(ex)) { - throw new BranchAlreadyExistsException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), branchId.getRawBranchId()); + throw new BranchAlreadyExistsException(ga.getRawGroupIdWithDefaultString(), + ga.getRawArtifactId(), branchId.getRawBranchId()); } throw ex; } @@ -3371,20 +2945,17 @@ public BranchMetaDataDto createBranch(GA ga, BranchId branchId, String descripti public void updateBranchMetaData(GA ga, BranchId branchId, EditableBranchMetaDataDto dto) { String modifiedBy = securityIdentity.getPrincipal().getName(); Date modifiedOn = new Date(); - log.debug("Updating metadata for branch {} of {}/{}.", branchId, ga.getRawGroupIdWithNull(), ga.getRawArtifactId()); + log.debug("Updating metadata for branch {} of 
{}/{}.", branchId, ga.getRawGroupIdWithNull(), + ga.getRawArtifactId()); handles.withHandleNoException(handle -> { // Update the row in the groups table - int rows = handle.createUpdate(sqlStatements.updateBranch()) - .bind(0, dto.getDescription()) - .bind(1, modifiedBy) - .bind(2, modifiedOn) - .bind(3, ga.getRawGroupId()) - .bind(4, ga.getRawArtifactId()) - .bind(5, branchId.getRawBranchId()) - .execute(); + int rows = handle.createUpdate(sqlStatements.updateBranch()).bind(0, dto.getDescription()) + .bind(1, modifiedBy).bind(2, modifiedOn).bind(3, ga.getRawGroupId()) + .bind(4, ga.getRawArtifactId()).bind(5, branchId.getRawBranchId()).execute(); if (rows == 0) { - throw new BranchNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), branchId.getRawBranchId()); + throw new BranchNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), + branchId.getRawBranchId()); } return null; @@ -3424,17 +2995,11 @@ public BranchSearchResultsDto getBranches(GA ga, int offset, int limit) { } // Query for the branc - String branchesQuerySql = new StringBuilder(selectTemplate) - .append(where) - .append(orderByQuery) - .append(limitOffset) - .toString() - .replace("{{selectColumns}}", "*"); + String branchesQuerySql = new StringBuilder(selectTemplate).append(where).append(orderByQuery) + .append(limitOffset).toString().replace("{{selectColumns}}", "*"); Query branchesQuery = handle.createQuery(branchesQuerySql); // Query for the total row count - String countQuerySql = new StringBuilder(selectTemplate) - .append(where) - .toString() + String countQuerySql = new StringBuilder(selectTemplate).append(where).toString() .replace("{{selectColumns}}", "count(b.branchId)"); Query countQuery = handle.createQuery(countQuerySql); @@ -3459,7 +3024,7 @@ public BranchSearchResultsDto getBranches(GA ga, int offset, int limit) { // Execute count query Integer count = countQuery.mapTo(Integer.class).one(); - // If no branches are found, it might be 
because the artifact does not exist. We + // If no branches are found, it might be because the artifact does not exist. We // need to check for that here. getArtifactMetaDataRaw(handle, ga.getRawGroupIdWithNull(), ga.getRawArtifactId()); @@ -3474,21 +3039,16 @@ public BranchSearchResultsDto getBranches(GA ga, int offset, int limit) { public BranchMetaDataDto getBranchMetaData(GA ga, BranchId branchId) { return handles.withHandle(handle -> { Optional res = handle.createQuery(sqlStatements.selectBranch()) - .bind(0, ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .map(BranchMetaDataDtoMapper.instance) - .findOne(); - return res.orElseThrow(() -> new BranchNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), branchId.getRawBranchId())); + .bind(0, ga.getRawGroupId()).bind(1, ga.getRawArtifactId()) + .bind(2, branchId.getRawBranchId()).map(BranchMetaDataDtoMapper.instance).findOne(); + return res.orElseThrow(() -> new BranchNotFoundException(ga.getRawGroupIdWithDefaultString(), + ga.getRawArtifactId(), branchId.getRawBranchId())); }); } protected List getBranchVersionNumbersRaw(Handle handle, GA ga, BranchId branchId) { - return handle.createQuery(sqlStatements.selectBranchVersionNumbers()) - .bind(0, ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .map(StringMapper.instance) + return handle.createQuery(sqlStatements.selectBranchVersionNumbers()).bind(0, ga.getRawGroupId()) + .bind(1, ga.getRawArtifactId()).bind(2, branchId.getRawBranchId()).map(StringMapper.instance) .list(); } @@ -3503,9 +3063,9 @@ public VersionSearchResultsDto getBranchVersions(GA ga, BranchId branchId, int o StringBuilder limitOffset = new StringBuilder(); // Formulate the SELECT clause for the artifacts query - selectTemplate.append("SELECT {{selectColumns}} FROM branch_versions bv " + - "JOIN versions v ON bv.groupId = v.groupId AND bv.artifactId = v.artifactId AND bv.version = 
v.version " + - "JOIN artifacts a ON a.groupId = v.groupId AND a.artifactId = v.artifactId "); + selectTemplate.append("SELECT {{selectColumns}} FROM branch_versions bv " + + "JOIN versions v ON bv.groupId = v.groupId AND bv.artifactId = v.artifactId AND bv.version = v.version " + + "JOIN artifacts a ON a.groupId = v.groupId AND a.artifactId = v.artifactId "); // Formulate the WHERE clause for both queries where.append(" WHERE bv.groupId = ? AND bv.artifactId = ? AND bv.branchId = ?"); @@ -3530,17 +3090,11 @@ public VersionSearchResultsDto getBranchVersions(GA ga, BranchId branchId, int o } // Query for the versions - String versionsQuerySql = new StringBuilder(selectTemplate) - .append(where) - .append(orderByQuery) - .append(limitOffset) - .toString() - .replace("{{selectColumns}}", "v.*, a.type"); + String versionsQuerySql = new StringBuilder(selectTemplate).append(where).append(orderByQuery) + .append(limitOffset).toString().replace("{{selectColumns}}", "v.*, a.type"); Query versionsQuery = handle.createQuery(versionsQuerySql); // Query for the total row count - String countQuerySql = new StringBuilder(selectTemplate) - .append(where) - .toString() + String countQuerySql = new StringBuilder(selectTemplate).append(where).toString() .replace("{{selectColumns}}", "count(v.globalId)"); Query countQuery = handle.createQuery(countQuerySql); @@ -3584,7 +3138,8 @@ public void appendVersionToBranch(GA ga, BranchId branchId, VersionId version) { }); } catch (Exception ex) { if (sqlStatements.isPrimaryKeyViolation(ex)) { - throw new VersionAlreadyExistsOnBranchException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), version.getRawVersionId(), branchId.getRawBranchId()); + throw new VersionAlreadyExistsOnBranchException(ga.getRawGroupIdWithDefaultString(), + ga.getRawArtifactId(), version.getRawVersionId(), branchId.getRawBranchId()); } throw ex; } @@ -3593,21 +3148,18 @@ public void appendVersionToBranch(GA ga, BranchId branchId, VersionId version) { public 
void appendVersionToBranchRaw(Handle handle, GA ga, BranchId branchId, VersionId version) { try { // Insert a row into the groups table - handle.createUpdate(sqlStatements.appendBranchVersion()) - .bind(0, ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .bind(3, version.getRawVersionId()) - .bind(4, ga.getRawGroupId()) - .bind(5, ga.getRawArtifactId()) - .bind(6, branchId.getRawBranchId()) - .execute(); + handle.createUpdate(sqlStatements.appendBranchVersion()).bind(0, ga.getRawGroupId()) + .bind(1, ga.getRawArtifactId()).bind(2, branchId.getRawBranchId()) + .bind(3, version.getRawVersionId()).bind(4, ga.getRawGroupId()) + .bind(5, ga.getRawArtifactId()).bind(6, branchId.getRawBranchId()).execute(); } catch (Exception ex) { if (sqlStatements.isPrimaryKeyViolation(ex)) { - throw new VersionAlreadyExistsOnBranchException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), version.getRawVersionId(), branchId.getRawBranchId()); + throw new VersionAlreadyExistsOnBranchException(ga.getRawGroupIdWithDefaultString(), + ga.getRawArtifactId(), version.getRawVersionId(), branchId.getRawBranchId()); } if (sqlStatements.isForeignKeyViolation(ex)) { - throw new VersionNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), version.getRawVersionId()); + throw new VersionNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), + version.getRawVersionId()); } throw ex; } @@ -3618,113 +3170,103 @@ public void appendVersionToBranchRaw(Handle handle, GA ga, BranchId branchId, Ve public void replaceBranchVersions(GA ga, BranchId branchId, List versions) { handles.withHandle(handle -> { // Delete all previous versions. 
- handle.createUpdate(sqlStatements.deleteBranchVersions()) - .bind(0, ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .execute(); + handle.createUpdate(sqlStatements.deleteBranchVersions()).bind(0, ga.getRawGroupId()) + .bind(1, ga.getRawArtifactId()).bind(2, branchId.getRawBranchId()).execute(); // Insert each version new int branchOrder = 0; for (VersionId version : versions) { - handle.createUpdate(sqlStatements.insertBranchVersion()) - .bind(0, ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .bind(3, branchOrder++) - .bind(4, version.getRawVersionId()) - .execute(); + handle.createUpdate(sqlStatements.insertBranchVersion()).bind(0, ga.getRawGroupId()) + .bind(1, ga.getRawArtifactId()).bind(2, branchId.getRawBranchId()) + .bind(3, branchOrder++).bind(4, version.getRawVersionId()).execute(); } return null; }); } -// -// @Override -// @Transactional -// public Map> getBranches(GA ga) { -// -// var data1 = handles.withHandleNoException(handle -> { -// -// if (!isArtifactExists(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId())) { -// throw new ArtifactNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId()); -// } -// -// return handle.createQuery(sqlStatements.selectBranches()) -// .bind(0, ga.getRawGroupId()) -// .bind(1, ga.getRawArtifactId()) -// .map(BranchDtoMapper.instance) -// .list(); -// }); -// -// var data2 = new HashMap>(); -// for (BranchDto dto : data1) { -// data2.compute(new BranchId(dto.getBranchId()), (_ignored, v) -> { -// if (v == null) { -// var initial = new ArrayList(); -// initial.add(dto); -// return initial; -// } else { -// v.add(dto); -// return v; -// } -// }); -// } -// -// var data3 = new HashMap>(); -// for (Entry> entry : data2.entrySet()) { -// data3.put(entry.getKey(), entry.getValue().stream() -// .sorted(Comparator.comparingInt(BranchDto::getBranchOrder).reversed()) // Highest first -// 
.map(BranchDto::toGAV) -// .collect(toList())); -// } -// -// return data3; -// } -// -// -// @Override -// @Transactional -// public List getBranch(GA ga, BranchId branchId, ArtifactRetrievalBehavior behavior) { -// -// String sql; -// switch (behavior) { -// case DEFAULT: -// sql = sqlStatements.selectBranchOrdered(); -// break; -// case SKIP_DISABLED_LATEST: -// sql = sqlStatements.selectBranchOrderedNotDisabled(); -// break; -// default: -// throw new UnreachableCodeException(); -// } -// var finalSql = sql; -// -// var res = handles.withHandleNoException(handle -> { -// -// return handle.createQuery(finalSql) -// .bind(0, ga.getRawGroupId()) -// .bind(1, ga.getRawArtifactId()) -// .bind(2, branchId.getRawBranchId()) -// .map(BranchDtoMapper.instance) -// .list() -// .stream() -// .map(BranchDto::toGAV) -// .collect(toList()); -// }); -// -// if (res.isEmpty()) { -// throw new BranchNotFoundException(ga, branchId); -// } -// -// return res; -// } - + // + // @Override + // @Transactional + // public Map> getBranches(GA ga) { + // + // var data1 = handles.withHandleNoException(handle -> { + // + // if (!isArtifactExists(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId())) { + // throw new ArtifactNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId()); + // } + // + // return handle.createQuery(sqlStatements.selectBranches()) + // .bind(0, ga.getRawGroupId()) + // .bind(1, ga.getRawArtifactId()) + // .map(BranchDtoMapper.instance) + // .list(); + // }); + // + // var data2 = new HashMap>(); + // for (BranchDto dto : data1) { + // data2.compute(new BranchId(dto.getBranchId()), (_ignored, v) -> { + // if (v == null) { + // var initial = new ArrayList(); + // initial.add(dto); + // return initial; + // } else { + // v.add(dto); + // return v; + // } + // }); + // } + // + // var data3 = new HashMap>(); + // for (Entry> entry : data2.entrySet()) { + // data3.put(entry.getKey(), entry.getValue().stream() + // 
.sorted(Comparator.comparingInt(BranchDto::getBranchOrder).reversed()) // Highest first + // .map(BranchDto::toGAV) + // .collect(toList())); + // } + // + // return data3; + // } + // + // + // @Override + // @Transactional + // public List getBranch(GA ga, BranchId branchId, ArtifactRetrievalBehavior behavior) { + // + // String sql; + // switch (behavior) { + // case DEFAULT: + // sql = sqlStatements.selectBranchOrdered(); + // break; + // case SKIP_DISABLED_LATEST: + // sql = sqlStatements.selectBranchOrderedNotDisabled(); + // break; + // default: + // throw new UnreachableCodeException(); + // } + // var finalSql = sql; + // + // var res = handles.withHandleNoException(handle -> { + // + // return handle.createQuery(finalSql) + // .bind(0, ga.getRawGroupId()) + // .bind(1, ga.getRawArtifactId()) + // .bind(2, branchId.getRawBranchId()) + // .map(BranchDtoMapper.instance) + // .list() + // .stream() + // .map(BranchDto::toGAV) + // .collect(toList()); + // }); + // + // if (res.isEmpty()) { + // throw new BranchNotFoundException(ga, branchId); + // } + // + // return res; + // } /** - * This method ensures that the named branch exists for the version *and* also adds the - * version to that branch. - * - * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. + * This method ensures that the named branch exists for the version *and* also adds the version to that + * branch. IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. */ private void createOrUpdateBranchRaw(Handle handle, GAV gav, BranchId branchId, boolean systemDefined) { // First make sure the branch exists. 
@@ -3732,17 +3274,9 @@ private void createOrUpdateBranchRaw(Handle handle, GAV gav, BranchId branchId, String user = securityIdentity.getPrincipal().getName(); Date now = new Date(); - handle.createUpdate(sqlStatements.insertBranch()) - .bind(0, gav.getRawGroupId()) - .bind(1, gav.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .bind(3, (String) null) - .bind(4, systemDefined) - .bind(5, user) - .bind(6, now) - .bind(7, user) - .bind(8, now) - .execute(); + handle.createUpdate(sqlStatements.insertBranch()).bind(0, gav.getRawGroupId()) + .bind(1, gav.getRawArtifactId()).bind(2, branchId.getRawBranchId()).bind(3, (String) null) + .bind(4, systemDefined).bind(5, user).bind(6, now).bind(7, user).bind(8, now).execute(); } catch (Exception ex) { if (!sqlStatements.isPrimaryKeyViolation(ex)) { throw ex; @@ -3753,50 +3287,42 @@ private void createOrUpdateBranchRaw(Handle handle, GAV gav, BranchId branchId, appendVersionToBranchRaw(handle, gav, branchId, gav.getVersionId()); } - @Override @Transactional public GAV getBranchTip(GA ga, BranchId branchId, RetrievalBehavior behavior) { return handles.withHandleNoException(handle -> { switch (behavior) { case DEFAULT: - return handle.createQuery(sqlStatements.selectBranchTip()) - .bind(0, ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .map(GAVMapper.instance) - .findOne() - .orElseThrow(() -> new VersionNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), + return handle.createQuery(sqlStatements.selectBranchTip()).bind(0, ga.getRawGroupId()) + .bind(1, ga.getRawArtifactId()).bind(2, branchId.getRawBranchId()) + .map(GAVMapper.instance).findOne() + .orElseThrow(() -> new VersionNotFoundException( + ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), "")); case SKIP_DISABLED_LATEST: return handle.createQuery(sqlStatements.selectBranchTipNotDisabled()) - .bind(0, ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, 
branchId.getRawBranchId()) - .map(GAVMapper.instance) - .findOne() - .orElseThrow(() -> new VersionNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), - "")); + .bind(0, ga.getRawGroupId()).bind(1, ga.getRawArtifactId()) + .bind(2, branchId.getRawBranchId()).map(GAVMapper.instance).findOne() + .orElseThrow(() -> new VersionNotFoundException( + ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), + "")); } throw new UnreachableCodeException(); }); } - /** * IMPORTANT: Private methods can't be @Transactional. Callers MUST have started a transaction. */ private GAV getGAVByGlobalId(long globalId) { return handles.withHandle(handle -> { - return handle.createQuery(sqlStatements.selectGAVByGlobalId()) - .bind(0, globalId) - .map(GAVMapper.instance) - .findOne() + return handle.createQuery(sqlStatements.selectGAVByGlobalId()).bind(0, globalId) + .map(GAVMapper.instance).findOne() .orElseThrow(() -> new VersionNotFoundException(globalId)); }); } - @Override @Transactional public void deleteBranch(GA ga, BranchId branchId) { @@ -3805,19 +3331,16 @@ public void deleteBranch(GA ga, BranchId branchId) { } handles.withHandleNoException(handle -> { - var affected = handle.createUpdate(sqlStatements.deleteBranch()) - .bind(0, ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .execute(); + var affected = handle.createUpdate(sqlStatements.deleteBranch()).bind(0, ga.getRawGroupId()) + .bind(1, ga.getRawArtifactId()).bind(2, branchId.getRawBranchId()).execute(); if (affected == 0) { - throw new BranchNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), branchId.getRawBranchId()); + throw new BranchNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), + branchId.getRawBranchId()); } }); } - @Override @Transactional public void importBranch(BranchEntity entity) { @@ -3825,19 +3348,14 @@ public void importBranch(BranchEntity entity) { var branchId = 
entity.toBranchId(); handles.withHandleNoException(handle -> { if (!isArtifactExists(entity.groupId, entity.artifactId)) { - throw new ArtifactNotFoundException(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId()); - } - handle.createUpdate(sqlStatements.insertBranch()) - .bind(0, ga.getRawGroupId()) - .bind(1, ga.getRawArtifactId()) - .bind(2, branchId.getRawBranchId()) - .bind(3, entity.description) - .bind(4, entity.systemDefined) - .bind(5, entity.owner) - .bind(6, new Date(entity.createdOn)) - .bind(7, entity.modifiedBy) - .bind(8, new Date(entity.modifiedOn)) - .execute(); + throw new ArtifactNotFoundException(ga.getRawGroupIdWithDefaultString(), + ga.getRawArtifactId()); + } + handle.createUpdate(sqlStatements.insertBranch()).bind(0, ga.getRawGroupId()) + .bind(1, ga.getRawArtifactId()).bind(2, branchId.getRawBranchId()) + .bind(3, entity.description).bind(4, entity.systemDefined).bind(5, entity.owner) + .bind(6, new Date(entity.createdOn)).bind(7, entity.modifiedBy) + .bind(8, new Date(entity.modifiedOn)).execute(); // Append each of the versions onto the branch if (entity.versions != null) { @@ -3850,7 +3368,8 @@ public void importBranch(BranchEntity entity) { @Override public String triggerSnapshotCreation() throws RegistryStorageException { - throw new RegistryStorageException("Directly triggering the snapshot creation is not supported for sql storages."); + throw new RegistryStorageException( + "Directly triggering the snapshot creation is not supported for sql storages."); } @Override @@ -3858,12 +3377,10 @@ public String createSnapshot(String location) throws RegistryStorageException { if (!StringUtil.isEmpty(location)) { log.debug("Creating internal database snapshot to location {}.", location); handles.withHandleNoException(handle -> { - handle.createQuery(sqlStatements.createDataSnapshot()) - .bind(0, location).mapTo(Integer.class); + handle.createQuery(sqlStatements.createDataSnapshot()).bind(0, location).mapTo(Integer.class); }); return 
location; - } - else { + } else { log.warn("Skipping database snapshot because no location has been provided"); } return null; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/CommonSqlStatements.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/CommonSqlStatements.java index de01af8667..9f81849c43 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/CommonSqlStatements.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/CommonSqlStatements.java @@ -41,7 +41,8 @@ public List databaseUpgrade(int fromVersion, int toVersion) { DdlParser parser = new DdlParser(); for (int version = fromVersion + 1; version <= toVersion; version++) { - try (InputStream input = getClass().getResourceAsStream("upgrades/" + version + "/" + dbType() + ".upgrade.ddl")) { + try (InputStream input = getClass() + .getResourceAsStream("upgrades/" + version + "/" + dbType() + ".upgrade.ddl")) { statements.addAll(parser.parse(input)); } catch (IOException e) { throw new RuntimeException(e); @@ -116,7 +117,6 @@ public String insertArtifact() { + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; } - /** * @see io.apicurio.registry.storage.impl.sql.SqlStatements#autoUpdateVersionForGlobalId() */ @@ -133,12 +133,12 @@ public String insertVersion(boolean firstVersion) { // TODO: Use COALESCE to unify into a single query. String query; if (firstVersion) { - query = "INSERT INTO versions (globalId, groupId, artifactId, version, versionOrder, state, name, description, owner, createdOn, modifiedBy, modifiedOn, labels, contentId)" + - " VALUES (?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; + query = "INSERT INTO versions (globalId, groupId, artifactId, version, versionOrder, state, name, description, owner, createdOn, modifiedBy, modifiedOn, labels, contentId)" + + " VALUES (?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; } else { // NOTE: Duplicated value of versionOrder is prevented by UQ_versions_2 constraint. 
- query = "INSERT INTO versions (globalId, groupId, artifactId, version, versionOrder, state, name, description, owner, createdOn, modifiedBy, modifiedOn, labels, contentId)" + - " VALUES (?, ?, ?, ?, (SELECT MAX(versionOrder) + 1 FROM versions WHERE groupId = ? AND artifactId = ?), ?, ?, ?, ?, ?, ?, ?, ?, ?)"; + query = "INSERT INTO versions (globalId, groupId, artifactId, version, versionOrder, state, name, description, owner, createdOn, modifiedBy, modifiedOn, labels, contentId)" + + " VALUES (?, ?, ?, ?, (SELECT MAX(versionOrder) + 1 FROM versions WHERE groupId = ? AND artifactId = ?), ?, ?, ?, ?, ?, ?, ?, ?, ?)"; } return query; } @@ -148,8 +148,7 @@ public String insertVersion(boolean firstVersion) { */ @Override public String selectArtifactVersionMetaDataByGlobalId() { - return "SELECT v.*, a.type " - + "FROM versions v " + return "SELECT v.*, a.type " + "FROM versions v " + "JOIN artifacts a ON v.groupId = a.groupId AND v.artifactId = a.artifactId " + "WHERE v.globalId = ?"; } @@ -182,8 +181,7 @@ public String selectArtifactVersionMetaData() { */ @Override public String selectArtifactVersionMetaDataByContentHash() { - return "SELECT v.*, a.type FROM versions v " - + "JOIN content c ON v.contentId = c.contentId " + return "SELECT v.*, a.type FROM versions v " + "JOIN content c ON v.contentId = c.contentId " + "JOIN artifacts a ON v.groupId = a.groupId AND v.artifactId = a.artifactId " + "WHERE v.groupId = ? AND v.artifactId = ? AND c.contentHash = ? ORDER BY v.globalId DESC"; } @@ -201,8 +199,7 @@ public String selectArtifactVersionMetaDataByContentId() { */ @Override public String selectArtifactVersionMetaDataByCanonicalHash() { - return "SELECT v.*, a.type FROM versions v " - + "JOIN content c ON v.contentId = c.contentId " + return "SELECT v.*, a.type FROM versions v " + "JOIN content c ON v.contentId = c.contentId " + "JOIN artifacts a ON v.groupId = a.groupId AND v.artifactId = a.artifactId " + "WHERE v.groupId = ? AND v.artifactId = ? 
AND c.canonicalHash = ? ORDER BY v.globalId DESC"; } @@ -213,8 +210,7 @@ public String selectArtifactVersionMetaDataByCanonicalHash() { @Override public String selectArtifactVersionContentByGlobalId() { return "SELECT v.globalId, v.version, v.versionOrder, v.contentId, c.content, c.contentType, c.refs FROM versions v " - + "JOIN content c ON v.contentId = c.contentId " - + "WHERE v.globalId = ?"; + + "JOIN content c ON v.contentId = c.contentId " + "WHERE v.globalId = ?"; } /** @@ -235,7 +231,6 @@ public String selectArtifactContentIds() { return "SELECT v.contentId FROM versions v WHERE v.groupId = ? AND v.artifactId = ? AND v.state != 'DISABLED' ORDER BY v.versionOrder"; } - @Override public String selectArtifactMetaData() { return "SELECT a.* FROM artifacts a WHERE a.groupId = ? AND a.artifactId = ?"; @@ -593,8 +588,7 @@ public String selectContentCountByHash() { */ @Override public String selectContentById() { - return "SELECT c.content, c.contentType, c.refs FROM content c " - + "WHERE c.contentId = ?"; + return "SELECT c.content, c.contentType, c.refs FROM content c " + "WHERE c.contentId = ?"; } /** @@ -602,8 +596,7 @@ public String selectContentById() { */ @Override public String selectContentByContentHash() { - return "SELECT c.content, c.contentType, c.refs FROM content c " - + "WHERE c.contentHash = ?"; + return "SELECT c.content, c.contentType, c.refs FROM content c " + "WHERE c.contentHash = ?"; } @Override @@ -665,9 +658,8 @@ public String deleteAllGroups() { */ @Override public String selectGroups() { - //TODO pagination? - return "SELECT g.* FROM groups g " - + "ORDER BY g.groupId ASC LIMIT ?"; + // TODO pagination? 
+ return "SELECT g.* FROM groups g " + "ORDER BY g.groupId ASC LIMIT ?"; } /** @@ -731,13 +723,11 @@ public String exportGroups() { return "SELECT * FROM groups g "; } - @Override public String exportBranches() { return "SELECT * FROM branches"; } - /** * @see io.apicurio.registry.storage.impl.sql.SqlStatements#importArtifactRule() */ @@ -1014,8 +1004,7 @@ public String insertVersionComment() { @Override public String selectVersionComments() { - return "SELECT c.* " - + "FROM version_comments c JOIN versions v ON v.globalId = c.globalId " + return "SELECT c.* " + "FROM version_comments c JOIN versions v ON v.globalId = c.globalId " + "WHERE v.groupId = ? AND v.artifactId = ? AND v.version = ? ORDER BY c.createdOn DESC"; } @@ -1029,18 +1018,15 @@ public String updateVersionComment() { return "UPDATE version_comments SET cvalue = ? WHERE globalId = ? AND commentId = ? AND owner = ?"; } - @Override public String selectGAVByGlobalId() { - return "SELECT groupId, artifactId, version FROM versions " + - "WHERE globalId = ?"; + return "SELECT groupId, artifactId, version FROM versions " + "WHERE globalId = ?"; } - @Override public String insertBranch() { - return "INSERT INTO branches (groupId, artifactId, branchId, description, systemDefined, owner, createdOn, modifiedBy, modifiedOn) " + - "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"; + return "INSERT INTO branches (groupId, artifactId, branchId, description, systemDefined, owner, createdOn, modifiedBy, modifiedOn) " + + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"; } @Override @@ -1060,47 +1046,40 @@ public String selectBranchVersionNumbers() { @Override public String selectBranchTip() { - return "SELECT bv.groupId, bv.artifactId, bv.version FROM branch_versions bv " + - "WHERE bv.groupId = ? AND bv.artifactId = ? AND bv.branchId = ? " + - "ORDER BY bv.branchOrder DESC LIMIT 1"; + return "SELECT bv.groupId, bv.artifactId, bv.version FROM branch_versions bv " + + "WHERE bv.groupId = ? AND bv.artifactId = ? AND bv.branchId = ? 
" + + "ORDER BY bv.branchOrder DESC LIMIT 1"; } - @Override public String selectBranchTipNotDisabled() { - return "SELECT bv.groupId, bv.artifactId, bv.version " + - "FROM branch_versions bv " + - "JOIN versions v ON bv.groupId = v.groupId AND bv.artifactId = v.artifactId AND bv.version = v.version " + - "WHERE bv.groupId = ? AND bv.artifactId = ? AND bv.branchId = ? AND v.state != 'DISABLED' " + - "ORDER BY bv.branchOrder DESC LIMIT 1"; + return "SELECT bv.groupId, bv.artifactId, bv.version " + "FROM branch_versions bv " + + "JOIN versions v ON bv.groupId = v.groupId AND bv.artifactId = v.artifactId AND bv.version = v.version " + + "WHERE bv.groupId = ? AND bv.artifactId = ? AND bv.branchId = ? AND v.state != 'DISABLED' " + + "ORDER BY bv.branchOrder DESC LIMIT 1"; } - @Override public String insertBranchVersion() { - return "INSERT INTO branch_versions (groupId, artifactId, branchId, branchOrder, version) " + - "VALUES (?, ?, ?, ?, ?)"; + return "INSERT INTO branch_versions (groupId, artifactId, branchId, branchOrder, version) " + + "VALUES (?, ?, ?, ?, ?)"; } - @Override public String appendBranchVersion() { - return "INSERT INTO branch_versions (groupId, artifactId, branchId, branchOrder, version) " + - "SELECT ?, ?, ?, COALESCE(MAX(bv.branchOrder), 0) + 1, ? " + - "FROM branch_versions bv " + - "WHERE bv.groupId = ? AND bv.artifactId = ? AND bv.branchId = ?"; + return "INSERT INTO branch_versions (groupId, artifactId, branchId, branchOrder, version) " + + "SELECT ?, ?, ?, COALESCE(MAX(bv.branchOrder), 0) + 1, ? " + "FROM branch_versions bv " + + "WHERE bv.groupId = ? AND bv.artifactId = ? AND bv.branchId = ?"; } @Override public String deleteBranchVersions() { - return "DELETE FROM branch_versions " + - "WHERE groupId = ? AND artifactId = ? AND branchId = ?"; + return "DELETE FROM branch_versions " + "WHERE groupId = ? AND artifactId = ? AND branchId = ?"; } @Override public String deleteBranch() { - return "DELETE FROM branches " + - "WHERE groupId = ? 
AND artifactId = ? AND branchId = ?"; + return "DELETE FROM branches " + "WHERE groupId = ? AND artifactId = ? AND branchId = ?"; } @Override diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/DdlParser.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/DdlParser.java index d4b20085ba..eddb8541f5 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/DdlParser.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/DdlParser.java @@ -42,7 +42,7 @@ public List parse(InputStream ddlStream) throws IOException { String line; StringBuilder builder = new StringBuilder(); boolean isInMultiLineStatement = false; - while ( (line = reader.readLine()) != null) { + while ((line = reader.readLine()) != null) { if (line.startsWith("--")) { continue; } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/H2SqlStatements.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/H2SqlStatements.java index 106251e493..adfef3dcf3 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/H2SqlStatements.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/H2SqlStatements.java @@ -1,8 +1,8 @@ package io.apicurio.registry.storage.impl.sql; /** - * H2 implementation of the sql statements interface. Provides sql statements that - * are specific to H2, where applicable. + * H2 implementation of the sql statements interface. Provides sql statements that are specific to H2, where + * applicable. 
*/ public class H2SqlStatements extends CommonSqlStatements { @@ -33,7 +33,8 @@ public boolean isPrimaryKeyViolation(Exception error) { */ @Override public boolean isForeignKeyViolation(Exception error) { - return error.getMessage() != null && error.getMessage().contains("Referential integrity constraint violation"); + return error.getMessage() != null + && error.getMessage().contains("Referential integrity constraint violation"); } /** diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/HandleFactory.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/HandleFactory.java index 5732e33278..0bfa8ffc5f 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/HandleFactory.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/HandleFactory.java @@ -8,9 +8,9 @@ public interface HandleFactory { /** * Execute an operation using a database handle. *

- * Handles are cached and reused if calls to this method are nested. - * Make sure that all nested uses of a handle are either within a transaction context, - * or without one. Starting a transaction with a nested handle will cause an exception. + * Handles are cached and reused if calls to this method are nested. Make sure that all nested uses of a + * handle are either within a transaction context, or without one. Starting a transaction with a nested + * handle will cause an exception. */ R withHandle(HandleCallback callback) throws X; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/IDbUpgrader.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/IDbUpgrader.java index f7150fbfc5..e7376c3ce9 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/IDbUpgrader.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/IDbUpgrader.java @@ -6,6 +6,7 @@ public interface IDbUpgrader { /** * Called by the {@link AbstractSqlRegistryStorage} class when upgrading the database. + * * @param dbHandle */ public void upgrade(Handle dbHandle) throws Exception; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/PostgreSQLSqlStatements.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/PostgreSQLSqlStatements.java index 11ab8ed2d9..10967b839e 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/PostgreSQLSqlStatements.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/PostgreSQLSqlStatements.java @@ -1,8 +1,8 @@ package io.apicurio.registry.storage.impl.sql; /** - * PostgreSQL implementation of the sql statements interface. Provides sql statements that - * are specific to PostgreSQL, where applicable. + * PostgreSQL implementation of the sql statements interface. Provides sql statements that are specific to + * PostgreSQL, where applicable. 
*/ public class PostgreSQLSqlStatements extends CommonSqlStatements { diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryDatabaseKind.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryDatabaseKind.java index e8d16d2c7d..c01620ec0d 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryDatabaseKind.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryDatabaseKind.java @@ -2,9 +2,8 @@ public enum RegistryDatabaseKind { - postgresql("org.postgresql.Driver"), - h2("org.h2.Driver"), - mssql("com.microsoft.sqlserver.jdbc.SQLServerDriver"); + postgresql("org.postgresql.Driver"), h2("org.h2.Driver"), mssql( + "com.microsoft.sqlserver.jdbc.SQLServerDriver"); final String driverClassName; diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryDatasourceProducer.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryDatasourceProducer.java index 0205e5972a..8099429b0f 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryDatasourceProducer.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryDatasourceProducer.java @@ -65,10 +65,8 @@ public AgroalDataSource produceDatasource() throws SQLException { props.put(AgroalPropertiesReader.CREDENTIAL, password); props.put(AgroalPropertiesReader.PROVIDER_CLASS_NAME, databaseKind.getDriverClassName()); - AgroalDataSource datasource = AgroalDataSource.from(new AgroalPropertiesReader() - .readProperties(props) - .get()); - + AgroalDataSource datasource = AgroalDataSource + .from(new AgroalPropertiesReader().readProperties(props).get()); log.info("Using {} SQL storage.", databaseType); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryStorageContentUtils.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryStorageContentUtils.java index 3b4750abe4..d6be5f7579 100644 --- 
a/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryStorageContentUtils.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/RegistryStorageContentUtils.java @@ -19,9 +19,7 @@ import java.util.function.Function; /** - * TODO Refactor - * TODO Cache calls to referenceResolver - * + * TODO Refactor TODO Cache calls to referenceResolver */ @ApplicationScoped public class RegistryStorageContentUtils { @@ -37,10 +35,10 @@ public class RegistryStorageContentUtils { * * @throws RegistryException in the case of an error. */ - public TypedContent canonicalizeContent(String artifactType, TypedContent content, Map resolvedReferences) { + public TypedContent canonicalizeContent(String artifactType, TypedContent content, + Map resolvedReferences) { try { - return factory.getArtifactTypeProvider(artifactType) - .getContentCanonicalizer() + return factory.getArtifactTypeProvider(artifactType).getContentCanonicalizer() .canonicalize(content, resolvedReferences); } catch (Exception ex) { // TODO: We should consider explicitly failing when a content could not be canonicalized. @@ -50,14 +48,14 @@ public TypedContent canonicalizeContent(String artifactType, TypedContent conten } } - /** * Canonicalize the given content. * * @throws RegistryException in the case of an error. 
*/ - public TypedContent canonicalizeContent(String artifactType, TypedContent content, List references, - Function, Map> referenceResolver) { + public TypedContent canonicalizeContent(String artifactType, TypedContent content, + List references, + Function, Map> referenceResolver) { try { return canonicalizeContent(artifactType, content, referenceResolver.apply(references)); } catch (Exception ex) { @@ -65,18 +63,20 @@ public TypedContent canonicalizeContent(String artifactType, TypedContent conten } } - /** - * @param references may be null + * @param references may be null * @param referenceResolver may be null if references is null */ - public String getCanonicalContentHash(TypedContent content, String artifactType, List references, - Function, Map> referenceResolver) { + public String getCanonicalContentHash(TypedContent content, String artifactType, + List references, + Function, Map> referenceResolver) { try { if (notEmpty(references)) { String referencesSerialized = SqlUtil.serializeReferences(references); - TypedContent canonicalContent = canonicalizeContent(artifactType, content, referenceResolver.apply(references)); - return DigestUtils.sha256Hex(concatContentAndReferences(canonicalContent.getContent().bytes(), referencesSerialized)); + TypedContent canonicalContent = canonicalizeContent(artifactType, content, + referenceResolver.apply(references)); + return DigestUtils.sha256Hex(concatContentAndReferences(canonicalContent.getContent().bytes(), + referencesSerialized)); } else { TypedContent canonicalContent = canonicalizeContent(artifactType, content, Map.of()); return DigestUtils.sha256Hex(canonicalContent.getContent().bytes()); @@ -86,7 +86,6 @@ public String getCanonicalContentHash(TypedContent content, String artifactType, } } - /** * @param references may be null */ @@ -94,7 +93,8 @@ public String getContentHash(TypedContent content, List re try { if (notEmpty(references)) { String referencesSerialized = SqlUtil.serializeReferences(references); - 
return DigestUtils.sha256Hex(concatContentAndReferences(content.getContent().bytes(), referencesSerialized)); + return DigestUtils.sha256Hex( + concatContentAndReferences(content.getContent().bytes(), referencesSerialized)); } else { return DigestUtils.sha256Hex(content.getContent().bytes()); } @@ -103,11 +103,11 @@ public String getContentHash(TypedContent content, List re } } - private byte[] concatContentAndReferences(byte[] contentBytes, String references) throws IOException { if (references != null && !references.isEmpty()) { var referencesBytes = ContentHandle.create(references).bytes(); - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(contentBytes.length + referencesBytes.length); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream( + contentBytes.length + referencesBytes.length); outputStream.write(contentBytes); outputStream.write(referencesBytes); return outputStream.toByteArray(); @@ -116,12 +116,10 @@ private byte[] concatContentAndReferences(byte[] contentBytes, String references } } - public String determineArtifactType(TypedContent content, String artifactTypeHint) { return ArtifactTypeUtil.determineArtifactType(content, artifactTypeHint, null, factory); } - public static boolean notEmpty(Collection collection) { return collection != null && !collection.isEmpty(); } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SQLServerSqlStatements.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SQLServerSqlStatements.java index 800fb8daac..7ed4c35ff5 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SQLServerSqlStatements.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SQLServerSqlStatements.java @@ -1,8 +1,8 @@ package io.apicurio.registry.storage.impl.sql; /** - * MS SQL Server implementation of the SQL statements interface. Provides sql statements that - * are specific to MS SQL Server, where applicable. 
+ * MS SQL Server implementation of the SQL statements interface. Provides sql statements that are specific to + * MS SQL Server, where applicable. */ public class SQLServerSqlStatements extends CommonSqlStatements { @@ -49,11 +49,9 @@ public String isDatabaseInitialized() { */ @Override public String upsertContent() { - return String.join(" ", - "MERGE INTO content AS target", + return String.join(" ", "MERGE INTO content AS target", "USING (VALUES (?, ?, ?, ?, ?, ?)) AS source (contentId, canonicalHash, contentHash, contentType, content, refs)", - "ON (target.contentHash = source.contentHash)", - "WHEN NOT MATCHED THEN", + "ON (target.contentHash = source.contentHash)", "WHEN NOT MATCHED THEN", "INSERT (contentId, canonicalHash, contentHash, contentType, content, refs)", "VALUES (source.contentId, source.canonicalHash, source.contentHash, source.contentType, source.content, source.refs);"); } @@ -63,16 +61,10 @@ public String upsertContent() { */ @Override public String getNextSequenceValue() { - return String.join(" ", - "MERGE INTO sequences AS target", - "USING (VALUES (?)) AS source (seqName)", - "ON (target.seqName = source.seqName)", - "WHEN MATCHED THEN", - "UPDATE SET seqValue = target.seqValue + 1", - "WHEN NOT MATCHED THEN", - "INSERT (seqName, seqValue)", - "VALUES (source.seqName, 1)", - "OUTPUT INSERTED.seqValue;"); + return String.join(" ", "MERGE INTO sequences AS target", "USING (VALUES (?)) AS source (seqName)", + "ON (target.seqName = source.seqName)", "WHEN MATCHED THEN", + "UPDATE SET seqValue = target.seqValue + 1", "WHEN NOT MATCHED THEN", + "INSERT (seqName, seqValue)", "VALUES (source.seqName, 1)", "OUTPUT INSERTED.seqValue;"); } /** @@ -80,15 +72,10 @@ public String getNextSequenceValue() { */ @Override public String resetSequenceValue() { - return String.join(" ", - "MERGE INTO sequences AS target", - "USING (VALUES (?, ?)) AS source (seqName, seqValue)", - "ON (target.seqName = source.seqName)", - "WHEN MATCHED THEN", - "UPDATE SET 
seqValue = ?", - "WHEN NOT MATCHED THEN", - "INSERT (seqName, seqValue)", - "VALUES (source.seqName, source.seqValue)", + return String.join(" ", "MERGE INTO sequences AS target", + "USING (VALUES (?, ?)) AS source (seqName, seqValue)", "ON (target.seqName = source.seqName)", + "WHEN MATCHED THEN", "UPDATE SET seqValue = ?", "WHEN NOT MATCHED THEN", + "INSERT (seqName, seqValue)", "VALUES (source.seqName, source.seqValue)", "OUTPUT INSERTED.seqValue;"); } @@ -97,12 +84,10 @@ public String resetSequenceValue() { */ @Override public String upsertContentReference() { - return String.join(" ", - "MERGE INTO content_references AS target", + return String.join(" ", "MERGE INTO content_references AS target", "USING (VALUES (?, ?, ?, ?, ?)) AS source (contentId, groupId, artifactId, version, name)", "ON (target.contentId = source.contentId AND target.name = source.name)", - "WHEN NOT MATCHED THEN", - "INSERT (contentId, groupId, artifactId, version, name)", + "WHEN NOT MATCHED THEN", "INSERT (contentId, groupId, artifactId, version, name)", "VALUES (source.contentId, source.groupId, source.artifactId, source.version, source.name);"); } @@ -119,24 +104,23 @@ public String selectArtifactIds() { */ @Override public String selectGroups() { - //TODO pagination? - return "SELECT TOP (?) * FROM groups " - + "ORDER BY groupId ASC"; + // TODO pagination? + return "SELECT TOP (?) * FROM groups " + "ORDER BY groupId ASC"; } @Override public String selectBranchTip() { - return "SELECT ab.groupId, ab.artifactId, ab.version FROM artifact_branches ab " + - "WHERE ab.groupId = ? AND ab.artifactId = ? AND ab.branchId = ? " + - "ORDER BY ab.branchOrder DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY"; + return "SELECT ab.groupId, ab.artifactId, ab.version FROM artifact_branches ab " + + "WHERE ab.groupId = ? AND ab.artifactId = ? AND ab.branchId = ? 
" + + "ORDER BY ab.branchOrder DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY"; } @Override public String selectBranchTipNotDisabled() { - return "SELECT ab.groupId, ab.artifactId, ab.version FROM artifact_branches ab " + - "JOIN versions v ON ab.groupId = v.groupId AND ab.artifactId = v.artifactId AND ab.version = v.version " + - "WHERE ab.groupId = ? AND ab.artifactId = ? AND ab.branchId = ? AND v.state != 'DISABLED' " + - "ORDER BY ab.branchOrder DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY"; + return "SELECT ab.groupId, ab.artifactId, ab.version FROM artifact_branches ab " + + "JOIN versions v ON ab.groupId = v.groupId AND ab.artifactId = v.artifactId AND ab.version = v.version " + + "WHERE ab.groupId = ? AND ab.artifactId = ? AND ab.branchId = ? AND v.state != 'DISABLED' " + + "ORDER BY ab.branchOrder DESC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY"; } @Override diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlRegistryStorage.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlRegistryStorage.java index dc2c748037..50370f2022 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlRegistryStorage.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlRegistryStorage.java @@ -10,7 +10,6 @@ /** * An in-memory SQL implementation of the {@link RegistryStorage} interface. - * */ @ApplicationScoped @PersistenceExceptionLivenessApply diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStatements.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStatements.java index 4f519f1462..c7456bef62 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStatements.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStatements.java @@ -3,8 +3,8 @@ import java.util.List; /** - * Returns SQL statements used by the JDB artifactStore implementation. There are different - * implementations of this interface depending on the database being used. 
+ * Returns SQL statements used by the JDB artifactStore implementation. There are different implementations of + * this interface depending on the database being used. */ public interface SqlStatements { @@ -79,8 +79,8 @@ public interface SqlStatements { public String insertArtifact(); /** - * A statement used to update the 'version' column of the 'versions' table by globalId. The value of the "versionOrder" - * column is copied into the "version" column. + * A statement used to update the 'version' column of the 'versions' table by globalId. The value of the + * "versionOrder" column is copied into the "version" column. */ public String autoUpdateVersionForGlobalId(); @@ -155,7 +155,8 @@ public interface SqlStatements { public String selectArtifactVersionMetaData(); /** - * A statement to select the content of an artifact version from the versions table by artifactId + version. + * A statement to select the content of an artifact version from the versions table by artifactId + + * version. */ public String selectArtifactVersionContent(); @@ -174,13 +175,11 @@ public interface SqlStatements { */ public String updateContentCanonicalHash(); - /** * A statement to get a single artifact (latest version) meta-data by artifactId. */ public String selectArtifactMetaData(); - /** * A statement to select the contentId of a row in the content table by hash value. */ @@ -251,11 +250,14 @@ public interface SqlStatements { */ public String updateArtifactVersionNameByGAV(); + public String updateArtifactVersionDescriptionByGAV(); + public String updateArtifactVersionLabelsByGAV(); + public String updateArtifactVersionOwnerByGAV(); - public String updateArtifactVersionStateByGAV(); + public String updateArtifactVersionStateByGAV(); /** * A statement to delete all rows in the group_labels table for a given group. @@ -266,7 +268,7 @@ public interface SqlStatements { * A statement to delete all rows in the artifact_labels table for a given artifact. 
*/ public String deleteArtifactLabels(); - + /** * A statement to delete the labels for a single artifact version. */ @@ -493,7 +495,6 @@ public interface SqlStatements { public String selectGlobalIdExists(); - /* * The next few statements support role mappings */ @@ -516,7 +517,6 @@ public interface SqlStatements { public String selectRoleMappingCountByPrincipal(); - /* * The next few statements support downloads. */ @@ -529,7 +529,6 @@ public interface SqlStatements { public String deleteExpiredDownloads(); - /* * The next few statements support config properties. */ @@ -562,10 +561,8 @@ public interface SqlStatements { public String updateVersionComment(); - // ========== Branches ========== - public String selectGAVByGlobalId(); public String insertBranch(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStatementsProducer.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStatementsProducer.java index b636bba948..f617990cc5 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStatementsProducer.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStatementsProducer.java @@ -20,7 +20,8 @@ public class SqlStatementsProducer { /** * Produces an {@link SqlStatements} instance for injection. 
*/ - @Produces @ApplicationScoped + @Produces + @ApplicationScoped public SqlStatements createSqlStatements() { log.debug("Creating an instance of ISqlStatements for DB: " + databaseType); if ("h2".equals(databaseType)) { diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStorageEvent.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStorageEvent.java index e6af296303..bed74f6bbe 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStorageEvent.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/SqlStorageEvent.java @@ -1,9 +1,9 @@ package io.apicurio.registry.storage.impl.sql; public class SqlStorageEvent { - + private SqlStorageEventType type; - + /** * Constructor. */ diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/Handle.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/Handle.java index 59d409bcd5..9991e80b4e 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/Handle.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/Handle.java @@ -6,12 +6,14 @@ public interface Handle extends Closeable { /** * Create a new Query from the given SQL. + * * @param sql */ Query createQuery(String sql); /** * Create a new Update statement from the given SQL. + * * @param sql */ Update createUpdate(String sql); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/HandleImpl.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/HandleImpl.java index e139a4c1c3..416e8b9bed 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/HandleImpl.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/HandleImpl.java @@ -10,6 +10,7 @@ public class HandleImpl implements Handle { /** * Constructor. 
+ * * @param connection */ public HandleImpl(Connection connection) { diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/MappedQueryImpl.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/MappedQueryImpl.java index 50a56e73e0..ee86a53a7c 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/MappedQueryImpl.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/MappedQueryImpl.java @@ -21,6 +21,7 @@ public class MappedQueryImpl implements MappedQuery, Closeable { /** * Constructor. + * * @param statement * @param mapper * @throws SQLException @@ -162,7 +163,8 @@ public List list() { */ @Override public Stream stream() { - return StreamSupport.stream(new Spliterators.AbstractSpliterator(Long.MAX_VALUE, Spliterator.IMMUTABLE | Spliterator.ORDERED | Spliterator.DISTINCT | Spliterator.NONNULL) { + return StreamSupport.stream(new Spliterators.AbstractSpliterator(Long.MAX_VALUE, + Spliterator.IMMUTABLE | Spliterator.ORDERED | Spliterator.DISTINCT | Spliterator.NONNULL) { @Override public boolean tryAdvance(Consumer action) { try { diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/QueryImpl.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/QueryImpl.java index 5b6d7c2120..0ad164a4b5 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/QueryImpl.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/QueryImpl.java @@ -1,19 +1,20 @@ package io.apicurio.registry.storage.impl.sql.jdb; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.SQLException; - import io.apicurio.registry.storage.impl.sql.mappers.IntegerMapper; import io.apicurio.registry.storage.impl.sql.mappers.LongMapper; import io.apicurio.registry.storage.impl.sql.mappers.StringMapper; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; + public class QueryImpl extends SqlImpl implements Query { 
private int fetchSize = -1; /** * Constructor. + * * @param connection * @param sql */ diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/SqlParam.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/SqlParam.java index 77c1ccd6a7..1b32467b00 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/SqlParam.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/SqlParam.java @@ -14,6 +14,7 @@ public class SqlParam { /** * Constructor. + * * @param position * @param value * @param type @@ -26,10 +27,12 @@ public SqlParam(int position, Object value, SqlParamType type) { /** * Binds this SQL parameter to the given statement. + * * @param statement */ public void bindTo(PreparedStatement statement) { - int position = this.position + 1; // convert from sensible position (starts at 0) to JDBC position index (starts at 1) + int position = this.position + 1; // convert from sensible position (starts at 0) to JDBC position + // index (starts at 1) try { switch (type) { case BYTES: diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/UpdateImpl.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/UpdateImpl.java index e1291dcd8f..f07e5025aa 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/UpdateImpl.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/jdb/UpdateImpl.java @@ -8,6 +8,7 @@ public class UpdateImpl extends SqlImpl implements Update { /** * Constructor. 
+ * * @param connection * @param sql */ diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ArtifactReferenceDtoMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ArtifactReferenceDtoMapper.java index cdbe7a1ac1..a01ad09d4b 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ArtifactReferenceDtoMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ArtifactReferenceDtoMapper.java @@ -1,12 +1,12 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import java.sql.ResultSet; -import java.sql.SQLException; - import io.apicurio.registry.storage.dto.ArtifactReferenceDto; import io.apicurio.registry.storage.impl.sql.SqlUtil; import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; +import java.sql.ResultSet; +import java.sql.SQLException; + public class ArtifactReferenceDtoMapper implements RowMapper { public static final ArtifactReferenceDtoMapper instance = new ArtifactReferenceDtoMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ArtifactRuleEntityMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ArtifactRuleEntityMapper.java index 5b12a43cf1..5916ecead0 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ArtifactRuleEntityMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ArtifactRuleEntityMapper.java @@ -1,13 +1,13 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import java.sql.ResultSet; -import java.sql.SQLException; - import io.apicurio.registry.storage.impl.sql.SqlUtil; import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; import io.apicurio.registry.types.RuleType; import io.apicurio.registry.utils.impexp.ArtifactRuleEntity; +import java.sql.ResultSet; +import java.sql.SQLException; + public class ArtifactRuleEntityMapper implements RowMapper { public static final ArtifactRuleEntityMapper instance = new 
ArtifactRuleEntityMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/BranchEntityMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/BranchEntityMapper.java index b5bc3ff33a..bd75077838 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/BranchEntityMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/BranchEntityMapper.java @@ -11,23 +11,16 @@ public class BranchEntityMapper implements RowMapper { public static final BranchEntityMapper instance = new BranchEntityMapper(); - private BranchEntityMapper() { } - @Override public BranchEntity map(ResultSet rs) throws SQLException { - return BranchEntity.builder() - .groupId(SqlUtil.denormalizeGroupId(rs.getString("groupId"))) - .artifactId(rs.getString("artifactId")) - .branchId(rs.getString("branchId")) - .description(rs.getString("description")) - .systemDefined(rs.getBoolean("systemDefined")) - .owner(rs.getString("owner")) - .createdOn(rs.getTimestamp("createdOn").getTime()) - .modifiedBy(rs.getString("modifiedBy")) - .modifiedOn(rs.getTimestamp("modifiedOn").getTime()) + return BranchEntity.builder().groupId(SqlUtil.denormalizeGroupId(rs.getString("groupId"))) + .artifactId(rs.getString("artifactId")).branchId(rs.getString("branchId")) + .description(rs.getString("description")).systemDefined(rs.getBoolean("systemDefined")) + .owner(rs.getString("owner")).createdOn(rs.getTimestamp("createdOn").getTime()) + .modifiedBy(rs.getString("modifiedBy")).modifiedOn(rs.getTimestamp("modifiedOn").getTime()) .build(); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/BranchMetaDataDtoMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/BranchMetaDataDtoMapper.java index eadb47c2d7..9a8ab6aef3 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/BranchMetaDataDtoMapper.java +++ 
b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/BranchMetaDataDtoMapper.java @@ -22,16 +22,11 @@ private BranchMetaDataDtoMapper() { */ @Override public BranchMetaDataDto map(ResultSet rs) throws SQLException { - return BranchMetaDataDto.builder() - .groupId(SqlUtil.denormalizeGroupId(rs.getString("groupId"))) - .artifactId(rs.getString("artifactId")) - .branchId(rs.getString("branchId")) - .description(rs.getString("description")) - .systemDefined(rs.getBoolean("systemDefined")) - .owner(rs.getString("owner")) - .createdOn(rs.getTimestamp("createdOn").getTime()) - .modifiedBy(rs.getString("modifiedBy")) - .modifiedOn(rs.getTimestamp("modifiedOn").getTime()) + return BranchMetaDataDto.builder().groupId(SqlUtil.denormalizeGroupId(rs.getString("groupId"))) + .artifactId(rs.getString("artifactId")).branchId(rs.getString("branchId")) + .description(rs.getString("description")).systemDefined(rs.getBoolean("systemDefined")) + .owner(rs.getString("owner")).createdOn(rs.getTimestamp("createdOn").getTime()) + .modifiedBy(rs.getString("modifiedBy")).modifiedOn(rs.getTimestamp("modifiedOn").getTime()) .build(); } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/CommentDtoMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/CommentDtoMapper.java index e5f80f71c2..324f17b702 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/CommentDtoMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/CommentDtoMapper.java @@ -1,11 +1,11 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import java.sql.ResultSet; -import java.sql.SQLException; - import io.apicurio.registry.storage.dto.CommentDto; import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; +import java.sql.ResultSet; +import java.sql.SQLException; + public class CommentDtoMapper implements RowMapper { public static final CommentDtoMapper instance = new CommentDtoMapper(); @@ -21,12 +21,8 @@ 
private CommentDtoMapper() { */ @Override public CommentDto map(ResultSet rs) throws SQLException { - return CommentDto.builder() - .commentId(rs.getString("commentId")) - .owner(rs.getString("owner")) - .createdOn(rs.getTimestamp("createdOn").getTime()) - .value(rs.getString("cvalue")) - .build(); + return CommentDto.builder().commentId(rs.getString("commentId")).owner(rs.getString("owner")) + .createdOn(rs.getTimestamp("createdOn").getTime()).value(rs.getString("cvalue")).build(); } } \ No newline at end of file diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/CommentEntityMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/CommentEntityMapper.java index af6c68e235..5c464ecc43 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/CommentEntityMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/CommentEntityMapper.java @@ -1,11 +1,11 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import java.sql.ResultSet; -import java.sql.SQLException; - import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; import io.apicurio.registry.utils.impexp.CommentEntity; +import java.sql.ResultSet; +import java.sql.SQLException; + public class CommentEntityMapper implements RowMapper { public static final CommentEntityMapper instance = new CommentEntityMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ContentEntityMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ContentEntityMapper.java index bae4ccda43..e83dadcf2c 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ContentEntityMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/ContentEntityMapper.java @@ -30,7 +30,7 @@ public ContentEntity map(ResultSet rs) throws SQLException { try { entity.serializedReferences = rs.getString("refs"); } catch (Exception e) { - //The old database does not have 
te references column, just ignore; + // The old database does not have te references column, just ignore; } return entity; } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/DynamicConfigPropertyDtoMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/DynamicConfigPropertyDtoMapper.java index 147f473544..c21939f0f7 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/DynamicConfigPropertyDtoMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/DynamicConfigPropertyDtoMapper.java @@ -1,11 +1,11 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import java.sql.ResultSet; -import java.sql.SQLException; - import io.apicurio.common.apps.config.DynamicConfigPropertyDto; import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; +import java.sql.ResultSet; +import java.sql.SQLException; + public class DynamicConfigPropertyDtoMapper implements RowMapper { public static final DynamicConfigPropertyDtoMapper instance = new DynamicConfigPropertyDtoMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GAVMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GAVMapper.java index 69f116608d..9071144876 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GAVMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GAVMapper.java @@ -1,7 +1,7 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; import io.apicurio.registry.model.GAV; +import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; import java.sql.ResultSet; import java.sql.SQLException; @@ -10,17 +10,11 @@ public class GAVMapper implements RowMapper { public static final GAVMapper instance = new GAVMapper(); - private GAVMapper() { } - @Override public GAV map(ResultSet rs) throws SQLException { - return new GAV( - rs.getString("groupId"), 
- rs.getString("artifactId"), - rs.getString("version") - ); + return new GAV(rs.getString("groupId"), rs.getString("artifactId"), rs.getString("version")); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GlobalRuleEntityMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GlobalRuleEntityMapper.java index a2f07c57ce..06ea58751b 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GlobalRuleEntityMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GlobalRuleEntityMapper.java @@ -1,12 +1,12 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import java.sql.ResultSet; -import java.sql.SQLException; - import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; import io.apicurio.registry.types.RuleType; import io.apicurio.registry.utils.impexp.GlobalRuleEntity; +import java.sql.ResultSet; +import java.sql.SQLException; + public class GlobalRuleEntityMapper implements RowMapper { public static final GlobalRuleEntityMapper instance = new GlobalRuleEntityMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GroupMetaDataDtoMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GroupMetaDataDtoMapper.java index 862da56c97..49b7dc47fd 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GroupMetaDataDtoMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/GroupMetaDataDtoMapper.java @@ -1,13 +1,13 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Timestamp; - import io.apicurio.registry.storage.dto.GroupMetaDataDto; import io.apicurio.registry.storage.impl.sql.SqlUtil; import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Timestamp; + public class GroupMetaDataDtoMapper implements 
RowMapper { public static final GroupMetaDataDtoMapper instance = new GroupMetaDataDtoMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/IntegerMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/IntegerMapper.java index b3f78e0ae2..0e08a7eaf0 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/IntegerMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/IntegerMapper.java @@ -1,10 +1,10 @@ package io.apicurio.registry.storage.impl.sql.mappers; +import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; + import java.sql.ResultSet; import java.sql.SQLException; -import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; - public class IntegerMapper implements RowMapper { public static final IntegerMapper instance = new IntegerMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/LongMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/LongMapper.java index 418ccc4e2f..3bc1dc962b 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/LongMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/LongMapper.java @@ -1,10 +1,10 @@ package io.apicurio.registry.storage.impl.sql.mappers; +import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; + import java.sql.ResultSet; import java.sql.SQLException; -import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; - public class LongMapper implements RowMapper { public static final LongMapper instance = new LongMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/RoleMappingDtoMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/RoleMappingDtoMapper.java index 2a6365401c..8491ef97ae 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/RoleMappingDtoMapper.java +++ 
b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/RoleMappingDtoMapper.java @@ -1,11 +1,11 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import java.sql.ResultSet; -import java.sql.SQLException; - import io.apicurio.registry.storage.dto.RoleMappingDto; import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; +import java.sql.ResultSet; +import java.sql.SQLException; + public class RoleMappingDtoMapper implements RowMapper { public static final RoleMappingDtoMapper instance = new RoleMappingDtoMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/RuleConfigurationDtoMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/RuleConfigurationDtoMapper.java index 797d72d20b..280d054561 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/RuleConfigurationDtoMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/RuleConfigurationDtoMapper.java @@ -1,11 +1,11 @@ package io.apicurio.registry.storage.impl.sql.mappers; -import java.sql.ResultSet; -import java.sql.SQLException; - import io.apicurio.registry.storage.dto.RuleConfigurationDto; import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; +import java.sql.ResultSet; +import java.sql.SQLException; + public class RuleConfigurationDtoMapper implements RowMapper { public static final RuleConfigurationDtoMapper instance = new RuleConfigurationDtoMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/SearchedBranchMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/SearchedBranchMapper.java index ec4a4d2906..d394c91226 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/SearchedBranchMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/SearchedBranchMapper.java @@ -22,16 +22,11 @@ private SearchedBranchMapper() { */ @Override public SearchedBranchDto map(ResultSet rs) throws 
SQLException { - return SearchedBranchDto.builder() - .groupId(SqlUtil.denormalizeGroupId(rs.getString("groupId"))) - .artifactId(rs.getString("artifactId")) - .branchId(rs.getString("branchId")) - .description(rs.getString("description")) - .systemDefined(rs.getBoolean("systemDefined")) - .owner(rs.getString("owner")) - .createdOn(rs.getTimestamp("createdOn").getTime()) - .modifiedBy(rs.getString("modifiedBy")) - .modifiedOn(rs.getTimestamp("modifiedOn").getTime()) + return SearchedBranchDto.builder().groupId(SqlUtil.denormalizeGroupId(rs.getString("groupId"))) + .artifactId(rs.getString("artifactId")).branchId(rs.getString("branchId")) + .description(rs.getString("description")).systemDefined(rs.getBoolean("systemDefined")) + .owner(rs.getString("owner")).createdOn(rs.getTimestamp("createdOn").getTime()) + .modifiedBy(rs.getString("modifiedBy")).modifiedOn(rs.getTimestamp("modifiedOn").getTime()) .build(); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/StoredArtifactMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/StoredArtifactMapper.java index c1e127418a..e51d3204f7 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/StoredArtifactMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/StoredArtifactMapper.java @@ -23,14 +23,10 @@ private StoredArtifactMapper() { */ @Override public StoredArtifactVersionDto map(ResultSet rs) throws SQLException { - return StoredArtifactVersionDto.builder() - .content(ContentHandle.create(rs.getBytes("content"))) - .contentType(rs.getString("contentType")) - .contentId(rs.getLong("contentId")) - .globalId(rs.getLong("globalId")) - .version(rs.getString("version")) + return StoredArtifactVersionDto.builder().content(ContentHandle.create(rs.getBytes("content"))) + .contentType(rs.getString("contentType")).contentId(rs.getLong("contentId")) + .globalId(rs.getLong("globalId")).version(rs.getString("version")) 
.versionOrder(rs.getInt("versionOrder")) - .references(SqlUtil.deserializeReferences(rs.getString("refs"))) - .build(); + .references(SqlUtil.deserializeReferences(rs.getString("refs"))).build(); } } diff --git a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/StringMapper.java b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/StringMapper.java index dd1f5a784b..c76e7d8883 100644 --- a/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/StringMapper.java +++ b/app/src/main/java/io/apicurio/registry/storage/impl/sql/mappers/StringMapper.java @@ -1,10 +1,10 @@ package io.apicurio.registry.storage.impl.sql.mappers; +import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; + import java.sql.ResultSet; import java.sql.SQLException; -import io.apicurio.registry.storage.impl.sql.jdb.RowMapper; - public class StringMapper implements RowMapper { public static final StringMapper instance = new StringMapper(); diff --git a/app/src/main/java/io/apicurio/registry/storage/importing/AbstractDataImporter.java b/app/src/main/java/io/apicurio/registry/storage/importing/AbstractDataImporter.java index ea3c85dbe9..6799ba64ec 100644 --- a/app/src/main/java/io/apicurio/registry/storage/importing/AbstractDataImporter.java +++ b/app/src/main/java/io/apicurio/registry/storage/importing/AbstractDataImporter.java @@ -53,11 +53,11 @@ public void importEntity(Entity entity) { // Ignore the manifest for now. 
break; default: - throw new RegistryStorageException("Unhandled entity type during import: " + entity.getEntityType()); + throw new RegistryStorageException( + "Unhandled entity type during import: " + entity.getEntityType()); } } - protected abstract void importArtifactRule(ArtifactRuleEntity entity); protected abstract void importArtifact(ArtifactEntity entity); diff --git a/app/src/main/java/io/apicurio/registry/storage/importing/SqlDataImporter.java b/app/src/main/java/io/apicurio/registry/storage/importing/SqlDataImporter.java index abf7895fe0..64a0527ce3 100644 --- a/app/src/main/java/io/apicurio/registry/storage/importing/SqlDataImporter.java +++ b/app/src/main/java/io/apicurio/registry/storage/importing/SqlDataImporter.java @@ -18,7 +18,6 @@ import java.util.*; import java.util.stream.Collectors; - public class SqlDataImporter extends AbstractDataImporter { protected RegistryStorageContentUtils utils; @@ -44,7 +43,7 @@ public class SqlDataImporter extends AbstractDataImporter { private final Map> artifactBranchesWaitingForVersion = new HashMap<>(); public SqlDataImporter(Logger logger, RegistryStorageContentUtils utils, RegistryStorage storage, - boolean preserveGlobalId, boolean preserveContentId) { + boolean preserveGlobalId, boolean preserveContentId) { super(logger); this.utils = utils; this.storage = storage; @@ -52,7 +51,6 @@ public SqlDataImporter(Logger logger, RegistryStorageContentUtils utils, Registr this.preserveContentId = preserveContentId; } - @Override public void importArtifactRule(ArtifactRuleEntity entity) { try { @@ -69,7 +67,8 @@ protected void importArtifact(ArtifactEntity entity) { storage.importArtifact(entity); log.debug("Artifact imported successfully: {}", entity); } catch (Exception ex) { - log.warn("Failed to import artifact {} / {}: {}", entity.groupId, entity.artifactId, ex.getMessage()); + log.warn("Failed to import artifact {} / {}: {}", entity.groupId, entity.artifactId, + ex.getMessage()); } } @@ -90,7 +89,6 @@ public void 
importArtifactVersion(ArtifactVersionEntity entity) { entity.globalId = storage.nextGlobalId(); } - storage.importArtifactVersion(entity); log.debug("Artifact version imported successfully: {}", entity); globalIdMapping.put(oldGlobalId, entity.globalId); @@ -99,8 +97,7 @@ public void importArtifactVersion(ArtifactVersionEntity entity) { // Import comments that were waiting for this version var commentsToImport = waitingForVersion.stream() - .filter(comment -> comment.globalId == oldGlobalId) - .collect(Collectors.toList()); + .filter(comment -> comment.globalId == oldGlobalId).collect(Collectors.toList()); for (CommentEntity commentEntity : commentsToImport) { importComment(commentEntity); } @@ -113,7 +110,8 @@ public void importArtifactVersion(ArtifactVersionEntity entity) { } catch (VersionAlreadyExistsException ex) { if (ex.getGlobalId() != null) { - log.warn("Duplicate globalId {} detected, skipping import of artifact version: {}", ex.getGlobalId(), entity); + log.warn("Duplicate globalId {} detected, skipping import of artifact version: {}", + ex.getGlobalId(), entity); } else { log.warn("Failed to import artifact version {}: {}", entity, ex.getMessage()); } @@ -122,27 +120,26 @@ public void importArtifactVersion(ArtifactVersionEntity entity) { } } - @Override public void importContent(ContentEntity entity) { try { - List references = SqlUtil.deserializeReferences(entity.serializedReferences); + List references = SqlUtil + .deserializeReferences(entity.serializedReferences); if (entity.contentType == null) { throw new RuntimeException("ContentEntity is missing required field: contentType"); } - TypedContent typedContent = TypedContent.create(ContentHandle.create(entity.contentBytes), entity.contentType); + TypedContent typedContent = TypedContent.create(ContentHandle.create(entity.contentBytes), + entity.contentType); // We do not need canonicalHash if we have artifactType if (entity.canonicalHash == null && entity.artifactType != null) { - TypedContent 
canonicalContent = utils.canonicalizeContent( - entity.artifactType, typedContent, + TypedContent canonicalContent = utils.canonicalizeContent(entity.artifactType, typedContent, storage.resolveReferences(references)); entity.canonicalHash = DigestUtils.sha256Hex(canonicalContent.getContent().bytes()); } - var oldContentId = entity.contentId; if (!preserveContentId) { entity.contentId = storage.nextContentId(); @@ -169,7 +166,6 @@ public void importContent(ContentEntity entity) { } } - @Override public void importGlobalRule(GlobalRuleEntity entity) { try { @@ -180,7 +176,6 @@ public void importGlobalRule(GlobalRuleEntity entity) { } } - @Override public void importGroup(GroupEntity entity) { try { @@ -191,12 +186,11 @@ public void importGroup(GroupEntity entity) { } } - @Override public void importComment(CommentEntity entity) { try { if (!globalIdMapping.containsKey(entity.globalId)) { - // The version hasn't been imported yet. Need to wait for it. + // The version hasn't been imported yet. Need to wait for it. waitingForVersion.add(entity); return; } @@ -209,7 +203,6 @@ public void importComment(CommentEntity entity) { } } - @Override protected void importBranch(BranchEntity entity) { try { diff --git a/app/src/main/java/io/apicurio/registry/storage/metrics/StorageMetricsStore.java b/app/src/main/java/io/apicurio/registry/storage/metrics/StorageMetricsStore.java index cca844d8b1..d1ff592a27 100644 --- a/app/src/main/java/io/apicurio/registry/storage/metrics/StorageMetricsStore.java +++ b/app/src/main/java/io/apicurio/registry/storage/metrics/StorageMetricsStore.java @@ -20,15 +20,14 @@ import java.util.concurrent.atomic.AtomicLong; /** - * This class provides a set of counters. Counters such as "number of artifacts" - * This counters have to be "distributed" or at least work in a clustered deployment. 
- * Currently, this implementation uses {@link Cache} for storing the counters, - * it's "auto-eviction" nature allows to re-initialize the counters with information from the database periodically, - * making it "useful" for clustered deployments. + * This class provides a set of counters. Counters such as "number of artifacts" This counters have to be + * "distributed" or at least work in a clustered deployment. Currently, this implementation uses {@link Cache} + * for storing the counters, it's "auto-eviction" nature allows to re-initialize the counters with information + * from the database periodically, making it "useful" for clustered deployments. *

- * This implementation is far from perfect, ideally redis or some other externalized cache should be used, but for now - * this implementation could work, it's extremely simple and it does not require the deployment of external infrastructure. - * + * This implementation is far from perfect, ideally redis or some other externalized cache should be used, but + * for now this implementation could work, it's extremely simple and it does not require the deployment of + * external infrastructure. */ @ApplicationScoped public class StorageMetricsStore { @@ -53,7 +52,8 @@ public class StorageMetricsStore { @Current RegistryStorage storage; - //NOTE all of this could be changed in the future with a global cache shared between all registry replicas + // NOTE all of this could be changed in the future with a global cache shared between all registry + // replicas private LoadingCache countersCache; private LoadingCache artifactVersionsCounters; @@ -87,28 +87,24 @@ public AtomicLong load(@NotNull String key) { } }; - countersCache = CacheBuilder - .newBuilder() - .expireAfterWrite(limitsCheckPeriod, TimeUnit.MILLISECONDS) - .maximumSize(cacheMaxSize) - .build(totalSchemaCountersLoader); + countersCache = CacheBuilder.newBuilder().expireAfterWrite(limitsCheckPeriod, TimeUnit.MILLISECONDS) + .maximumSize(cacheMaxSize).build(totalSchemaCountersLoader); } - private void createTotalArtifactVersionsCache() { artifactVersionsCountersLoader = new CacheLoader<>() { @Override public AtomicLong load(@NotNull ArtifactVersionKey artifactVersionKey) { - log.info("Initializing total artifact versions counter for artifact gid {} ai {}", artifactVersionKey.groupId, artifactVersionKey.artifactId); - long count = storage.countArtifactVersions(artifactVersionKey.groupId, artifactVersionKey.artifactId); + log.info("Initializing total artifact versions counter for artifact gid {} ai {}", + artifactVersionKey.groupId, artifactVersionKey.artifactId); + long count = 
storage.countArtifactVersions(artifactVersionKey.groupId, + artifactVersionKey.artifactId); return new AtomicLong(count); } }; - artifactVersionsCounters = CacheBuilder - .newBuilder() - .expireAfterWrite(limitsCheckPeriod, TimeUnit.MILLISECONDS) - .maximumSize(cacheMaxSize) + artifactVersionsCounters = CacheBuilder.newBuilder() + .expireAfterWrite(limitsCheckPeriod, TimeUnit.MILLISECONDS).maximumSize(cacheMaxSize) .build(artifactVersionsCountersLoader); } @@ -132,7 +128,7 @@ public void incrementTotalSchemasCounter() { log.info("Incrementing total schemas counter"); AtomicLong counter = countersCache.getUnchecked(TOTAL_SCHEMAS_KEY); if (counter == null) { - //cached counter expired, do nothing, it will be reloaded from DB on the next read + // cached counter expired, do nothing, it will be reloaded from DB on the next read return; } else { counter.incrementAndGet(); @@ -142,7 +138,7 @@ public void incrementTotalSchemasCounter() { public void incrementArtifactsCounter() { AtomicLong counter = countersCache.getUnchecked(ARTIFACT_COUNTER); if (counter == null) { - //cached counter expired, do nothing, it will be reloaded from DB on the next read + // cached counter expired, do nothing, it will be reloaded from DB on the next read return; } else { counter.incrementAndGet(); @@ -155,7 +151,7 @@ public void incrementArtifactVersionsCounter(String groupId, String artifactId) avk.artifactId = artifactId; AtomicLong counter = artifactVersionsCounters.getUnchecked(avk); if (counter == null) { - //cached counter expired, do nothing, it will be reloaded from DB on the next read + // cached counter expired, do nothing, it will be reloaded from DB on the next read return; } else { counter.incrementAndGet(); diff --git a/app/src/main/java/io/apicurio/registry/types/Current.java b/app/src/main/java/io/apicurio/registry/types/Current.java index ff36c1c39e..b4d5311c64 100644 --- a/app/src/main/java/io/apicurio/registry/types/Current.java +++ 
b/app/src/main/java/io/apicurio/registry/types/Current.java @@ -1,18 +1,18 @@ package io.apicurio.registry.types; -import static java.lang.annotation.RetentionPolicy.RUNTIME; +import jakarta.inject.Qualifier; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.Target; -import jakarta.inject.Qualifier; + +import static java.lang.annotation.RetentionPolicy.RUNTIME; /** * Marks currently "used" bean. - * */ @Qualifier @Retention(RUNTIME) -@Target({ElementType.METHOD, ElementType.FIELD, ElementType.TYPE, ElementType.PARAMETER}) +@Target({ ElementType.METHOD, ElementType.FIELD, ElementType.TYPE, ElementType.PARAMETER }) public @interface Current { } diff --git a/app/src/main/java/io/apicurio/registry/types/Raw.java b/app/src/main/java/io/apicurio/registry/types/Raw.java index c42dafa4ed..765cdd9035 100644 --- a/app/src/main/java/io/apicurio/registry/types/Raw.java +++ b/app/src/main/java/io/apicurio/registry/types/Raw.java @@ -10,10 +10,9 @@ /** * Marks undecorated or otherwise unmodified bean. - * */ @Qualifier @Retention(RUNTIME) -@Target({ElementType.METHOD, ElementType.FIELD, ElementType.TYPE, ElementType.PARAMETER}) +@Target({ ElementType.METHOD, ElementType.FIELD, ElementType.TYPE, ElementType.PARAMETER }) public @interface Raw { } diff --git a/app/src/main/java/io/apicurio/registry/ui/URLUtil.java b/app/src/main/java/io/apicurio/registry/ui/URLUtil.java index 1963858e6c..65a8bd21e9 100644 --- a/app/src/main/java/io/apicurio/registry/ui/URLUtil.java +++ b/app/src/main/java/io/apicurio/registry/ui/URLUtil.java @@ -13,17 +13,16 @@ /** * Utility to generate absolute URLs. - * */ @ApplicationScoped public class URLUtil { @ConfigProperty(name = "apicurio.url.override.host") - @Info(category = "redirects", description = "Override the hostname used for generating externally-accessible URLs. " + - "The host and port overrides are useful when deploying Registry with HTTPS passthrough Ingress or Route. 
" + - "In cases like these, the request URL (and port) that is then re-used for redirection " + - "does not belong to actual external URL used by the client, because the request is proxied. " + - "The redirection then fails because the target URL is not reachable.", availableSince = "2.5.0.Final") + @Info(category = "redirects", description = "Override the hostname used for generating externally-accessible URLs. " + + "The host and port overrides are useful when deploying Registry with HTTPS passthrough Ingress or Route. " + + "In cases like these, the request URL (and port) that is then re-used for redirection " + + "does not belong to actual external URL used by the client, because the request is proxied. " + + "The redirection then fails because the target URL is not reachable.", availableSince = "2.5.0.Final") Optional urlOverrideHost; @ConfigProperty(name = "apicurio.url.override.port") @@ -34,9 +33,9 @@ public class URLUtil { Logger log; /** - * Given a relative path to a resource on this Registry server, - * try to produce an externally-accessible absolute URL to it, based on the request or configuration. - * This is useful for redirects and generating URLs for clients. + * Given a relative path to a resource on this Registry server, try to produce an externally-accessible + * absolute URL to it, based on the request or configuration. This is useful for redirects and generating + * URLs for clients. 
*/ public URL getExternalAbsoluteURL(HttpServletRequest request, String relativePath) { @@ -55,7 +54,8 @@ public URL getExternalAbsoluteURL(HttpServletRequest request, String relativePat // Protocol targetProtocol = requestURL.getProtocol(); if ("http".equals(targetProtocol) && request.isSecure()) { - log.debug("Generating absolute URL: Switching from HTTP to HTTPS protocol for a secure request."); + log.debug( + "Generating absolute URL: Switching from HTTP to HTTPS protocol for a secure request."); targetProtocol = "https"; } if (forwardedProtoHeaderValue != null && !forwardedProtoHeaderValue.isBlank()) { @@ -91,8 +91,9 @@ public URL getExternalAbsoluteURL(HttpServletRequest request, String relativePat return targetURL; } catch (MalformedURLException ex) { - throw new RuntimeException(String.format("Could not generate a valid absolute URL from: " + - "protocol = '%s', host = '%s', port = '%s', and relativePath = '%s'.", + throw new RuntimeException(String.format( + "Could not generate a valid absolute URL from: " + + "protocol = '%s', host = '%s', port = '%s', and relativePath = '%s'.", targetProtocol, targetHost, targetPort, relativePath), ex); } } diff --git a/app/src/main/java/io/apicurio/registry/ui/UserInterfaceConfigProperties.java b/app/src/main/java/io/apicurio/registry/ui/UserInterfaceConfigProperties.java index ce4407d400..290308e4ba 100644 --- a/app/src/main/java/io/apicurio/registry/ui/UserInterfaceConfigProperties.java +++ b/app/src/main/java/io/apicurio/registry/ui/UserInterfaceConfigProperties.java @@ -1,9 +1,8 @@ package io.apicurio.registry.ui; -import org.eclipse.microprofile.config.inject.ConfigProperty; - import io.apicurio.common.apps.config.Info; import jakarta.inject.Singleton; +import org.eclipse.microprofile.config.inject.ConfigProperty; @Singleton public class UserInterfaceConfigProperties { @@ -17,8 +16,7 @@ public class UserInterfaceConfigProperties { @ConfigProperty(name = "apicurio.ui.docsUrl", defaultValue = "/docs/") @Info(category 
= "ui", description = "URL of the Documentation component", availableSince = "3.0.0") public String docsUrl; - - + @ConfigProperty(name = "quarkus.oidc.auth-server-url") public String authOidcUrl; @ConfigProperty(name = "apicurio.ui.auth.oidc.redirect-uri", defaultValue = "/") @@ -28,7 +26,6 @@ public class UserInterfaceConfigProperties { @Info(category = "ui", description = "The OIDC clientId", availableSince = "3.0.0") public String authOidcClientId; - @ConfigProperty(name = "apicurio.ui.features.read-only.enabled", defaultValue = "false") @Info(category = "ui", description = "Enabled to set the UI to read-only mode", availableSince = "3.0.0") public String featureReadOnly; diff --git a/app/src/main/java/io/apicurio/registry/ui/servlets/ApiDocsServlet.java b/app/src/main/java/io/apicurio/registry/ui/servlets/ApiDocsServlet.java index a88d319a0f..fad619a6a1 100644 --- a/app/src/main/java/io/apicurio/registry/ui/servlets/ApiDocsServlet.java +++ b/app/src/main/java/io/apicurio/registry/ui/servlets/ApiDocsServlet.java @@ -1,15 +1,15 @@ package io.apicurio.registry.ui.servlets; -import java.io.IOException; -import java.util.HashSet; -import java.util.Set; - import jakarta.servlet.GenericServlet; import jakarta.servlet.ServletException; import jakarta.servlet.ServletRequest; import jakarta.servlet.ServletResponse; import jakarta.servlet.http.HttpServletRequest; +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + /** * A simple servlet that forwards the request to the apidocs.html file. 
*/ @@ -28,7 +28,8 @@ private static final boolean isRootPath(String servletPath) { } /** - * @see jakarta.servlet.GenericServlet#service(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse) + * @see jakarta.servlet.GenericServlet#service(jakarta.servlet.ServletRequest, + * jakarta.servlet.ServletResponse) */ @Override public void service(ServletRequest req, ServletResponse res) throws ServletException, IOException { diff --git a/app/src/main/java/io/apicurio/registry/ui/servlets/HSTSFilter.java b/app/src/main/java/io/apicurio/registry/ui/servlets/HSTSFilter.java index e44ab50e47..f44212f453 100644 --- a/app/src/main/java/io/apicurio/registry/ui/servlets/HSTSFilter.java +++ b/app/src/main/java/io/apicurio/registry/ui/servlets/HSTSFilter.java @@ -1,6 +1,5 @@ package io.apicurio.registry.ui.servlets; -import java.io.IOException; import jakarta.servlet.Filter; import jakarta.servlet.FilterChain; import jakarta.servlet.FilterConfig; @@ -9,9 +8,10 @@ import jakarta.servlet.ServletResponse; import jakarta.servlet.http.HttpServletResponse; +import java.io.IOException; + /** - * Add HSTS headers to all HTTP responses. Browser will ignore the header if the connection - * is not secure. + * Add HSTS headers to all HTTP responses. Browser will ignore the header if the connection is not secure. 
*/ public class HSTSFilter implements Filter { @@ -36,10 +36,12 @@ public void init(FilterConfig config) throws ServletException { } /** - * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse, jakarta.servlet.FilterChain) + * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse, + * jakarta.servlet.FilterChain) */ @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { + public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) + throws IOException, ServletException { HttpServletResponse httpResponse = (HttpServletResponse) response; addHstsHeaders(httpResponse); chain.doFilter(request, response); diff --git a/app/src/main/java/io/apicurio/registry/ui/servlets/RedirectFilter.java b/app/src/main/java/io/apicurio/registry/ui/servlets/RedirectFilter.java index 8c5ba11004..b046ab5c3e 100644 --- a/app/src/main/java/io/apicurio/registry/ui/servlets/RedirectFilter.java +++ b/app/src/main/java/io/apicurio/registry/ui/servlets/RedirectFilter.java @@ -14,7 +14,6 @@ import java.util.HashMap; import java.util.Map; - @ApplicationScoped public class RedirectFilter implements Filter { @@ -52,8 +51,8 @@ public void init(FilterConfig filterConfig) throws ServletException { } /** - * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, - * jakarta.servlet.ServletResponse, jakarta.servlet.FilterChain) + * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse, + * jakarta.servlet.FilterChain) */ @Override public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) @@ -69,7 +68,8 @@ public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) } if (redirects.containsKey(servletPath)) { - response.sendRedirect(urlUtil.getExternalAbsoluteURL(request, 
redirects.get(servletPath)).toString()); + response.sendRedirect( + urlUtil.getExternalAbsoluteURL(request, redirects.get(servletPath)).toString()); return; } } diff --git a/app/src/main/java/io/apicurio/registry/ui/servlets/ResourceCacheControlFilter.java b/app/src/main/java/io/apicurio/registry/ui/servlets/ResourceCacheControlFilter.java index 5fce8253b8..6cded31dc5 100644 --- a/app/src/main/java/io/apicurio/registry/ui/servlets/ResourceCacheControlFilter.java +++ b/app/src/main/java/io/apicurio/registry/ui/servlets/ResourceCacheControlFilter.java @@ -1,8 +1,5 @@ package io.apicurio.registry.ui.servlets; -import java.io.IOException; -import java.util.Date; - import jakarta.servlet.Filter; import jakarta.servlet.FilterChain; import jakarta.servlet.FilterConfig; @@ -12,9 +9,11 @@ import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.util.Date; + /** * {@link Filter} to add cache control headers for resources such as CSS and images. 
- * */ public class ResourceCacheControlFilter implements Filter { @@ -29,7 +28,7 @@ public static void disableHttpCaching(HttpServletResponse httpResponse) { private static long expiredSinceYesterday(Date now) { return now.getTime() - 86400000L; } - + /** * C'tor */ @@ -44,10 +43,12 @@ public void init(FilterConfig config) throws ServletException { } /** - * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse, jakarta.servlet.FilterChain) + * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse, + * jakarta.servlet.FilterChain) */ @Override - public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { + public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) + throws IOException, ServletException { String requestURI = ((HttpServletRequest) request).getRequestURI(); Date now = new Date(); HttpServletResponse httpResponse = (HttpServletResponse) response; @@ -63,7 +64,7 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha } else if (requestURI.contains("/apis/")) { disableCaching = true; } - + if (disableCaching) { disableHttpCaching(httpResponse); } else { diff --git a/app/src/main/java/io/apicurio/registry/ui/servlets/SpecUrlFilter.java b/app/src/main/java/io/apicurio/registry/ui/servlets/SpecUrlFilter.java index 74644806ae..b7c637a299 100644 --- a/app/src/main/java/io/apicurio/registry/ui/servlets/SpecUrlFilter.java +++ b/app/src/main/java/io/apicurio/registry/ui/servlets/SpecUrlFilter.java @@ -1,10 +1,5 @@ package io.apicurio.registry.ui.servlets; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.PrintWriter; -import java.nio.charset.StandardCharsets; - import jakarta.enterprise.context.ApplicationScoped; import jakarta.servlet.Filter; import jakarta.servlet.FilterChain; @@ -18,12 +13,14 @@ import 
jakarta.servlet.http.HttpServletResponse; import jakarta.servlet.http.HttpServletResponseWrapper; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; /** * Note: simple filtering of response content - found on Stack Overflow here: - * - * https://stackoverflow.com/a/14741213 - * + * https://stackoverflow.com/a/14741213 */ @ApplicationScoped public class SpecUrlFilter implements Filter { @@ -36,10 +33,12 @@ public void init(FilterConfig filterConfig) throws ServletException { } /** - * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse, jakarta.servlet.FilterChain) + * @see jakarta.servlet.Filter#doFilter(jakarta.servlet.ServletRequest, jakarta.servlet.ServletResponse, + * jakarta.servlet.FilterChain) */ @Override - public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws IOException, ServletException { + public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) + throws IOException, ServletException { CharResponseWrapper wrappedResponse = new CharResponseWrapper((HttpServletResponse) resp); chain.doFilter(req, wrappedResponse); @@ -71,6 +70,7 @@ private static class ByteArrayServletStream extends ServletOutputStream { /** * Constructor. + * * @param baos */ ByteArrayServletStream(ByteArrayOutputStream baos) { @@ -164,6 +164,7 @@ public String toString() { /** * Generates a URL that the caller can use to access the API. 
+ * * @param request */ private String generateSpecUrl(HttpServletRequest request) { diff --git a/app/src/main/java/io/apicurio/registry/util/ArtifactIdGeneratorImpl.java b/app/src/main/java/io/apicurio/registry/util/ArtifactIdGeneratorImpl.java index d39a4c1507..1e4a51cd93 100644 --- a/app/src/main/java/io/apicurio/registry/util/ArtifactIdGeneratorImpl.java +++ b/app/src/main/java/io/apicurio/registry/util/ArtifactIdGeneratorImpl.java @@ -1,13 +1,14 @@ package io.apicurio.registry.util; -import java.util.UUID; import jakarta.enterprise.context.ApplicationScoped; +import java.util.UUID; + @ApplicationScoped public class ArtifactIdGeneratorImpl implements ArtifactIdGenerator { public String generate() { return UUID.randomUUID().toString(); } - + } diff --git a/app/src/main/java/io/apicurio/registry/util/ArtifactTypeUtil.java b/app/src/main/java/io/apicurio/registry/util/ArtifactTypeUtil.java index 2e56f7f42c..5d2af4791c 100644 --- a/app/src/main/java/io/apicurio/registry/util/ArtifactTypeUtil.java +++ b/app/src/main/java/io/apicurio/registry/util/ArtifactTypeUtil.java @@ -19,24 +19,26 @@ private ArtifactTypeUtil() { /** * Figures out the artifact type in the following order of precedent: *

- * 1) The type provided in the request - * 2) Determined from the content itself + * 1) The type provided in the request 2) Determined from the content itself * - * @param content the content - * @param artifactType the artifact type + * @param content the content + * @param artifactType the artifact type */ public static String determineArtifactType(TypedContent content, String artifactType, - ArtifactTypeUtilProviderFactory artifactTypeProviderFactory) { - return determineArtifactType(content, artifactType, Collections.emptyMap(), artifactTypeProviderFactory); + ArtifactTypeUtilProviderFactory artifactTypeProviderFactory) { + return determineArtifactType(content, artifactType, Collections.emptyMap(), + artifactTypeProviderFactory); } public static String determineArtifactType(TypedContent content, String artifactType, - Map resolvedReferences, ArtifactTypeUtilProviderFactory artifactTypeProviderFactory) { + Map resolvedReferences, + ArtifactTypeUtilProviderFactory artifactTypeProviderFactory) { if ("".equals(artifactType)) { artifactType = null; } if (artifactType == null && content != null) { - artifactType = ArtifactTypeUtil.discoverType(content, resolvedReferences, artifactTypeProviderFactory); + artifactType = ArtifactTypeUtil.discoverType(content, resolvedReferences, + artifactTypeProviderFactory); } if (!artifactTypeProviderFactory.getAllArtifactTypes().contains(artifactType)) { throw new InvalidArtifactTypeException("Invalid or unknown artifact type: " + artifactType); @@ -45,7 +47,8 @@ public static String determineArtifactType(TypedContent content, String artifact } // TODO: should we move this to ArtifactTypeUtilProvider and make this logic injectable? yes! 
- // as a first implementation forcing users to specify the type if its custom sounds like a reasonable tradeoff + // as a first implementation forcing users to specify the type if its custom sounds like a reasonable + // tradeoff /** * Method that discovers the artifact type from the raw content of an artifact. This will attempt to parse * the content (with the optional provided Content Type as a hint) and figure out what type of artifact it @@ -53,12 +56,13 @@ public static String determineArtifactType(TypedContent content, String artifact * formatted. So in these cases we will need to look for some sort of type-specific marker in the content * of the artifact. The method does its best to figure out the type, but will default to Avro if all else * fails. + * * @param content * @param resolvedReferences */ @SuppressWarnings("deprecation") private static String discoverType(TypedContent content, Map resolvedReferences, - ArtifactTypeUtilProviderFactory artifactTypeProviderFactory) throws InvalidArtifactTypeException { + ArtifactTypeUtilProviderFactory artifactTypeProviderFactory) throws InvalidArtifactTypeException { for (ArtifactTypeUtilProvider provider : artifactTypeProviderFactory.getAllArtifactTypeProviders()) { if (provider.acceptsContent(content, resolvedReferences)) { return provider.getArtifactType(); @@ -68,4 +72,3 @@ private static String discoverType(TypedContent content, Map requestCustomizer) throws Exception { + protected CreateArtifactResponse createArtifact(String groupId, String artifactId, String artifactType, + String content, String contentType, Consumer requestCustomizer) throws Exception { CreateArtifact createArtifact = new CreateArtifact(); createArtifact.setArtifactId(artifactId); createArtifact.setArtifactType(artifactType); @@ -132,31 +133,29 @@ protected CreateArtifactResponse createArtifact(String groupId, String artifactI requestCustomizer.accept(createArtifact); } - var result = clientV3 - .groups() - .byGroupId(groupId) - 
.artifacts() - .post(createArtifact) - ; + var result = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); - assert( result.getArtifact().getArtifactId().equals(artifactId) ); - assert( result.getArtifact().getArtifactType().equals(artifactType) ); + assert (result.getArtifact().getArtifactId().equals(artifactId)); + assert (result.getArtifact().getArtifactType().equals(artifactType)); return result; } - protected CreateArtifactResponse createArtifactWithReferences(String groupId, String artifactId, String artifactType, String content, - String contentType, List artifactReferences) throws Exception { - var response = createArtifactExtendedRaw(groupId, artifactId, artifactType, content, contentType, artifactReferences); + protected CreateArtifactResponse createArtifactWithReferences(String groupId, String artifactId, + String artifactType, String content, String contentType, + List artifactReferences) throws Exception { + var response = createArtifactExtendedRaw(groupId, artifactId, artifactType, content, contentType, + artifactReferences); - assert( response.getArtifact().getArtifactType().equals(artifactType) ); - assert( response.getArtifact().getArtifactId().equals(artifactId) ); + assert (response.getArtifact().getArtifactType().equals(artifactType)); + assert (response.getArtifact().getArtifactId().equals(artifactId)); return response; } - protected CreateArtifactResponse createArtifactExtendedRaw(String groupId, String artifactId, String artifactType, - String content, String contentType, List versionReferences) throws Exception { + protected CreateArtifactResponse createArtifactExtendedRaw(String groupId, String artifactId, + String artifactType, String content, String contentType, + List versionReferences) throws Exception { CreateArtifact createArtifact = new CreateArtifact(); createArtifact.setArtifactId(artifactId); createArtifact.setArtifactType(artifactType); @@ -179,12 +178,7 @@ protected CreateArtifactResponse 
createArtifactExtendedRaw(String groupId, Strin versionContent.setReferences(references); } - return clientV3 - .groups() - .byGroupId(groupId) - .artifacts() - .post(createArtifact) - ; + return clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); } protected VersionMetaData createArtifactVersionExtendedRaw(String groupId, String artifactId, @@ -205,70 +199,50 @@ protected VersionMetaData createArtifactVersionExtendedRaw(String groupId, Strin }).collect(Collectors.toList()); versionContent.setReferences(references); - return clientV3 - .groups() - .byGroupId(groupId) - .artifacts() - .byArtifactId(artifactId) - .versions() - .post(createVersion) - ; + return clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); } - protected Long createArtifactVersion(String artifactId, String content, String contentType) throws Exception { - return createArtifactVersion(GroupId.DEFAULT.getRawGroupIdWithDefaultString(), artifactId, content, contentType); + protected Long createArtifactVersion(String artifactId, String content, String contentType) + throws Exception { + return createArtifactVersion(GroupId.DEFAULT.getRawGroupIdWithDefaultString(), artifactId, content, + contentType); } - protected Long createArtifactVersion(String groupId, String artifactId, String content, String contentType) throws Exception { + protected Long createArtifactVersion(String groupId, String artifactId, String content, + String contentType) throws Exception { CreateVersion createVersion = new CreateVersion(); VersionContent versionContent = new VersionContent(); createVersion.setContent(versionContent); versionContent.setContent(content); versionContent.setContentType(contentType); - var version = clientV3 - .groups() - .byGroupId(groupId) - .artifacts() - .byArtifactId(artifactId) - .versions() - .post(createVersion) - ; + var version = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + 
.post(createVersion); - assert( version.getArtifactId().equals(artifactId) ); + assert (version.getArtifactId().equals(artifactId)); return version.getGlobalId(); } - protected void createArtifactRule(String groupId, String artifactId, RuleType ruleType, String ruleConfig) { + protected void createArtifactRule(String groupId, String artifactId, RuleType ruleType, + String ruleConfig) { var createRule = new io.apicurio.registry.rest.client.models.CreateRule(); createRule.setConfig(ruleConfig); createRule.setRuleType(io.apicurio.registry.rest.client.models.RuleType.forValue(ruleType.value())); - clientV3 - .groups() - .byGroupId(groupId) - .artifacts() - .byArtifactId(artifactId) - .rules() - .post(createRule); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().post(createRule); } - protected io.apicurio.registry.rest.client.models.Rule createGlobalRule(RuleType ruleType, String ruleConfig) { + protected io.apicurio.registry.rest.client.models.Rule createGlobalRule(RuleType ruleType, + String ruleConfig) { var createRule = new io.apicurio.registry.rest.client.models.CreateRule(); createRule.setConfig(ruleConfig); createRule.setRuleType(io.apicurio.registry.rest.client.models.RuleType.forValue(ruleType.value())); - clientV3 - .admin() - .rules() - .post(createRule); + clientV3.admin().rules().post(createRule); // TODO: verify this get - return clientV3 - .admin() - .rules() - .byRuleType(ruleType.value()) - .get(); + return clientV3.admin().rules().byRuleType(ruleType.value()).get(); } /** @@ -277,26 +251,24 @@ protected io.apicurio.registry.rest.client.models.Rule createGlobalRule(RuleType * @param response * @param state */ - protected void validateMetaDataResponseState(ValidatableResponse response, ArtifactState state, boolean version) { + protected void validateMetaDataResponseState(ValidatableResponse response, ArtifactState state, + boolean version) { response.statusCode(200); response.body("state", equalTo(state.name())); } 
protected String getRandomValidJsonSchemaContent() { - return "{\n" + - " \"$id\": \"https://example.com/person.schema.json\",\n" + - " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + - " \"title\": \"Person-" + UUID.randomUUID() + "\",\n" + - " \"type\": \"object\",\n" + - " \"properties\": {\n" + - " }\n" + - "}"; + return "{\n" + " \"$id\": \"https://example.com/person.schema.json\",\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + " \"title\": \"Person-" + + UUID.randomUUID() + "\",\n" + " \"type\": \"object\",\n" + " \"properties\": {\n" + + " }\n" + "}"; } protected byte[] concatContentAndReferences(byte[] contentBytes, String references) throws IOException { if (references != null) { final byte[] referencesBytes = references.getBytes(StandardCharsets.UTF_8); - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(contentBytes.length + referencesBytes.length); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream( + contentBytes.length + referencesBytes.length); outputStream.write(contentBytes); outputStream.write(referencesBytes); return outputStream.toByteArray(); @@ -311,17 +283,16 @@ protected List toReferenceDtos(List ref } return references.stream() .peek(r -> r.setGroupId(new GroupId(r.getGroupId()).getRawGroupIdWithNull())) - .map(V3ApiUtil::referenceToDto) - .collect(Collectors.toList()); + .map(V3ApiUtil::referenceToDto).collect(Collectors.toList()); } protected void assertForbidden(Exception exception) { Assertions.assertEquals(ApiException.class, exception.getClass()); - Assertions.assertEquals(403, ((ApiException)exception).getResponseStatusCode()); + Assertions.assertEquals(403, ((ApiException) exception).getResponseStatusCode()); } protected void assertNotAuthorized(Exception exception) { - if (exception instanceof NotAuthorizedException) { + if (exception instanceof NotAuthorizedException) { // thrown by the token provider adapter } else { // mapped by Kiota diff --git 
a/app/src/test/java/io/apicurio/registry/ImportLifecycleBeanTest.java b/app/src/test/java/io/apicurio/registry/ImportLifecycleBeanTest.java index 450b479bea..aa4ada12a0 100644 --- a/app/src/test/java/io/apicurio/registry/ImportLifecycleBeanTest.java +++ b/app/src/test/java/io/apicurio/registry/ImportLifecycleBeanTest.java @@ -29,34 +29,17 @@ protected void beforeEach() throws Exception { @Test public void testCheckImportedData() throws Exception { TestUtils.retry(() -> { - given() - .when() - .accept(CT_JSON) - .get("/registry/v3/admin/rules") - .then() - .statusCode(200) - .body("[0]", equalTo("COMPATIBILITY")) - .body("[1]", nullValue()); + given().when().accept(CT_JSON).get("/registry/v3/admin/rules").then().statusCode(200) + .body("[0]", equalTo("COMPATIBILITY")).body("[1]", nullValue()); }); - given() - .when() - .accept(CT_JSON) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", is(3)) - .body("artifacts.id", containsInAnyOrder("Artifact-3", "Artifact-2", "Artifact-1")); + given().when().accept(CT_JSON).get("/registry/v3/search/artifacts").then().statusCode(200) + .body("count", is(3)) + .body("artifacts.id", containsInAnyOrder("Artifact-3", "Artifact-2", "Artifact-1")); - given() - .when() - .accept(CT_JSON) - .get("/registry/v3/groups/ImportTest/artifacts/Artifact-1/versions") - .then() - .statusCode(200) - .body("versions.size()", is(3)) - .body("versions[0].version", equalTo("1.0.1")) - .body("versions[1].version", equalTo("1.0.2")) - .body("versions[2].version", equalTo("1.0.3")); + given().when().accept(CT_JSON).get("/registry/v3/groups/ImportTest/artifacts/Artifact-1/versions") + .then().statusCode(200).body("versions.size()", is(3)) + .body("versions[0].version", equalTo("1.0.1")).body("versions[1].version", equalTo("1.0.2")) + .body("versions[2].version", equalTo("1.0.3")); } } diff --git a/app/src/test/java/io/apicurio/registry/ImportLifecycleBeanTestProfile.java 
b/app/src/test/java/io/apicurio/registry/ImportLifecycleBeanTestProfile.java index 67af9ba4ab..31d1058d88 100644 --- a/app/src/test/java/io/apicurio/registry/ImportLifecycleBeanTestProfile.java +++ b/app/src/test/java/io/apicurio/registry/ImportLifecycleBeanTestProfile.java @@ -9,7 +9,8 @@ public class ImportLifecycleBeanTestProfile implements QuarkusTestProfile { @Override public Map getConfigOverrides() { - return Collections.singletonMap("apicurio.import.url", getClass().getResource("rest/v3/export.zip").toExternalForm()); + return Collections.singletonMap("apicurio.import.url", + getClass().getResource("rest/v3/export.zip").toExternalForm()); } } diff --git a/app/src/test/java/io/apicurio/registry/JsonSchemas.java b/app/src/test/java/io/apicurio/registry/JsonSchemas.java index b679e8cbb9..be16f2930c 100644 --- a/app/src/test/java/io/apicurio/registry/JsonSchemas.java +++ b/app/src/test/java/io/apicurio/registry/JsonSchemas.java @@ -5,33 +5,17 @@ */ public class JsonSchemas { - public static final String jsonSchema = "{\n" + - " \"type\": \"object\",\n" + - " \"properties\": {\n" + - " \"age\": {\n" + - " \"description\": \"Age in years which must be equal to or greater than zero.\",\n" + - " \"type\": \"integer\",\n" + - " \"minimum\": 0\n" + - " },\n" + - " \"zipcode\": {\n" + - " \"description\": \"ZipCode\",\n" + - " \"type\": \"integer\"\n" + - " }\n" + - " }\n" + - "}"; + public static final String jsonSchema = "{\n" + " \"type\": \"object\",\n" + " \"properties\": {\n" + + " \"age\": {\n" + + " \"description\": \"Age in years which must be equal to or greater than zero.\",\n" + + " \"type\": \"integer\",\n" + " \"minimum\": 0\n" + " },\n" + + " \"zipcode\": {\n" + " \"description\": \"ZipCode\",\n" + + " \"type\": \"integer\"\n" + " }\n" + " }\n" + "}"; - public static final String incompatibleJsonSchema = "{\n" + - " \"type\": \"object\",\n" + - " \"properties\": {\n" + - " \"age\": {\n" + - " \"description\": \"Age in years which must be equal to or 
greater than zero.\",\n" + - " \"type\": \"string\",\n" + - " \"minimum\": 0\n" + - " },\n" + - " \"zipcode\": {\n" + - " \"description\": \"ZipCode\",\n" + - " \"type\": \"string\"\n" + - " }\n" + - " }\n" + - "}"; + public static final String incompatibleJsonSchema = "{\n" + " \"type\": \"object\",\n" + + " \"properties\": {\n" + " \"age\": {\n" + + " \"description\": \"Age in years which must be equal to or greater than zero.\",\n" + + " \"type\": \"string\",\n" + " \"minimum\": 0\n" + " },\n" + " \"zipcode\": {\n" + + " \"description\": \"ZipCode\",\n" + " \"type\": \"string\"\n" + " }\n" + " }\n" + + "}"; } diff --git a/app/src/test/java/io/apicurio/registry/MigrationTest.java b/app/src/test/java/io/apicurio/registry/MigrationTest.java index 613ffd5992..e2c663b3fd 100644 --- a/app/src/test/java/io/apicurio/registry/MigrationTest.java +++ b/app/src/test/java/io/apicurio/registry/MigrationTest.java @@ -8,12 +8,13 @@ @QuarkusTest public class MigrationTest extends AbstractResourceTestBase { - @Test public void migrateData() throws Exception { - InputStream originalData = getClass().getResource("rest/v3/destination_original_data.zip").openStream(); - InputStream migratedData = getClass().getResource("rest/v3/migration_test_data_dump.zip").openStream(); + InputStream originalData = getClass().getResource("rest/v3/destination_original_data.zip") + .openStream(); + InputStream migratedData = getClass().getResource("rest/v3/migration_test_data_dump.zip") + .openStream(); clientV3.admin().importEscaped().post(originalData, config -> { // TODO: this header should be injected by Kiota diff --git a/app/src/test/java/io/apicurio/registry/auth/AuthTestAnonymousCredentials.java b/app/src/test/java/io/apicurio/registry/auth/AuthTestAnonymousCredentials.java index ab8054ab7c..b864039e7d 100644 --- a/app/src/test/java/io/apicurio/registry/auth/AuthTestAnonymousCredentials.java +++ b/app/src/test/java/io/apicurio/registry/auth/AuthTestAnonymousCredentials.java @@ -36,7 +36,8 @@ 
public class AuthTestAnonymousCredentials extends AbstractResourceTestBase { @Test public void testWrongCreds() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrl, JWKSMockServer.WRONG_CREDS_CLIENT_ID, "test55")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrl, JWKSMockServer.WRONG_CREDS_CLIENT_ID, "test55")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); @@ -57,19 +58,13 @@ public void testNoCredentials() throws Exception { Assertions.assertTrue(results.getCount() >= 0); // Write operation should fail without any credentials - String data = "{\r\n" + - " \"type\" : \"record\",\r\n" + - " \"name\" : \"userInfo\",\r\n" + - " \"namespace\" : \"my.example\",\r\n" + - " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\r\n" + - "}"; + String data = "{\r\n" + " \"type\" : \"record\",\r\n" + " \"name\" : \"userInfo\",\r\n" + + " \"namespace\" : \"my.example\",\r\n" + + " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\r\n" + "}"; var exception = Assertions.assertThrows(ApiException.class, () -> { - CreateArtifact createArtifact = TestUtils.clientCreateArtifact("testNoCredentials", ArtifactType.AVRO, data, ContentTypes.APPLICATION_JSON); - client - .groups() - .byGroupId(groupId) - .artifacts() - .post(createArtifact); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact("testNoCredentials", + ArtifactType.AVRO, data, ContentTypes.APPLICATION_JSON); + client.groups().byGroupId(groupId).artifacts().post(createArtifact); }); Assertions.assertEquals(401, exception.getResponseStatusCode()); } diff --git a/app/src/test/java/io/apicurio/registry/auth/AuthTestAuthenticatedReadAccess.java b/app/src/test/java/io/apicurio/registry/auth/AuthTestAuthenticatedReadAccess.java index a6f4b4f803..f30bd18cb0 100644 --- a/app/src/test/java/io/apicurio/registry/auth/AuthTestAuthenticatedReadAccess.java +++ 
b/app/src/test/java/io/apicurio/registry/auth/AuthTestAuthenticatedReadAccess.java @@ -1,7 +1,5 @@ package io.apicurio.registry.auth; - - import io.apicurio.common.apps.config.Info; import io.apicurio.registry.AbstractResourceTestBase; import io.apicurio.registry.rest.client.RegistryClient; @@ -35,7 +33,8 @@ public class AuthTestAuthenticatedReadAccess extends AbstractResourceTestBase { @Override protected RegistryClient createRestClientV3() { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrl, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrl, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); return new RegistryClient(adapter); } @@ -43,26 +42,21 @@ protected RegistryClient createRestClientV3() { @Test public void testReadOperationWithNoRole() throws Exception { // Read-only operation should work with credentials but no role. - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrl, JWKSMockServer.NO_ROLE_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrl, JWKSMockServer.NO_ROLE_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); var results = client.search().artifacts().get(config -> config.queryParameters.groupId = groupId); Assertions.assertTrue(results.getCount() >= 0); // Write operation should fail with credentials but not role. 
- String data = "{\r\n" + - " \"type\" : \"record\",\r\n" + - " \"name\" : \"userInfo\",\r\n" + - " \"namespace\" : \"my.example\",\r\n" + - " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\r\n" + - "}"; + String data = "{\r\n" + " \"type\" : \"record\",\r\n" + " \"name\" : \"userInfo\",\r\n" + + " \"namespace\" : \"my.example\",\r\n" + + " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\r\n" + "}"; var exception = Assertions.assertThrows(Exception.class, () -> { - CreateArtifact createArtifact = TestUtils.clientCreateArtifact("testReadOperationWithNoRole", ArtifactType.AVRO, data, ContentTypes.APPLICATION_JSON); - client - .groups() - .byGroupId(groupId) - .artifacts() - .post(createArtifact); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact("testReadOperationWithNoRole", + ArtifactType.AVRO, data, ContentTypes.APPLICATION_JSON); + client.groups().byGroupId(groupId).artifacts().post(createArtifact); }); assertForbidden(exception); } diff --git a/app/src/test/java/io/apicurio/registry/auth/AuthTestLocalRoles.java b/app/src/test/java/io/apicurio/registry/auth/AuthTestLocalRoles.java index 12084e73c1..fe5d2b5460 100644 --- a/app/src/test/java/io/apicurio/registry/auth/AuthTestLocalRoles.java +++ b/app/src/test/java/io/apicurio/registry/auth/AuthTestLocalRoles.java @@ -31,19 +31,15 @@ /** * Tests local role mappings (managed in the database via the role-mapping API). 
- * */ @QuarkusTest @TestProfile(AuthTestProfileWithLocalRoles.class) @Tag(ApicurioTestTags.SLOW) public class AuthTestLocalRoles extends AbstractResourceTestBase { - private static final String TEST_CONTENT = "{\r\n" + - " \"type\" : \"record\",\r\n" + - " \"name\" : \"userInfo\",\r\n" + - " \"namespace\" : \"my.example\",\r\n" + - " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\r\n" + - "} "; + private static final String TEST_CONTENT = "{\r\n" + " \"type\" : \"record\",\r\n" + + " \"name\" : \"userInfo\",\r\n" + " \"namespace\" : \"my.example\",\r\n" + + " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\r\n" + "} "; @ConfigProperty(name = "quarkus.oidc.token-path") @Info(category = "auth", description = "Auth token endpoint", availableSince = "2.1.0.Final") @@ -51,7 +47,8 @@ public class AuthTestLocalRoles extends AbstractResourceTestBase { @Override protected RegistryClient createRestClientV3() { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); return new RegistryClient(adapter); } @@ -62,16 +59,19 @@ protected RegistryClient createRestClientV3() { static { createRule.setConfig(ValidityLevel.FULL.name()); createRule.setRuleType(RuleType.VALIDITY); - createArtifact = TestUtils.clientCreateArtifact(AuthTestLocalRoles.class.getSimpleName(), ArtifactType.AVRO, TEST_CONTENT, ContentTypes.APPLICATION_JSON); + createArtifact = TestUtils.clientCreateArtifact(AuthTestLocalRoles.class.getSimpleName(), + ArtifactType.AVRO, TEST_CONTENT, ContentTypes.APPLICATION_JSON); } @Test public void testLocalRoles() throws Exception { - var adapterAdmin = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapterAdmin = new VertXRequestAdapter( + 
buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapterAdmin.setBaseUrl(registryV3ApiUrl); RegistryClient clientAdmin = new RegistryClient(adapterAdmin); - var adapterAuth = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.NO_ROLE_CLIENT_ID, "test1")); + var adapterAuth = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.NO_ROLE_CLIENT_ID, "test1")); adapterAuth.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapterAuth); @@ -82,12 +82,7 @@ public void testLocalRoles() throws Exception { assertForbidden(exception1); var exception2 = Assertions.assertThrows(Exception.class, () -> { - client - .groups() - .byGroupId(UUID.randomUUID().toString()) - .artifacts() - .post(createArtifact) - ; + client.groups().byGroupId(UUID.randomUUID().toString()).artifacts().post(createArtifact); }); assertForbidden(exception2); @@ -107,11 +102,7 @@ public void testLocalRoles() throws Exception { client.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().get(); var exception4 = Assertions.assertThrows(Exception.class, () -> { - client - .groups() - .byGroupId(UUID.randomUUID().toString()) - .artifacts() - .post(createArtifact); + client.groups().byGroupId(UUID.randomUUID().toString()).artifacts().post(createArtifact); }); assertForbidden(exception4); var exception5 = Assertions.assertThrows(Exception.class, () -> { @@ -123,22 +114,13 @@ public void testLocalRoles() throws Exception { var devMapping = new UpdateRole(); devMapping.setRole(RoleType.DEVELOPER); - clientAdmin - .admin() - .roleMappings() - .byPrincipalId(JWKSMockServer.NO_ROLE_CLIENT_ID) - .put(devMapping) - ; + clientAdmin.admin().roleMappings().byPrincipalId(JWKSMockServer.NO_ROLE_CLIENT_ID).put(devMapping); // Now the user can read and write but not admin client.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().get(); - client - 
.groups() - .byGroupId(UUID.randomUUID().toString()) - .artifacts() - .post(createArtifact, config -> { - config.headers.add("X-Registry-ArtifactId", getClass().getSimpleName()); - }); + client.groups().byGroupId(UUID.randomUUID().toString()).artifacts().post(createArtifact, config -> { + config.headers.add("X-Registry-ArtifactId", getClass().getSimpleName()); + }); var exception6 = Assertions.assertThrows(Exception.class, () -> { client.admin().rules().post(createRule); }); @@ -148,29 +130,15 @@ public void testLocalRoles() throws Exception { var adminMapping = new UpdateRole(); adminMapping.setRole(RoleType.ADMIN); - clientAdmin - .admin() - .roleMappings() - .byPrincipalId(JWKSMockServer.NO_ROLE_CLIENT_ID) - .put(adminMapping) - ; + clientAdmin.admin().roleMappings().byPrincipalId(JWKSMockServer.NO_ROLE_CLIENT_ID).put(adminMapping); // Now the user can do everything client.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().get(); - client - .groups() - .byGroupId(UUID.randomUUID().toString()) - .artifacts() - .post(createArtifact); + client.groups().byGroupId(UUID.randomUUID().toString()).artifacts().post(createArtifact); client.admin().rules().post(createRule); - + // Now delete the role mapping - clientAdmin - .admin() - .roleMappings() - .byPrincipalId(JWKSMockServer.NO_ROLE_CLIENT_ID) - .delete() - ; - + clientAdmin.admin().roleMappings().byPrincipalId(JWKSMockServer.NO_ROLE_CLIENT_ID).delete(); + } } diff --git a/app/src/test/java/io/apicurio/registry/auth/AuthTestNoRoles.java b/app/src/test/java/io/apicurio/registry/auth/AuthTestNoRoles.java index 66b7e3c8b8..030075584d 100644 --- a/app/src/test/java/io/apicurio/registry/auth/AuthTestNoRoles.java +++ b/app/src/test/java/io/apicurio/registry/auth/AuthTestNoRoles.java @@ -1,6 +1,5 @@ package io.apicurio.registry.auth; - import io.apicurio.common.apps.config.Info; import io.apicurio.registry.AbstractResourceTestBase; import io.apicurio.registry.model.GroupId; @@ -42,14 +41,16 @@ 
public class AuthTestNoRoles extends AbstractResourceTestBase { @Override protected RegistryClient createRestClientV3() { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); return new RegistryClient(adapter); } @Test public void testWrongCreds() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.WRONG_CREDS_CLIENT_ID, "test55")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.WRONG_CREDS_CLIENT_ID, "test55")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); var exception = Assertions.assertThrows(Exception.class, () -> { @@ -60,7 +61,8 @@ public void testWrongCreds() throws Exception { @Test public void testAdminRole() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); String artifactId = TestUtils.generateArtifactId(); @@ -76,19 +78,9 @@ public void testAdminRole() throws Exception { createVersion.setContent(versionContent); versionContent.setContent("{}"); versionContent.setContentType(ContentTypes.APPLICATION_JSON); - client - .groups() - .byGroupId(groupId) - .artifacts() - .post(createArtifact); - TestUtils.retry(() -> - client - .groups() - .byGroupId(groupId) - .artifacts() - .byArtifactId(artifactId) - .get() - ); + client.groups().byGroupId(groupId).artifacts().post(createArtifact); + TestUtils.retry( + () -> 
client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); assertNotNull(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); CreateRule createRule = new CreateRule(); diff --git a/app/src/test/java/io/apicurio/registry/auth/AuthTestProfileBasicClientCredentials.java b/app/src/test/java/io/apicurio/registry/auth/AuthTestProfileBasicClientCredentials.java index f666a2d144..2392ed8e09 100644 --- a/app/src/test/java/io/apicurio/registry/auth/AuthTestProfileBasicClientCredentials.java +++ b/app/src/test/java/io/apicurio/registry/auth/AuthTestProfileBasicClientCredentials.java @@ -42,13 +42,16 @@ public class AuthTestProfileBasicClientCredentials extends AbstractResourceTestB @Override protected RegistryClient createRestClientV3() { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrl, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrl, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); return new RegistryClient(adapter); } + @Test public void testWrongCreds() throws Exception { - var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(JWKSMockServer.WRONG_CREDS_CLIENT_ID, "test55")); + var adapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(JWKSMockServer.WRONG_CREDS_CLIENT_ID, "test55")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); var exception = Assertions.assertThrows(Exception.class, () -> { @@ -59,7 +62,8 @@ public void testWrongCreds() throws Exception { @Test public void testBasicAuthClientCredentials() throws Exception { - var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); String 
artifactId = TestUtils.generateArtifactId(); @@ -76,20 +80,10 @@ public void testBasicAuthClientCredentials() throws Exception { versionContent.setContent("{}"); versionContent.setContentType(ContentTypes.APPLICATION_JSON); - client - .groups() - .byGroupId(groupId) - .artifacts() - .post(createArtifact); + client.groups().byGroupId(groupId).artifacts().post(createArtifact); - TestUtils.retry(() -> - client - .groups() - .byGroupId(groupId) - .artifacts() - .byArtifactId(artifactId) - .get() - ); + TestUtils.retry( + () -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); assertNotNull(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); CreateRule createRule = new CreateRule(); diff --git a/app/src/test/java/io/apicurio/registry/auth/BasicAuthWithPropertiesTest.java b/app/src/test/java/io/apicurio/registry/auth/BasicAuthWithPropertiesTest.java index 949ec38139..07e323aeb7 100644 --- a/app/src/test/java/io/apicurio/registry/auth/BasicAuthWithPropertiesTest.java +++ b/app/src/test/java/io/apicurio/registry/auth/BasicAuthWithPropertiesTest.java @@ -50,28 +50,31 @@ public class BasicAuthWithPropertiesTest extends AbstractResourceTestBase { public static final String READONLY_USERNAME = "duncan"; public static final String READONLY_PASSWORD = "duncan"; - @Override protected RegistryClient createRestClientV3() { - var adapter =new VertXRequestAdapter(buildSimpleAuthWebClient(ADMIN_USERNAME, ADMIN_PASSWORD)); + var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(ADMIN_USERNAME, ADMIN_PASSWORD)); adapter.setBaseUrl(registryV3ApiUrl); return new RegistryClient(adapter); } private static final CreateArtifact createArtifact; static { - createArtifact = TestUtils.clientCreateArtifact(AuthTestLocalRoles.class.getSimpleName(), ArtifactType.JSON, ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON); + createArtifact = TestUtils.clientCreateArtifact(AuthTestLocalRoles.class.getSimpleName(), + ArtifactType.JSON, 
ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON); } protected void assertArtifactNotFound(Exception exception) { Assertions.assertEquals(io.apicurio.registry.rest.client.models.Error.class, exception.getClass()); - Assertions.assertEquals("ArtifactNotFoundException", ((io.apicurio.registry.rest.client.models.Error)exception).getName()); - Assertions.assertEquals(404, ((io.apicurio.registry.rest.client.models.Error)exception).getErrorCode()); + Assertions.assertEquals("ArtifactNotFoundException", + ((io.apicurio.registry.rest.client.models.Error) exception).getName()); + Assertions.assertEquals(404, + ((io.apicurio.registry.rest.client.models.Error) exception).getErrorCode()); } @Test public void testWrongCreds() throws Exception { - var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(UUID.randomUUID().toString(), UUID.randomUUID().toString())); + var adapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(UUID.randomUUID().toString(), UUID.randomUUID().toString())); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); var exception = Assertions.assertThrows(ApiException.class, () -> { @@ -102,13 +105,16 @@ public void testReadOnly() throws Exception { }); assertForbidden(exception3); - var devAdapter = new VertXRequestAdapter(buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); + var devAdapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); devAdapter.setBaseUrl(registryV3ApiUrl); RegistryClient devClient = new RegistryClient(devAdapter); - VersionMetaData meta = devClient.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + VersionMetaData meta = devClient.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); - TestUtils.retry(() -> devClient.groups().byGroupId(groupId).artifacts().byArtifactId(meta.getArtifactId()).get()); + TestUtils.retry(() -> devClient.groups().byGroupId(groupId).artifacts() + 
.byArtifactId(meta.getArtifactId()).get()); assertNotNull(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); @@ -122,7 +128,8 @@ public void testReadOnly() throws Exception { @Test public void testDevRole() throws Exception { - var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); + var adapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); String artifactId = TestUtils.generateArtifactId(); @@ -131,9 +138,11 @@ public void testDevRole() throws Exception { createArtifact.setArtifactId(artifactId); client.groups().byGroupId(groupId).artifacts().post(createArtifact); - TestUtils.retry(() -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); + TestUtils.retry( + () -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); - assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); + assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); @@ -167,9 +176,11 @@ public void testAdminRole() throws Exception { createArtifact.setArtifactId(artifactId); client.groups().byGroupId(groupId).artifacts().post(createArtifact); - TestUtils.retry(() -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); + TestUtils.retry( + () -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); - 
assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); + assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); @@ -191,7 +202,8 @@ public void testAdminRole() throws Exception { @Test public void testOwnerOnlyAuthorization() throws Exception { - var devAdapter = new VertXRequestAdapter(buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); + var devAdapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); devAdapter.setBaseUrl(registryV3ApiUrl); RegistryClient clientDev = new RegistryClient(devAdapter); @@ -215,7 +227,6 @@ public void testOwnerOnlyAuthorization() throws Exception { // But the admin user CAN make the change. 
clientAdmin.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).put(updatedMetaData); - // Now the Dev user will create an artifact String artifactId2 = TestUtils.generateArtifactId(); createArtifact.setArtifactId(artifactId2); @@ -225,43 +236,47 @@ public void testOwnerOnlyAuthorization() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig(CompatibilityLevel.BACKWARD.name()); - clientAdmin.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId2).rules().post(createRule); + clientAdmin.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId2).rules() + .post(createRule); } @Test public void testGetArtifactOwner() throws Exception { - var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); + var adapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); - //Preparation + // Preparation final String groupId = "testGetArtifactOwner"; final String artifactId = generateArtifactId(); final String version = "1"; - //Execution + // Execution createArtifact.setArtifactId(artifactId); - final VersionMetaData created = client.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + final VersionMetaData created = client.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); - //Assertions + // Assertions assertNotNull(created); assertEquals(groupId, created.getGroupId()); assertEquals(artifactId, created.getArtifactId()); assertEquals(version, created.getVersion()); assertEquals(DEVELOPER_USERNAME, created.getOwner()); - //Get the artifact owner via the REST API and verify it + // Get the artifact owner via the REST API and verify it ArtifactMetaData amd = client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); 
assertEquals(DEVELOPER_USERNAME, amd.getOwner()); } @Test public void testUpdateArtifactOwner() throws Exception { - var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); + var adapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); - //Preparation + // Preparation final String groupId = "testUpdateArtifactOwner"; final String artifactId = generateArtifactId(); @@ -269,46 +284,49 @@ public void testUpdateArtifactOwner() throws Exception { final String name = "testUpdateArtifactOwnerName"; final String description = "testUpdateArtifactOwnerDescription"; - //Execution + // Execution createArtifact.setArtifactId(artifactId); createArtifact.getFirstVersion().setVersion(version); createArtifact.getFirstVersion().setName(name); createArtifact.getFirstVersion().setDescription(description); createArtifact.setName(name); createArtifact.setDescription(description); - final VersionMetaData created = client.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + final VersionMetaData created = client.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); - //Assertions + // Assertions assertNotNull(created); assertEquals(groupId, created.getGroupId()); assertEquals(artifactId, created.getArtifactId()); assertEquals(version, created.getVersion()); assertEquals(DEVELOPER_USERNAME, created.getOwner()); - //Get the artifact owner via the REST API and verify it + // Get the artifact owner via the REST API and verify it ArtifactMetaData amd = client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); assertEquals(DEVELOPER_USERNAME, amd.getOwner()); - //Update the owner + // Update the owner EditableArtifactMetaData eamd = new EditableArtifactMetaData(); eamd.setOwner(DEVELOPER_2_USERNAME); 
client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).put(eamd); - //Check that the update worked + // Check that the update worked amd = client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); assertEquals(DEVELOPER_2_USERNAME, amd.getOwner()); } @Test public void testUpdateArtifactOwnerOnlyByOwner() throws Exception { - var adapter_dev1 = new VertXRequestAdapter(buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); + var adapter_dev1 = new VertXRequestAdapter( + buildSimpleAuthWebClient(DEVELOPER_USERNAME, DEVELOPER_PASSWORD)); adapter_dev1.setBaseUrl(registryV3ApiUrl); RegistryClient client_dev1 = new RegistryClient(adapter_dev1); - var adapter_dev2 = new VertXRequestAdapter(buildSimpleAuthWebClient(DEVELOPER_2_USERNAME, DEVELOPER_2_PASSWORD)); + var adapter_dev2 = new VertXRequestAdapter( + buildSimpleAuthWebClient(DEVELOPER_2_USERNAME, DEVELOPER_2_PASSWORD)); adapter_dev2.setBaseUrl(registryV3ApiUrl); RegistryClient client_dev2 = new RegistryClient(adapter_dev2); - //Preparation + // Preparation final String groupId = "testUpdateArtifactOwnerOnlyByOwner"; final String artifactId = generateArtifactId(); @@ -316,27 +334,29 @@ public void testUpdateArtifactOwnerOnlyByOwner() throws Exception { final String name = "testUpdateArtifactOwnerOnlyByOwnerName"; final String description = "testUpdateArtifactOwnerOnlyByOwnerDescription"; - //Execution + // Execution createArtifact.setArtifactId(artifactId); createArtifact.getFirstVersion().setVersion(version); createArtifact.getFirstVersion().setName(name); createArtifact.getFirstVersion().setDescription(description); createArtifact.setName(name); createArtifact.setDescription(description); - final VersionMetaData created = client_dev1.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + final VersionMetaData created = client_dev1.groups().byGroupId(groupId).artifacts() + .post(createArtifact).getVersion(); - //Assertions + // Assertions 
assertNotNull(created); assertEquals(groupId, created.getGroupId()); assertEquals(artifactId, created.getArtifactId()); assertEquals(version, created.getVersion()); assertEquals(DEVELOPER_USERNAME, created.getOwner()); - //Get the artifact owner via the REST API and verify it - ArtifactMetaData amd = client_dev1.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); + // Get the artifact owner via the REST API and verify it + ArtifactMetaData amd = client_dev1.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .get(); assertEquals(DEVELOPER_USERNAME, amd.getOwner()); - //Try to update the owner by dev2 (should fail) + // Try to update the owner by dev2 (should fail) var exception1 = assertThrows(Exception.class, () -> { EditableArtifactMetaData eamd = new EditableArtifactMetaData(); eamd.setOwner(DEVELOPER_2_USERNAME); @@ -344,7 +364,7 @@ public void testUpdateArtifactOwnerOnlyByOwner() throws Exception { }); assertForbidden(exception1); - //Should still be the original owner + // Should still be the original owner amd = client_dev1.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); assertEquals(DEVELOPER_USERNAME, amd.getOwner()); } diff --git a/app/src/test/java/io/apicurio/registry/auth/HeaderRoleSourceTest.java b/app/src/test/java/io/apicurio/registry/auth/HeaderRoleSourceTest.java index 12b7837c7c..3c65b93f71 100644 --- a/app/src/test/java/io/apicurio/registry/auth/HeaderRoleSourceTest.java +++ b/app/src/test/java/io/apicurio/registry/auth/HeaderRoleSourceTest.java @@ -1,7 +1,5 @@ package io.apicurio.registry.auth; - - import io.apicurio.common.apps.config.Info; import io.apicurio.registry.AbstractResourceTestBase; import io.apicurio.registry.model.GroupId; @@ -26,18 +24,14 @@ import static io.apicurio.registry.client.auth.VertXAuthFactory.buildOIDCWebClient; - @QuarkusTest @TestProfile(AuthTestProfileWithHeaderRoles.class) @Tag(ApicurioTestTags.SLOW) public class HeaderRoleSourceTest extends 
AbstractResourceTestBase { - private static final String TEST_CONTENT = "{\r\n" + - " \"type\" : \"record\",\r\n" + - " \"name\" : \"userInfo\",\r\n" + - " \"namespace\" : \"my.example\",\r\n" + - " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\r\n" + - "} "; + private static final String TEST_CONTENT = "{\r\n" + " \"type\" : \"record\",\r\n" + + " \"name\" : \"userInfo\",\r\n" + " \"namespace\" : \"my.example\",\r\n" + + " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\r\n" + "} "; @ConfigProperty(name = "quarkus.oidc.token-path") @Info(category = "auth", description = "Auth token endpoint", availableSince = "2.1.0.Final") @@ -45,48 +39,50 @@ public class HeaderRoleSourceTest extends AbstractResourceTestBase { @Override protected RegistryClient createRestClientV3() { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); return new RegistryClient(adapter); } @Test public void testLocalRoles() throws Exception { - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(getClass().getSimpleName(), ArtifactType.AVRO, TEST_CONTENT, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(getClass().getSimpleName(), + ArtifactType.AVRO, TEST_CONTENT, ContentTypes.APPLICATION_JSON); var rule = new io.apicurio.registry.rest.client.models.CreateRule(); rule.setConfig(ValidityLevel.FULL.name()); rule.setRuleType(io.apicurio.registry.rest.client.models.RuleType.VALIDITY); - var noRoleAdapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.NO_ROLE_CLIENT_ID, "test1")); + var noRoleAdapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.NO_ROLE_CLIENT_ID, "test1")); 
noRoleAdapter.setBaseUrl(registryV3ApiUrl); var noRoleClient = new RegistryClient(noRoleAdapter); - var readAdapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.READONLY_CLIENT_ID, "test1")); + var readAdapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.READONLY_CLIENT_ID, "test1")); readAdapter.setBaseUrl(registryV3ApiUrl); var readClient = new RegistryClient(readAdapter); - var devAdapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); + var devAdapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); devAdapter.setBaseUrl(registryV3ApiUrl); var devClient = new RegistryClient(devAdapter); - var adminAdapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adminAdapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adminAdapter.setBaseUrl(registryV3ApiUrl); var adminClient = new RegistryClient(adminAdapter); - // User is authenticated but no roles assigned - operations should fail. 
var exception1 = Assertions.assertThrows(Exception.class, () -> { - noRoleClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().get(); + noRoleClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .get(); }); assertForbidden(exception1); var exception2 = Assertions.assertThrows(Exception.class, () -> { - noRoleClient - .groups() - .byGroupId(UUID.randomUUID().toString()) - .artifacts() - .post(createArtifact); + noRoleClient.groups().byGroupId(UUID.randomUUID().toString()).artifacts().post(createArtifact); }); assertForbidden(exception2); @@ -95,17 +91,14 @@ public void testLocalRoles() throws Exception { }); assertForbidden(exception3); - // Now using the read client user should be able to read but nothing else - readClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().get(config -> { - config.headers.add("X-Registry-Role", "sr-readonly"); - }); + readClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .get(config -> { + config.headers.add("X-Registry-Role", "sr-readonly"); + }); var exception4 = Assertions.assertThrows(Exception.class, () -> { - readClient - .groups() - .byGroupId(UUID.randomUUID().toString()) - .artifacts() - .post(createArtifact, config -> { + readClient.groups().byGroupId(UUID.randomUUID().toString()).artifacts().post(createArtifact, + config -> { config.headers.add("X-Registry-Role", "sr-readonly"); }); }); @@ -119,16 +112,14 @@ public void testLocalRoles() throws Exception { assertForbidden(exception5); // the user can read and write with the developer client but not admin - devClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().get(config -> { - config.headers.add("X-Registry-Role", "sr-developer"); - }); - devClient - .groups() - .byGroupId(UUID.randomUUID().toString()) - .artifacts() - .post(createArtifact, config -> { - config.headers.add("X-Registry-Role", 
"sr-developer"); - }); + devClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .get(config -> { + config.headers.add("X-Registry-Role", "sr-developer"); + }); + devClient.groups().byGroupId(UUID.randomUUID().toString()).artifacts().post(createArtifact, + config -> { + config.headers.add("X-Registry-Role", "sr-developer"); + }); var exception6 = Assertions.assertThrows(Exception.class, () -> { devClient.admin().rules().post(rule, config -> { config.headers.add("X-Registry-Role", "sr-developer"); @@ -137,14 +128,12 @@ public void testLocalRoles() throws Exception { assertForbidden(exception6); // the user can do everything with the admin client - adminClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().get(config -> { - config.headers.add("X-Registry-Role", "sr-admin"); - }); - adminClient - .groups() - .byGroupId(UUID.randomUUID().toString()) - .artifacts() - .post(createArtifact, config -> { + adminClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .get(config -> { + config.headers.add("X-Registry-Role", "sr-admin"); + }); + adminClient.groups().byGroupId(UUID.randomUUID().toString()).artifacts().post(createArtifact, + config -> { config.headers.add("X-Registry-Role", "sr-admin"); }); adminClient.admin().rules().post(rule, config -> { diff --git a/app/src/test/java/io/apicurio/registry/auth/MojoAuthTest.java b/app/src/test/java/io/apicurio/registry/auth/MojoAuthTest.java index eeafd27239..cefecf5651 100644 --- a/app/src/test/java/io/apicurio/registry/auth/MojoAuthTest.java +++ b/app/src/test/java/io/apicurio/registry/auth/MojoAuthTest.java @@ -1,11 +1,9 @@ package io.apicurio.registry.auth; - - import io.apicurio.common.apps.config.Info; import io.apicurio.registry.maven.RegisterRegistryMojo; -import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.noprofile.maven.RegistryMojoTestBase; +import 
io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.utils.tests.ApicurioTestTags; import io.apicurio.registry.utils.tests.AuthTestProfile; import io.apicurio.registry.utils.tests.JWKSMockServer; @@ -45,7 +43,8 @@ public class MojoAuthTest extends RegistryMojoTestBase { @Override protected RegistryClient createRestClientV3() { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); return new RegistryClient(adapter); } diff --git a/app/src/test/java/io/apicurio/registry/auth/SimpleAuthTest.java b/app/src/test/java/io/apicurio/registry/auth/SimpleAuthTest.java index 6b11640cdc..d1eae3292b 100644 --- a/app/src/test/java/io/apicurio/registry/auth/SimpleAuthTest.java +++ b/app/src/test/java/io/apicurio/registry/auth/SimpleAuthTest.java @@ -52,7 +52,8 @@ public class SimpleAuthTest extends AbstractResourceTestBase { @Override protected RegistryClient createRestClientV3() { - var adapter =new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); return new RegistryClient(adapter); } @@ -70,13 +71,16 @@ protected RegistryClient createRestClientV3() { protected void assertArtifactNotFound(Exception exception) { Assertions.assertEquals(io.apicurio.registry.rest.client.models.Error.class, exception.getClass()); - Assertions.assertEquals("ArtifactNotFoundException", ((io.apicurio.registry.rest.client.models.Error)exception).getName()); - Assertions.assertEquals(404, ((io.apicurio.registry.rest.client.models.Error)exception).getErrorCode()); + Assertions.assertEquals("ArtifactNotFoundException", + 
((io.apicurio.registry.rest.client.models.Error) exception).getName()); + Assertions.assertEquals(404, + ((io.apicurio.registry.rest.client.models.Error) exception).getErrorCode()); } @Test public void testWrongCreds() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.WRONG_CREDS_CLIENT_ID, "test55")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.WRONG_CREDS_CLIENT_ID, "test55")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); var exception = Assertions.assertThrows(Exception.class, () -> { @@ -87,7 +91,8 @@ public void testWrongCreds() throws Exception { @Test public void testReadOnly() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.READONLY_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.READONLY_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); String artifactId = TestUtils.generateArtifactId(); @@ -110,14 +115,17 @@ public void testReadOnly() throws Exception { config.queryParameters.dryRun = true; }); - var devAdapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); + var devAdapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); devAdapter.setBaseUrl(registryV3ApiUrl); RegistryClient devClient = new RegistryClient(devAdapter); createArtifact.setArtifactId(artifactId); - VersionMetaData meta = devClient.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + VersionMetaData meta = devClient.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); - TestUtils.retry(() -> 
devClient.groups().byGroupId(groupId).artifacts().byArtifactId(meta.getArtifactId()).get()); + TestUtils.retry(() -> devClient.groups().byGroupId(groupId).artifacts() + .byArtifactId(meta.getArtifactId()).get()); assertNotNull(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); @@ -131,7 +139,8 @@ public void testReadOnly() throws Exception { @Test public void testDevRole() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); String artifactId = TestUtils.generateArtifactId(); @@ -140,9 +149,11 @@ public void testDevRole() throws Exception { createArtifact.setArtifactId(artifactId); client.groups().byGroupId(groupId).artifacts().post(createArtifact); - TestUtils.retry(() -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); + TestUtils.retry( + () -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); - assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); + assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); @@ -167,7 +178,8 @@ public void testDevRole() throws Exception { @Test public void testAdminRole() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, 
JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); String artifactId = TestUtils.generateArtifactId(); @@ -176,9 +188,11 @@ public void testAdminRole() throws Exception { createArtifact.setArtifactId(artifactId); client.groups().byGroupId(groupId).artifacts().post(createArtifact); - TestUtils.retry(() -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); + TestUtils.retry( + () -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); - assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); + assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); @@ -200,7 +214,8 @@ public void testAdminRole() throws Exception { @Test public void testAdminRoleBasicAuth() throws Exception { - var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(JWKSMockServer.BASIC_USER, JWKSMockServer.BASIC_PASSWORD)); + var adapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(JWKSMockServer.BASIC_USER, JWKSMockServer.BASIC_PASSWORD)); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); String artifactId = TestUtils.generateArtifactId(); @@ -209,9 +224,11 @@ public void testAdminRoleBasicAuth() throws Exception { createArtifact.setArtifactId(artifactId); client.groups().byGroupId(groupId).artifacts().post(createArtifact); - TestUtils.retry(() -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); + TestUtils.retry( + () -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); - 
assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); + assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); @@ -226,7 +243,8 @@ public void testAdminRoleBasicAuth() throws Exception { @Test public void testAdminRoleBasicAuthWrongCreds() throws Exception { - var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(JWKSMockServer.WRONG_CREDS_CLIENT_ID, UUID.randomUUID().toString())); + var adapter = new VertXRequestAdapter( + buildSimpleAuthWebClient(JWKSMockServer.WRONG_CREDS_CLIENT_ID, UUID.randomUUID().toString())); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); String artifactId = TestUtils.generateArtifactId(); @@ -244,11 +262,13 @@ public void testAdminRoleBasicAuthWrongCreds() throws Exception { @Test public void testOwnerOnlyAuthorization() throws Exception { - var devAdapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); + var devAdapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); devAdapter.setBaseUrl(registryV3ApiUrl); RegistryClient clientDev = new RegistryClient(devAdapter); - var adminAdapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adminAdapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adminAdapter.setBaseUrl(registryV3ApiUrl); RegistryClient clientAdmin = new RegistryClient(adminAdapter); @@ -268,7 +288,6 @@ public void testOwnerOnlyAuthorization() throws Exception { // But the admin 
user CAN make the change. clientAdmin.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).put(updatedMetaData); - // Now the Dev user will create an artifact String artifactId2 = TestUtils.generateArtifactId(); createArtifact.setArtifactId(artifactId2); @@ -278,43 +297,47 @@ public void testOwnerOnlyAuthorization() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig(CompatibilityLevel.BACKWARD.name()); - clientAdmin.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId2).rules().post(createRule); + clientAdmin.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId2).rules() + .post(createRule); } @Test public void testGetArtifactOwner() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); - //Preparation + // Preparation final String groupId = "testGetArtifactOwner"; final String artifactId = generateArtifactId(); final String version = "1"; - //Execution + // Execution createArtifact.setArtifactId(artifactId); - final VersionMetaData created = client.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + final VersionMetaData created = client.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); - //Assertions + // Assertions assertNotNull(created); assertEquals(groupId, created.getGroupId()); assertEquals(artifactId, created.getArtifactId()); assertEquals(version, created.getVersion()); assertEquals("developer-client", created.getOwner()); - //Get the artifact owner via the REST API and verify it + // Get the artifact owner via the REST API and verify it ArtifactMetaData amd = 
client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); assertEquals("developer-client", amd.getOwner()); } @Test public void testUpdateArtifactOwner() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); adapter.setBaseUrl(registryV3ApiUrl); RegistryClient client = new RegistryClient(adapter); - //Preparation + // Preparation final String groupId = "testUpdateArtifactOwner"; final String artifactId = generateArtifactId(); @@ -322,44 +345,47 @@ public void testUpdateArtifactOwner() throws Exception { final String name = "testUpdateArtifactOwnerName"; final String description = "testUpdateArtifactOwnerDescription"; - //Execution + // Execution createArtifact.setArtifactId(artifactId); createArtifact.getFirstVersion().setVersion(version); createArtifact.getFirstVersion().setName(name); createArtifact.getFirstVersion().setDescription(description); - final VersionMetaData created = client.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + final VersionMetaData created = client.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); - //Assertions + // Assertions assertNotNull(created); assertEquals(groupId, created.getGroupId()); assertEquals(artifactId, created.getArtifactId()); assertEquals(version, created.getVersion()); assertEquals("developer-client", created.getOwner()); - //Get the artifact owner via the REST API and verify it + // Get the artifact owner via the REST API and verify it ArtifactMetaData amd = client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); assertEquals("developer-client", amd.getOwner()); - //Update the owner + // Update the owner EditableArtifactMetaData eamd = new EditableArtifactMetaData(); 
eamd.setOwner("developer-2-client"); client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).put(eamd); - //Check that the update worked + // Check that the update worked amd = client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); assertEquals("developer-2-client", amd.getOwner()); } @Test public void testUpdateArtifactOwnerOnlyByOwner() throws Exception { - var adapter_dev1 = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); + var adapter_dev1 = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); adapter_dev1.setBaseUrl(registryV3ApiUrl); RegistryClient client_dev1 = new RegistryClient(adapter_dev1); - var adapter_dev2 = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_2_CLIENT_ID, "test1")); + var adapter_dev2 = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_2_CLIENT_ID, "test1")); adapter_dev2.setBaseUrl(registryV3ApiUrl); RegistryClient client_dev2 = new RegistryClient(adapter_dev2); - //Preparation + // Preparation final String groupId = "testUpdateArtifactOwnerOnlyByOwner"; final String artifactId = generateArtifactId(); @@ -367,25 +393,27 @@ public void testUpdateArtifactOwnerOnlyByOwner() throws Exception { final String name = "testUpdateArtifactOwnerOnlyByOwnerName"; final String description = "testUpdateArtifactOwnerOnlyByOwnerDescription"; - //Execution + // Execution createArtifact.setArtifactId(artifactId); createArtifact.getFirstVersion().setVersion(version); createArtifact.getFirstVersion().setName(name); createArtifact.getFirstVersion().setDescription(description); - final VersionMetaData created = client_dev1.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + final VersionMetaData created = client_dev1.groups().byGroupId(groupId).artifacts() + 
.post(createArtifact).getVersion(); - //Assertions + // Assertions assertNotNull(created); assertEquals(groupId, created.getGroupId()); assertEquals(artifactId, created.getArtifactId()); assertEquals(version, created.getVersion()); assertEquals("developer-client", created.getOwner()); - //Get the artifact owner via the REST API and verify it - ArtifactMetaData amd = client_dev1.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); + // Get the artifact owner via the REST API and verify it + ArtifactMetaData amd = client_dev1.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .get(); assertEquals("developer-client", amd.getOwner()); - //Try to update the owner by dev2 (should fail) + // Try to update the owner by dev2 (should fail) var exception1 = assertThrows(Exception.class, () -> { EditableArtifactMetaData eamd = new EditableArtifactMetaData(); eamd.setOwner("developer-2-client"); @@ -393,10 +421,9 @@ public void testUpdateArtifactOwnerOnlyByOwner() throws Exception { }); assertForbidden(exception1); - //Should still be the original owner + // Should still be the original owner amd = client_dev1.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); assertEquals("developer-client", amd.getOwner()); } } - diff --git a/app/src/test/java/io/apicurio/registry/ccompat/rest/CCompatCanonicalModeTest.java b/app/src/test/java/io/apicurio/registry/ccompat/rest/CCompatCanonicalModeTest.java index 634fa7a3ef..164e416870 100644 --- a/app/src/test/java/io/apicurio/registry/ccompat/rest/CCompatCanonicalModeTest.java +++ b/app/src/test/java/io/apicurio/registry/ccompat/rest/CCompatCanonicalModeTest.java @@ -35,13 +35,10 @@ public void canonicalModeEnabled() throws Exception { SchemaContent schemaContent = new SchemaContent(testSchemaExpanded); // POST - final Integer contentId1 = given() - .when() + final Integer contentId1 = given().when() .contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) 
.body(MAPPER.writeValueAsString(schemaContent)) - .post("/ccompat/v7/subjects/{subject}/versions", SUBJECT) - .then() - .statusCode(200) + .post("/ccompat/v7/subjects/{subject}/versions", SUBJECT).then().statusCode(200) .body("id", Matchers.allOf(Matchers.isA(Integer.class), Matchers.greaterThanOrEqualTo(0))) .extract().body().jsonPath().get("id"); @@ -49,26 +46,19 @@ public void canonicalModeEnabled() throws Exception { SchemaContent minifiedSchemaContent = new SchemaContent(resourceToString("avro-minified.avsc")); - //With the canonical hash mode enabled, getting the schema by content works - given() - .when() - .contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) + // With the canonical hash mode enabled, getting the schema by content works + given().when().contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) .body(MAPPER.writeValueAsString(minifiedSchemaContent)) - .post("/ccompat/v7/subjects/{subject}", SUBJECT) - .then() - .statusCode(200); + .post("/ccompat/v7/subjects/{subject}", SUBJECT).then().statusCode(200); // POST - //Create just returns the id from the existing schema, since the canonical hash is the same. - assertEquals(contentId1, given() - .when() - .contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) - .body(MAPPER.writeValueAsString(minifiedSchemaContent)) - .post("/ccompat/v7/subjects/{subject}/versions", SUBJECT) - .then() - .statusCode(200) - .body("id", Matchers.allOf(Matchers.isA(Integer.class), Matchers.equalTo(contentId1))) - .extract().body().jsonPath().get("id")); + // Create just returns the id from the existing schema, since the canonical hash is the same. 
+ assertEquals(contentId1, + given().when().contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) + .body(MAPPER.writeValueAsString(minifiedSchemaContent)) + .post("/ccompat/v7/subjects/{subject}/versions", SUBJECT).then().statusCode(200) + .body("id", Matchers.allOf(Matchers.isA(Integer.class), Matchers.equalTo(contentId1))) + .extract().body().jsonPath().get("id")); } @Test @@ -78,27 +68,18 @@ public void issue2902() throws Exception { SchemaContent schemaContent = new SchemaContent(schemaString1); // POST - SchemaId schemaId1 = given() - .when() - .contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) + SchemaId schemaId1 = given().when().contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) .body(MAPPER.writeValueAsString(schemaContent)) - .post("/ccompat/v7/subjects/{subject}/versions", subject1) - .then() - .statusCode(200) - .extract().as(SchemaId.class); + .post("/ccompat/v7/subjects/{subject}/versions", subject1).then().statusCode(200).extract() + .as(SchemaId.class); assertNotNull(schemaId1); assertNotNull(schemaId1.getId()); assertTrue(schemaId1.getId() > 0); - // We are able to get the original content - Schema schema1R = given() - .when() - .contentType(ContentTypes.JSON) - .get("/ccompat/v7/subjects/{subject}/versions/latest", subject1) - .then() - .statusCode(200) + Schema schema1R = given().when().contentType(ContentTypes.JSON) + .get("/ccompat/v7/subjects/{subject}/versions/latest", subject1).then().statusCode(200) .extract().as(Schema.class); assertEquals(schemaString1, schema1R.getSchema()); diff --git a/app/src/test/java/io/apicurio/registry/limits/LimitsTest.java b/app/src/test/java/io/apicurio/registry/limits/LimitsTest.java index 3973e0bbc2..e6a73d4d16 100644 --- a/app/src/test/java/io/apicurio/registry/limits/LimitsTest.java +++ b/app/src/test/java/io/apicurio/registry/limits/LimitsTest.java @@ -47,7 +47,8 @@ public void cleanUpData() { @Test public void testLimits() throws Exception { - InputStream jsonSchema = 
getClass().getResourceAsStream("/io/apicurio/registry/util/json-schema.json"); + InputStream jsonSchema = getClass() + .getResourceAsStream("/io/apicurio/registry/util/json-schema.json"); Assertions.assertNotNull(jsonSchema); String content = IoUtil.toString(jsonSchema); @@ -56,51 +57,36 @@ public void testLimits() throws Exception { createArtifact(artifactId, ArtifactType.JSON, content, ContentTypes.APPLICATION_JSON); createArtifactVersion(artifactId, content, ContentTypes.APPLICATION_JSON); - //valid metadata + // valid metadata EditableVersionMetaData meta = new EditableVersionMetaData(); meta.setName(StringUtils.repeat('a', 512)); meta.setDescription(StringUtils.repeat('a', 1024)); String fourBytesText = StringUtils.repeat('a', 4); var labels = new Labels(); - labels.setAdditionalData(Map.of( - StringUtils.repeat('a', 4), fourBytesText, - StringUtils.repeat('b', 4), fourBytesText)); + labels.setAdditionalData( + Map.of(StringUtils.repeat('a', 4), fourBytesText, StringUtils.repeat('b', 4), fourBytesText)); meta.setLabels(labels); - clientV3 - .groups() - // TODO: verify groupId = null cannot be used - .byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()) - .artifacts() - .byArtifactId(artifactId) - .versions() - .byVersionExpression("1") - .put(meta) - ; + clientV3.groups() + // TODO: verify groupId = null cannot be used + .byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("1").put(meta); - //invalid metadata + // invalid metadata EditableVersionMetaData invalidmeta = new EditableVersionMetaData(); invalidmeta.setName(StringUtils.repeat('a', 513)); invalidmeta.setDescription(StringUtils.repeat('a', 1025)); String fiveBytesText = StringUtils.repeat('a', 5); var labels2 = new Labels(); - labels2.setAdditionalData(Map.of( - StringUtils.repeat('a', 5), fiveBytesText, - StringUtils.repeat('b', 5), fiveBytesText)); + labels2.setAdditionalData( + Map.of(StringUtils.repeat('a', 5), 
fiveBytesText, StringUtils.repeat('b', 5), fiveBytesText)); invalidmeta.setLabels(labels2); var exception1 = Assertions.assertThrows(ApiException.class, () -> { - clientV3 - .groups() - .byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()) - .artifacts() - .byArtifactId(artifactId) - .versions() - .byVersionExpression("1") - .put(invalidmeta) - ; + clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("1").put(invalidmeta); }); Assertions.assertEquals(409, exception1.getResponseStatusCode()); - //schema number 3 , exceeds the max number of schemas + // schema number 3 , exceeds the max number of schemas var exception2 = Assertions.assertThrows(io.apicurio.registry.rest.client.models.Error.class, () -> { CreateArtifact createArtifact = new CreateArtifact(); createArtifact.setArtifactId(artifactId); @@ -112,11 +98,8 @@ public void testLimits() throws Exception { versionContent.setContent("{}"); versionContent.setContentType(ContentTypes.APPLICATION_JSON); - clientV3 - .groups() - .byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()) - .artifacts() - .post(createArtifact); + clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .post(createArtifact); }); Assertions.assertEquals(409, exception2.getErrorCode()); } diff --git a/app/src/test/java/io/apicurio/registry/limits/LimitsTestProfile.java b/app/src/test/java/io/apicurio/registry/limits/LimitsTestProfile.java index 1fd4d2d180..3e8aba8d7d 100644 --- a/app/src/test/java/io/apicurio/registry/limits/LimitsTestProfile.java +++ b/app/src/test/java/io/apicurio/registry/limits/LimitsTestProfile.java @@ -1,10 +1,10 @@ package io.apicurio.registry.limits; +import io.quarkus.test.junit.QuarkusTestProfile; + import java.util.HashMap; import java.util.Map; -import io.quarkus.test.junit.QuarkusTestProfile; - public class LimitsTestProfile implements QuarkusTestProfile { @Override @@ 
-12,14 +12,15 @@ public Map getConfigOverrides() { Map props = new HashMap<>(); props.put("apicurio.limits.config.max-total-schemas", "2"); props.put("apicurio.limits.config.max-artifact-properties", "2"); - props.put("apicurio.limits.config.max-property-key-size", "4"); //use text test + props.put("apicurio.limits.config.max-property-key-size", "4"); // use text test props.put("apicurio.limits.config.max-property-value-size", "4"); props.put("apicurio.limits.config.max-artifact-labels", "2"); props.put("apicurio.limits.config.max-label-size", "4"); props.put("apicurio.limits.config.max-name-length", "512"); props.put("apicurio.limits.config.max-description-length", "1024"); - //this will do nothing, no server will be available, it's just to test the usage of two decorators at the same time + // this will do nothing, no server will be available, it's just to test the usage of two decorators at + // the same time props.put("apicurio.events.sink.testsink", "http://localhost:8888/thisisfailingonpurpose"); return props; diff --git a/app/src/test/java/io/apicurio/registry/noprofile/ArtifactSearchTest.java b/app/src/test/java/io/apicurio/registry/noprofile/ArtifactSearchTest.java index a04ac4ea41..662eaad57b 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/ArtifactSearchTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/ArtifactSearchTest.java @@ -18,14 +18,10 @@ @QuarkusTest public class ArtifactSearchTest extends AbstractResourceTestBase { - private static final String OPENAPI_CONTENT_TEMPLATE = "{\r\n" + - " \"openapi\": \"3.0.2\",\r\n" + - " \"info\": {\r\n" + - " \"title\": \"TITLE\",\r\n" + - " \"version\": \"1.0.0\",\r\n" + - " \"description\": \"DESCRIPTION\"\r\n" + - " }\r\n" + - "}"; + private static final String OPENAPI_CONTENT_TEMPLATE = "{\r\n" + " \"openapi\": \"3.0.2\",\r\n" + + " \"info\": {\r\n" + " \"title\": \"TITLE\",\r\n" + + " \"version\": \"1.0.0\",\r\n" + " \"description\": \"DESCRIPTION\"\r\n" + + " }\r\n" + "}"; @Test 
void testCaseInsensitiveSearch() throws Exception { @@ -38,12 +34,13 @@ void testCaseInsensitiveSearch() throws Exception { String description = "The quick brown FOX jumped over the Lazy dog."; String content = OPENAPI_CONTENT_TEMPLATE.replace("TITLE", title).replace("DESCRIPTION", description); - createArtifact(groupId, artifactId, ArtifactType.OPENAPI, content, ContentTypes.APPLICATION_JSON, (createArtifact) -> { - createArtifact.setName(title); - createArtifact.setDescription(description); - createArtifact.getFirstVersion().setName(title); - createArtifact.getFirstVersion().setDescription(description); - }); + createArtifact(groupId, artifactId, ArtifactType.OPENAPI, content, ContentTypes.APPLICATION_JSON, + (createArtifact) -> { + createArtifact.setName(title); + createArtifact.setDescription(description); + createArtifact.getFirstVersion().setName(title); + createArtifact.getFirstVersion().setDescription(description); + }); // Search against the name, with the exact name of the artifact ArtifactSearchResults results = clientV3.search().artifacts().get(config -> { @@ -62,7 +59,8 @@ void testCaseInsensitiveSearch() throws Exception { metaData.setName(title); metaData.setDescription(description); Labels labels = new Labels(); - labels.setAdditionalData(Collections.singletonMap("testCaseInsensitiveSearchKey", "testCaseInsensitiveSearchValue")); + labels.setAdditionalData( + Collections.singletonMap("testCaseInsensitiveSearchKey", "testCaseInsensitiveSearchValue")); metaData.setLabels(labels); clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).put(metaData); @@ -73,7 +71,7 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"testCaseInsensitiveSearchKey"}; + config.queryParameters.labels = new String[] { "testCaseInsensitiveSearchKey" }; }); 
Assertions.assertNotNull(ires); Assertions.assertEquals(1, ires.getCount()); @@ -83,7 +81,7 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"testCaseInsensitiveSearchKey".toLowerCase()}; + config.queryParameters.labels = new String[] { "testCaseInsensitiveSearchKey".toLowerCase() }; }); Assertions.assertNotNull(ires); Assertions.assertEquals(1, ires.getCount()); @@ -93,7 +91,7 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"testCaseInsensitiveSearchKey".toUpperCase()}; + config.queryParameters.labels = new String[] { "testCaseInsensitiveSearchKey".toUpperCase() }; }); Assertions.assertNotNull(ires); Assertions.assertEquals(1, ires.getCount()); @@ -103,7 +101,7 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"TESTCaseInsensitiveSEARCHKey"}; + config.queryParameters.labels = new String[] { "TESTCaseInsensitiveSEARCHKey" }; }); Assertions.assertNotNull(ires); Assertions.assertEquals(1, ires.getCount()); @@ -115,7 +113,8 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"testCaseInsensitiveSearchKey:testCaseInsensitiveSearchValue"}; + config.queryParameters.labels = new String[] { + "testCaseInsensitiveSearchKey:testCaseInsensitiveSearchValue" }; }); Assertions.assertNotNull(propertiesSearch); Assertions.assertEquals(1, propertiesSearch.getCount()); @@ -125,7 +124,8 @@ 
void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"testCaseInsensitiveSearchKey:testCaseInsensitiveSearchValue".toLowerCase()}; + config.queryParameters.labels = new String[] { + "testCaseInsensitiveSearchKey:testCaseInsensitiveSearchValue".toLowerCase() }; }); Assertions.assertNotNull(propertiesSearch); Assertions.assertEquals(1, propertiesSearch.getCount()); @@ -135,7 +135,8 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"testCaseInsensitiveSearchKey:testCaseInsensitiveSearchValue".toUpperCase()}; + config.queryParameters.labels = new String[] { + "testCaseInsensitiveSearchKey:testCaseInsensitiveSearchValue".toUpperCase() }; }); Assertions.assertNotNull(propertiesSearch); Assertions.assertEquals(1, propertiesSearch.getCount()); @@ -145,7 +146,8 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"TESTCaseInsensitiveSEARCHKey:TESTCaseInsensitiveSearchVALUE".toUpperCase()}; + config.queryParameters.labels = new String[] { + "TESTCaseInsensitiveSEARCHKey:TESTCaseInsensitiveSearchVALUE".toUpperCase() }; }); Assertions.assertNotNull(propertiesSearch); Assertions.assertEquals(1, propertiesSearch.getCount()); @@ -157,7 +159,7 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"testCaseInsensitiveSearchKey"}; + config.queryParameters.labels = new String[] { 
"testCaseInsensitiveSearchKey" }; }); Assertions.assertNotNull(propertiesKeySearch); Assertions.assertEquals(1, propertiesKeySearch.getCount()); @@ -167,7 +169,7 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"testCaseInsensitiveSearchKey".toLowerCase()}; + config.queryParameters.labels = new String[] { "testCaseInsensitiveSearchKey".toLowerCase() }; }); Assertions.assertNotNull(propertiesKeySearch); Assertions.assertEquals(1, propertiesKeySearch.getCount()); @@ -177,7 +179,7 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"testCaseInsensitiveSearchKey".toUpperCase()}; + config.queryParameters.labels = new String[] { "testCaseInsensitiveSearchKey".toUpperCase() }; }); Assertions.assertNotNull(propertiesKeySearch); Assertions.assertEquals(1, propertiesKeySearch.getCount()); @@ -187,7 +189,7 @@ void testCaseInsensitiveSearch() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.offset = 0; config.queryParameters.limit = 10; - config.queryParameters.labels = new String[]{"TESTCaseInsensitiveSEARCHKey"}; + config.queryParameters.labels = new String[] { "TESTCaseInsensitiveSEARCHKey" }; }); Assertions.assertNotNull(propertiesKeySearch); Assertions.assertEquals(1, propertiesSearch.getCount()); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/ArtifactTypeTest.java b/app/src/test/java/io/apicurio/registry/noprofile/ArtifactTypeTest.java index be50fbd700..ddf6e86dda 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/ArtifactTypeTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/ArtifactTypeTest.java @@ -28,20 +28,12 @@ public class 
ArtifactTypeTest extends AbstractRegistryTestBase { @Inject ArtifactTypeUtilProviderFactory factory; - private static String PROTO_DATA = "syntax = \"proto2\";\n" + - "\n" + - "message ProtoSchema {\n" + - " required string message = 1;\n" + - " required int64 time = 2;\n" + - "}"; - - private static String PROTO_DATA_2 = "syntax = \"proto2\";\n" + - "\n" + - "message ProtoSchema {\n" + - " required string message = 1;\n" + - " required int64 time = 2;\n" + - " required string code = 3;\n" + - "}"; + private static String PROTO_DATA = "syntax = \"proto2\";\n" + "\n" + "message ProtoSchema {\n" + + " required string message = 1;\n" + " required int64 time = 2;\n" + "}"; + + private static String PROTO_DATA_2 = "syntax = \"proto2\";\n" + "\n" + "message ProtoSchema {\n" + + " required string message = 1;\n" + " required int64 time = 2;\n" + + " required string code = 3;\n" + "}"; @Test public void testAvro() { @@ -50,14 +42,16 @@ public void testAvro() { ArtifactTypeUtilProvider provider = factory.getArtifactTypeProvider(avro); CompatibilityChecker checker = provider.getCompatibilityChecker(); - CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD, - Collections.emptyList(), asTypedContent(avroString), Collections.emptyMap()); + CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility( + CompatibilityLevel.BACKWARD, Collections.emptyList(), asTypedContent(avroString), + Collections.emptyMap()); Assertions.assertTrue(compatibilityExecutionResult.isCompatible()); Assertions.assertTrue(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); String avroString2 = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\", \"qq\":\"ff\"}]}"; - compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD, Collections.singletonList(asTypedContent(avroString)), - asTypedContent(avroString2), Collections.emptyMap()); 
+ compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD, + Collections.singletonList(asTypedContent(avroString)), asTypedContent(avroString2), + Collections.emptyMap()); Assertions.assertTrue(compatibilityExecutionResult.isCompatible()); Assertions.assertTrue(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); } @@ -72,26 +66,32 @@ public void testJson() { Assertions.assertTrue(checker.testCompatibility(CompatibilityLevel.BACKWARD, Collections.emptyList(), asTypedContent(jsonString), Collections.emptyMap()).isCompatible()); - Assertions.assertTrue(checker.testCompatibility(CompatibilityLevel.BACKWARD, Collections.singletonList(asTypedContent(jsonString)), - asTypedContent(jsonString), Collections.emptyMap()).isCompatible()); + Assertions.assertTrue(checker.testCompatibility(CompatibilityLevel.BACKWARD, + Collections.singletonList(asTypedContent(jsonString)), asTypedContent(jsonString), + Collections.emptyMap()).isCompatible()); - CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD, - Collections.singletonList(asTypedContent(jsonString)), asTypedContent(incompatibleJsonString), Collections.emptyMap()); + CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility( + CompatibilityLevel.BACKWARD, Collections.singletonList(asTypedContent(jsonString)), + asTypedContent(incompatibleJsonString), Collections.emptyMap()); Assertions.assertFalse(compatibilityExecutionResult.isCompatible()); - Set incompatibleDifferences = compatibilityExecutionResult.getIncompatibleDifferences(); + Set incompatibleDifferences = compatibilityExecutionResult + .getIncompatibleDifferences(); Difference ageDiff = findDiffByPathUpdated(incompatibleDifferences, "/properties/age"); Difference zipCodeDiff = findDiffByPathUpdated(incompatibleDifferences, "/properties/zipcode"); - Assertions.assertEquals(DiffType.SUBSCHEMA_TYPE_CHANGED.getDescription(), 
ageDiff.getDiffType().getDescription()); + Assertions.assertEquals(DiffType.SUBSCHEMA_TYPE_CHANGED.getDescription(), + ageDiff.getDiffType().getDescription()); Assertions.assertEquals("/properties/age", ageDiff.getPathUpdated()); - Assertions.assertEquals(DiffType.SUBSCHEMA_TYPE_CHANGED.getDescription(), zipCodeDiff.getDiffType().getDescription()); + Assertions.assertEquals(DiffType.SUBSCHEMA_TYPE_CHANGED.getDescription(), + zipCodeDiff.getDiffType().getDescription()); Assertions.assertEquals("/properties/zipcode", zipCodeDiff.getPathUpdated()); } - private Difference findDiffByPathUpdated(Set incompatibleDifferences, String path) { - for(CompatibilityDifference cd : incompatibleDifferences) { + private Difference findDiffByPathUpdated(Set incompatibleDifferences, + String path) { + for (CompatibilityDifference cd : incompatibleDifferences) { JsonSchemaCompatibilityDifference jsonSchemaCompatibilityDifference = (JsonSchemaCompatibilityDifference) cd; Difference diff = jsonSchemaCompatibilityDifference.getDifference(); - if(diff.getPathUpdated().equals(path)) { + if (diff.getPathUpdated().equals(path)) { return diff; } } @@ -100,51 +100,31 @@ private Difference findDiffByPathUpdated(Set incompatib @Test public void testProtobuf() { - String data = "syntax = \"proto3\";\n" + - "package test;\n" + - "\n" + - "message Channel {\n" + - " int64 id = 1;\n" + - " string name = 2;\n" + - " string description = 3;\n" + - "}\n" + - "\n" + - "message NextRequest {}\n" + - "message PreviousRequest {}\n" + - "\n" + - "service ChannelChanger {\n" + - "\trpc Next(stream NextRequest) returns (Channel);\n" + - "\trpc Previous(PreviousRequest) returns (stream Channel);\n" + - "}\n"; + String data = "syntax = \"proto3\";\n" + "package test;\n" + "\n" + "message Channel {\n" + + " int64 id = 1;\n" + " string name = 2;\n" + " string description = 3;\n" + "}\n" + "\n" + + "message NextRequest {}\n" + "message PreviousRequest {}\n" + "\n" + + "service ChannelChanger {\n" + "\trpc 
Next(stream NextRequest) returns (Channel);\n" + + "\trpc Previous(PreviousRequest) returns (stream Channel);\n" + "}\n"; String protobuf = ArtifactType.PROTOBUF; ArtifactTypeUtilProvider provider = factory.getArtifactTypeProvider(protobuf); CompatibilityChecker checker = provider.getCompatibilityChecker(); - CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD, - Collections.emptyList(), asTypedContent(data, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); + CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility( + CompatibilityLevel.BACKWARD, Collections.emptyList(), + asTypedContent(data, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertTrue(compatibilityExecutionResult.isCompatible()); Assertions.assertTrue(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - String data2 = "syntax = \"proto3\";\n" + - "package test;\n" + - "\n" + - "message Channel {\n" + - " int64 id = 1;\n" + - " string name = 2;\n" + - //" reserved 3;\n" + - //" reserved \"description\";\n" + - " string description = 3;\n" + // TODO - " string newff = 4;\n" + - "}\n" + - "\n" + - "message NextRequest {}\n" + - "message PreviousRequest {}\n" + - "\n" + - "service ChannelChanger {\n" + - "\trpc Next(stream NextRequest) returns (Channel);\n" + - "\trpc Previous(PreviousRequest) returns (stream Channel);\n" + - "}\n"; + String data2 = "syntax = \"proto3\";\n" + "package test;\n" + "\n" + "message Channel {\n" + + " int64 id = 1;\n" + " string name = 2;\n" + + // " reserved 3;\n" + + // " reserved \"description\";\n" + + " string description = 3;\n" + // TODO + " string newff = 4;\n" + "}\n" + "\n" + "message NextRequest {}\n" + + "message PreviousRequest {}\n" + "\n" + "service ChannelChanger {\n" + + "\trpc Next(stream NextRequest) returns (Channel);\n" + + "\trpc Previous(PreviousRequest) returns (stream Channel);\n" + "}\n"; 
compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD, Collections.singletonList(asTypedContent(data, ContentTypes.APPLICATION_PROTOBUF)), @@ -152,63 +132,53 @@ public void testProtobuf() { Assertions.assertTrue(compatibilityExecutionResult.isCompatible()); Assertions.assertTrue(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - String data3 = "syntax = \"proto3\";\n" + - "package test;\n" + - "\n" + - "message Channel {\n" + - " int64 id = 1;\n" + - " string name = 2;\n" + - " string description = 4;\n" + - "}\n" + - "\n" + - "message NextRequest {}\n" + - "message PreviousRequest {}\n" + - "\n" + - "service ChannelChanger {\n" + - "\trpc Next(stream NextRequest) returns (Channel);\n" + - "\trpc Previous(PreviousRequest) returns (stream Channel);\n" + - "}\n"; + String data3 = "syntax = \"proto3\";\n" + "package test;\n" + "\n" + "message Channel {\n" + + " int64 id = 1;\n" + " string name = 2;\n" + " string description = 4;\n" + "}\n" + "\n" + + "message NextRequest {}\n" + "message PreviousRequest {}\n" + "\n" + + "service ChannelChanger {\n" + "\trpc Next(stream NextRequest) returns (Channel);\n" + + "\trpc Previous(PreviousRequest) returns (stream Channel);\n" + "}\n"; compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD, - Collections.singletonList(asTypedContent(data, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(data3, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); + Collections.singletonList(asTypedContent(data, ContentTypes.APPLICATION_PROTOBUF)), + asTypedContent(data3, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertFalse(compatibilityExecutionResult.isCompatible()); Assertions.assertFalse(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - Assertions.assertEquals("The new version of the protobuf artifact is not backward compatible.", 
compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getDescription()); - Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getContext()); + Assertions.assertEquals("The new version of the protobuf artifact is not backward compatible.", + compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation() + .getDescription()); + Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator() + .next().asRuleViolation().getContext()); } @Test public void testProtobufV2() { - String data = "syntax = \"proto2\";\n" + - "\n" + - "message ProtoSchema {\n" + - " required string message = 1;\n" + - " required int64 time = 2;\n" + - "}"; + String data = "syntax = \"proto2\";\n" + "\n" + "message ProtoSchema {\n" + + " required string message = 1;\n" + " required int64 time = 2;\n" + "}"; String protobuf = ArtifactType.PROTOBUF; ArtifactTypeUtilProvider provider = factory.getArtifactTypeProvider(protobuf); CompatibilityChecker checker = provider.getCompatibilityChecker(); - CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD, - Collections.emptyList(), asTypedContent(data, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); + CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility( + CompatibilityLevel.BACKWARD, Collections.emptyList(), + asTypedContent(data, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertTrue(compatibilityExecutionResult.isCompatible()); Assertions.assertTrue(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - String data2 = "syntax = \"proto2\";\n" + - "\n" + - "message ProtoSchema {\n" + - " required string message = 1;\n" + - " required int64 time = 2;\n" + - " required string code = 3;\n" + - "}"; + String data2 = "syntax = 
\"proto2\";\n" + "\n" + "message ProtoSchema {\n" + + " required string message = 1;\n" + " required int64 time = 2;\n" + + " required string code = 3;\n" + "}"; compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD, - Collections.singletonList(asTypedContent(data, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(data2, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); + Collections.singletonList(asTypedContent(data, ContentTypes.APPLICATION_PROTOBUF)), + asTypedContent(data2, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertFalse(compatibilityExecutionResult.isCompatible()); Assertions.assertFalse(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - Assertions.assertEquals("The new version of the protobuf artifact is not backward compatible.", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getDescription()); - Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getContext()); + Assertions.assertEquals("The new version of the protobuf artifact is not backward compatible.", + compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation() + .getDescription()); + Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator() + .next().asRuleViolation().getContext()); } @Test @@ -217,35 +187,44 @@ public void testProtobufBackwardTransitive() { ArtifactTypeUtilProvider provider = factory.getArtifactTypeProvider(protobuf); CompatibilityChecker checker = provider.getCompatibilityChecker(); - //adding a required field is not allowed since the first schema does not have it, should fail - CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.BACKWARD_TRANSITIVE, - List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), 
asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), + // adding a required field is not allowed since the first schema does not have it, should fail + CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility( + CompatibilityLevel.BACKWARD_TRANSITIVE, + List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), + asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertFalse(compatibilityExecutionResult.isCompatible()); Assertions.assertFalse(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - Assertions.assertEquals("The new version of the protobuf artifact is not backward compatible.", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getDescription()); - Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getContext()); + Assertions.assertEquals("The new version of the protobuf artifact is not backward compatible.", + compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation() + .getDescription()); + Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator() + .next().asRuleViolation().getContext()); } - @Test public void testProtobufForward() { String protobuf = ArtifactType.PROTOBUF; ArtifactTypeUtilProvider provider = factory.getArtifactTypeProvider(protobuf); CompatibilityChecker checker = provider.getCompatibilityChecker(); - //adding a required field is not allowed, should fail - CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.FORWARD, + // adding a required field is not allowed, should fail + CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility( + CompatibilityLevel.FORWARD, 
Collections.singletonList(asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertFalse(compatibilityExecutionResult.isCompatible()); Assertions.assertFalse(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - Assertions.assertEquals("The new version of the protobuf artifact is not forward compatible.", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getDescription()); - Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getContext()); + Assertions.assertEquals("The new version of the protobuf artifact is not forward compatible.", + compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation() + .getDescription()); + Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator() + .next().asRuleViolation().getContext()); - //adding a required field is allowed since we're only checking forward, not forward transitive + // adding a required field is allowed since we're only checking forward, not forward transitive compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.FORWARD, - List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), + List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), + asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertTrue(compatibilityExecutionResult.isCompatible()); Assertions.assertTrue(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); @@ -257,21 +236,28 @@ public void testProtobufForwardTransitive() { ArtifactTypeUtilProvider provider = 
factory.getArtifactTypeProvider(protobuf); CompatibilityChecker checker = provider.getCompatibilityChecker(); - //must pass, all the existing schemas are the same - CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.FORWARD_TRANSITIVE, - List.of(asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), + // must pass, all the existing schemas are the same + CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility( + CompatibilityLevel.FORWARD_TRANSITIVE, + List.of(asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF), + asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertTrue(compatibilityExecutionResult.isCompatible()); Assertions.assertTrue(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - //adding a required field is not allowed since we're now checking forward transitive and the field is not present, not forward transitive + // adding a required field is not allowed since we're now checking forward transitive and the field is + // not present, not forward transitive compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.FORWARD_TRANSITIVE, - List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), + List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), + asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertFalse(compatibilityExecutionResult.isCompatible()); Assertions.assertFalse(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - Assertions.assertEquals("The new version of the protobuf artifact is not 
forward compatible.", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getDescription()); - Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getContext()); + Assertions.assertEquals("The new version of the protobuf artifact is not forward compatible.", + compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation() + .getDescription()); + Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator() + .next().asRuleViolation().getContext()); } @Test @@ -280,28 +266,40 @@ public void testProtobufFull() { ArtifactTypeUtilProvider provider = factory.getArtifactTypeProvider(protobuf); CompatibilityChecker checker = provider.getCompatibilityChecker(); - //adding a required field is not allowed since we're now checking forward transitive and the field is not present, not forward transitive - CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.FULL, - List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); + // adding a required field is not allowed since we're now checking forward transitive and the field is + // not present, not forward transitive + CompatibilityExecutionResult compatibilityExecutionResult = checker.testCompatibility( + CompatibilityLevel.FULL, + List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF)), + asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertFalse(compatibilityExecutionResult.isCompatible()); Assertions.assertFalse(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - Assertions.assertEquals("The new version of the protobuf artifact is not fully compatible.", 
compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getDescription()); - Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getContext()); - - //must pass, since the schema is both backwards and forwards compatible with the latest existing schema + Assertions.assertEquals("The new version of the protobuf artifact is not fully compatible.", + compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation() + .getDescription()); + Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator() + .next().asRuleViolation().getContext()); + + // must pass, since the schema is both backwards and forwards compatible with the latest existing + // schema compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.FULL, - List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), + List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), + asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertTrue(compatibilityExecutionResult.isCompatible()); Assertions.assertTrue(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - //must fail, the schema is not compatible with the first existing schema + // must fail, the schema is not compatible with the first existing schema compatibilityExecutionResult = checker.testCompatibility(CompatibilityLevel.FULL_TRANSITIVE, - List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), + List.of(asTypedContent(PROTO_DATA, ContentTypes.APPLICATION_PROTOBUF), + asTypedContent(PROTO_DATA_2, ContentTypes.APPLICATION_PROTOBUF)), asTypedContent(PROTO_DATA_2, 
ContentTypes.APPLICATION_PROTOBUF), Collections.emptyMap()); Assertions.assertFalse(compatibilityExecutionResult.isCompatible()); Assertions.assertFalse(compatibilityExecutionResult.getIncompatibleDifferences().isEmpty()); - Assertions.assertEquals("The new version of the protobuf artifact is not fully compatible.", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getDescription()); - Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation().getContext()); + Assertions.assertEquals("The new version of the protobuf artifact is not fully compatible.", + compatibilityExecutionResult.getIncompatibleDifferences().iterator().next().asRuleViolation() + .getDescription()); + Assertions.assertEquals("/", compatibilityExecutionResult.getIncompatibleDifferences().iterator() + .next().asRuleViolation().getContext()); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/JsonSerdeTest.java b/app/src/test/java/io/apicurio/registry/noprofile/JsonSerdeTest.java index 7ee686e2a8..433156f2a2 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/JsonSerdeTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/JsonSerdeTest.java @@ -24,13 +24,15 @@ public class JsonSerdeTest extends AbstractResourceTestBase { @Test public void testSchema() throws Exception { String groupId = "JsonSerdeTest_testSchema"; - String jsonSchema = new String(getClass().getResourceAsStream("/io/apicurio/registry/util/json-schema.json").readAllBytes(), StandardCharsets.UTF_8); + String jsonSchema = new String( + getClass().getResourceAsStream("/io/apicurio/registry/util/json-schema.json").readAllBytes(), + StandardCharsets.UTF_8); Assertions.assertNotNull(jsonSchema); String artifactId = generateArtifactId(); - long globalId = createArtifact(groupId, artifactId + "-value", ArtifactType.JSON, jsonSchema, ContentTypes.APPLICATION_JSON) - .getVersion().getGlobalId(); + long 
globalId = createArtifact(groupId, artifactId + "-value", ArtifactType.JSON, jsonSchema, + ContentTypes.APPLICATION_JSON).getVersion().getGlobalId(); // make sure we have schema registered retry(() -> clientV3.ids().globalIds().byGlobalId(globalId).get()); @@ -38,7 +40,8 @@ public void testSchema() throws Exception { Person person = new Person("Ales", "Justin", 23); try (JsonSchemaKafkaSerializer serializer = new JsonSchemaKafkaSerializer<>(clientV3, true); - JsonSchemaKafkaDeserializer deserializer = new JsonSchemaKafkaDeserializer<>(clientV3, true)) { + JsonSchemaKafkaDeserializer deserializer = new JsonSchemaKafkaDeserializer<>(clientV3, + true)) { Map configs = Map.of(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, groupId); serializer.configure(configs, false); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/VersionStateTest.java b/app/src/test/java/io/apicurio/registry/noprofile/VersionStateTest.java index 6aa1f89444..6935a8e1bc 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/VersionStateTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/VersionStateTest.java @@ -28,31 +28,37 @@ public void testSmoke() throws Exception { String groupId = "VersionStateTest_testSmoke"; String artifactId = generateArtifactId(); - createArtifact(groupId, artifactId, ArtifactType.JSON, "{\"type\": \"string\"}", ContentTypes.APPLICATION_JSON); + createArtifact(groupId, artifactId, ArtifactType.JSON, "{\"type\": \"string\"}", + ContentTypes.APPLICATION_JSON); createArtifactVersion(groupId, artifactId, "{\"type\": \"int\"}", ContentTypes.APPLICATION_JSON); createArtifactVersion(groupId, artifactId, "{\"type\": \"float\"}", ContentTypes.APPLICATION_JSON); - VersionMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + 
.versions().byVersionExpression("branch=latest").get(); Assertions.assertEquals("3", amd.getVersion()); // disable latest - + EditableVersionMetaData evmd = toEditableVersionMetaData(VersionState.DISABLED); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(amd.getVersion()).put(evmd); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression(amd.getVersion()).put(evmd); - VersionMetaData tvmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("3").get(); + VersionMetaData tvmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("3").get(); Assertions.assertEquals("3", tvmd.getVersion()); Assertions.assertEquals(VersionState.DISABLED, tvmd.getState()); // Latest artifact version (3) is disabled, this will return a previous version - VersionMetaData tamd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData tamd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").get(); Assertions.assertEquals("2", tamd.getVersion()); Assertions.assertNull(tamd.getDescription()); // cannot get a disabled artifact version *content* var exception = assertThrows(io.apicurio.registry.rest.client.models.Error.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("3").content().get(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("3").content().get(); }); Assertions.assertEquals(404, exception.getErrorCode()); Assertions.assertEquals("VersionNotFoundException", exception.getName()); @@ -61,42 +67,52 @@ public void testSmoke() throws Exception { EditableVersionMetaData emd = 
new EditableVersionMetaData(); String description = "Testing artifact state"; emd.setDescription(description); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("3").put(emd); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("3").put(emd); { - VersionMetaData innerAvmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("3").get(); + VersionMetaData innerAvmd = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("3").get(); Assertions.assertEquals("3", innerAvmd.getVersion()); Assertions.assertEquals(description, innerAvmd.getDescription()); } - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("3").put(toEditableVersionMetaData(VersionState.DEPRECATED)); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("3").put(toEditableVersionMetaData(VersionState.DEPRECATED)); - tamd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + tamd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").get(); Assertions.assertEquals("3", tamd.getVersion()); // should be back to v3 Assertions.assertEquals(tamd.getDescription(), description); - InputStream latestArtifact = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get(); + InputStream latestArtifact = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").content().get(); Assertions.assertNotNull(latestArtifact); latestArtifact.close(); - InputStream version = 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("2").content().get(); + InputStream version = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("2").content().get(); Assertions.assertNotNull(version); version.close(); { - VersionMetaData innerAmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData innerAmd = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); Assertions.assertEquals("3", innerAmd.getVersion()); Assertions.assertEquals(description, innerAmd.getDescription()); } // can revert back to enabled from deprecated - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("3").put(toEditableVersionMetaData(VersionState.ENABLED)); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("3").put(toEditableVersionMetaData(VersionState.ENABLED)); { - VersionMetaData innerAmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData innerAmd = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); Assertions.assertEquals("3", innerAmd.getVersion()); // should still be latest (aka 3) Assertions.assertEquals(description, innerAmd.getDescription()); - VersionMetaData innerVmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("1").get(); + VersionMetaData innerVmd = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("1").get(); Assertions.assertNull(innerVmd.getDescription()); } } diff --git 
a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/ConfluentSerdeTest.java b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/ConfluentSerdeTest.java index 5e1378b7db..30f1e80d94 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/ConfluentSerdeTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/ConfluentSerdeTest.java @@ -11,7 +11,6 @@ import java.util.Properties; - @QuarkusTest public class ConfluentSerdeTest extends AbstractResourceTestBase { @@ -23,13 +22,14 @@ public class ConfluentSerdeTest extends AbstractResourceTestBase { public void testProtobufSchemaWithReferences() { Properties properties = new Properties(); String serverUrl = "http://localhost:%s/apis/ccompat/v7"; - properties.setProperty(KafkaProtobufSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, String.format(serverUrl, testPort)); + properties.setProperty(KafkaProtobufSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, + String.format(serverUrl, testPort)); properties.setProperty(KafkaProtobufSerializerConfig.AUTO_REGISTER_SCHEMAS, "true"); KafkaProtobufSerializer kafkaProtobufSerializer = new KafkaProtobufSerializer(); kafkaProtobufSerializer.configure(properties, false); - byte[] data = kafkaProtobufSerializer.serialize("test", TableNotification.newBuilder().build()); + byte[] data = kafkaProtobufSerializer.serialize("test", TableNotification.newBuilder().build()); KafkaProtobufDeserializer protobufKafkaDeserializer = new KafkaProtobufDeserializer(); protobufKafkaDeserializer.configure(properties, false); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/SubjectVersionStringTest.java b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/SubjectVersionStringTest.java index 9567b727f5..702c4d46bd 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/SubjectVersionStringTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/SubjectVersionStringTest.java @@ -16,7 +16,6 @@ import 
static io.restassured.RestAssured.given; - @QuarkusTest public class SubjectVersionStringTest extends AbstractResourceTestBase { @@ -33,49 +32,31 @@ public void testSubjectVersionString() throws Exception { var schemaContent2 = new SchemaContent(schema2); // Create first - var cid1 = given() - .log().all() - .when() - .contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) + var cid1 = given().log().all().when().contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) .body(objectMapper.writeValueAsString(schemaContent1)) - .post("/ccompat/v7/subjects/{subject}/versions", SUBJECT) - .then() - .statusCode(200) - .extract().as(Schema.class); + .post("/ccompat/v7/subjects/{subject}/versions", SUBJECT).then().statusCode(200).extract() + .as(Schema.class); Assertions.assertNotNull(cid1); - var versions1 = given() - .log().all() - .when() - .get("/ccompat/v7/subjects/{subject}/versions", SUBJECT) - .then() - .statusCode(200) - .extract().as(new TypeRef>() {}); + var versions1 = given().log().all().when().get("/ccompat/v7/subjects/{subject}/versions", SUBJECT) + .then().statusCode(200).extract().as(new TypeRef>() { + }); Assertions.assertEquals(1, versions1.size()); var version1 = versions1.get(0); // Create second - var cid2 = given() - .log().all() - .when() - .contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) + var cid2 = given().log().all().when().contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) .body(objectMapper.writeValueAsString(schemaContent2)) - .post("/ccompat/v7/subjects/{subject}/versions", SUBJECT) - .then() - .statusCode(200) - .extract().as(Schema.class); + .post("/ccompat/v7/subjects/{subject}/versions", SUBJECT).then().statusCode(200).extract() + .as(Schema.class); Assertions.assertNotNull(cid2); - var versions2 = given() - .log().all() - .when() - .get("/ccompat/v7/subjects/{subject}/versions", SUBJECT) - .then() - .statusCode(200) - .extract().as(new TypeRef>() {}); + var versions2 = 
given().log().all().when().get("/ccompat/v7/subjects/{subject}/versions", SUBJECT) + .then().statusCode(200).extract().as(new TypeRef>() { + }); Assertions.assertEquals(2, versions2.size()); versions2.removeAll(versions1); @@ -91,29 +72,19 @@ public void testSubjectVersionString() throws Exception { } private Schema getSubjectVersion(String subject, String version) { - var response = given() - .log().all() - .when() - .get("/ccompat/v7/subjects/{subject}/versions/{version}", subject, version) - .then() - .extract().asString(); + var response = given().log().all().when() + .get("/ccompat/v7/subjects/{subject}/versions/{version}", subject, version).then().extract() + .asString(); log.info("Response to get version {} of subject {} is: {}", version, subject, response); - return given() - .log().all() - .when() - .get("/ccompat/v7/subjects/{subject}/versions/{version}", subject, version) - .then() - .statusCode(200) - .extract().as(Schema.class); + return given().log().all().when() + .get("/ccompat/v7/subjects/{subject}/versions/{version}", subject, version).then() + .statusCode(200).extract().as(Schema.class); } private void getSubjectVersionFail(String subject, String version, int expectedStatusCode) { - given() - .when() - .get("/ccompat/v7/subjects/{subject}/versions/{version}", subject, version) - .then() + given().when().get("/ccompat/v7/subjects/{subject}/versions/{version}", subject, version).then() .statusCode(expectedStatusCode); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/ConfluentClientTest.java b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/ConfluentClientTest.java index 9f388e5f8a..3b55649e75 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/ConfluentClientTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/ConfluentClientTest.java @@ -71,21 +71,23 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static 
org.junit.jupiter.api.Assertions.fail; - @QuarkusTest -@SuppressWarnings({"unchecked", "rawtypes"}) +@SuppressWarnings({ "unchecked", "rawtypes" }) public class ConfluentClientTest extends AbstractResourceTestBase { public SchemaRegistryClient buildClient() { - final List schemaProviders = Arrays - .asList(new JsonSchemaProvider(), new AvroSchemaProvider(), new ProtobufSchemaProvider()); - return new CachedSchemaRegistryClient(new RestService("http://localhost:" + testPort + "/apis/ccompat/v7"), 3, schemaProviders, null, Map.of(Headers.GROUP_ID, "confluentV7-test-group")); + final List schemaProviders = Arrays.asList(new JsonSchemaProvider(), + new AvroSchemaProvider(), new ProtobufSchemaProvider()); + return new CachedSchemaRegistryClient( + new RestService("http://localhost:" + testPort + "/apis/ccompat/v7"), 3, schemaProviders, + null, Map.of(Headers.GROUP_ID, "confluentV7-test-group")); } @AfterEach protected void afterEach() throws Exception { try { - clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().delete(); + clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .delete(); } catch (Exception ignored) { } } @@ -99,11 +101,13 @@ public void testSerdeProtobufSchema() { final Map config = new HashMap<>(); config.put(KafkaProtobufSerializerConfig.AUTO_REGISTER_SCHEMAS, true); - config.put(KafkaProtobufSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:" + testPort + "/apis/ccompat/v7"); - config.put(KafkaProtobufDeserializerConfig.SPECIFIC_PROTOBUF_VALUE_TYPE, TestCmmn.UUID.class.getName()); + config.put(KafkaProtobufSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, + "http://localhost:" + testPort + "/apis/ccompat/v7"); + config.put(KafkaProtobufDeserializerConfig.SPECIFIC_PROTOBUF_VALUE_TYPE, + TestCmmn.UUID.class.getName()); try (KafkaProtobufSerializer serializer = new KafkaProtobufSerializer<>(client); - KafkaProtobufDeserializer deserializer = new 
KafkaProtobufDeserializer<>(client)) { + KafkaProtobufDeserializer deserializer = new KafkaProtobufDeserializer<>(client)) { serializer.configure(config, false); deserializer.configure(config, false); @@ -128,26 +132,26 @@ public void testOrphanedContent() throws Exception { String orphanedContentSchema = "{\"type\":\"record\",\"name\":\"testOrphanedContent\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}"; - //Create schema with the first id + // Create schema with the first id ParsedSchema schema = new AvroSchema(orphanedContentSchema); int id1 = client.register(subject, schema); // Reset the client cache so that the next line actually does what we want. client.reset(); - //The schema can be fetched with this particular id + // The schema can be fetched with this particular id TestUtils.retry(() -> client.getSchemaById(id1)); - //First sotft delete subject, then hard delete, the content must be claimed as orphaned + // First sotft delete subject, then hard delete, the content must be claimed as orphaned client.deleteSubject(subject); client.deleteSubject(subject, true); - //Register schema again, the id must be different + // Register schema again, the id must be different int id2 = client.register(subject, schema); Assertions.assertNotEquals(id1, id2); - //The schema must be retrievable using the new id + // The schema must be retrievable using the new id TestUtils.retry(() -> client.getSchemaById(id2)); } @@ -156,7 +160,8 @@ public void testSimpleOps() throws Exception { SchemaRegistryClient client = buildClient(); final String subject = generateArtifactId(); - ParsedSchema schema1 = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}"); + ParsedSchema schema1 = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}"); int id1 = client.register(subject, schema1); // Reset the client cache so that the next line actually does what we want. 
@@ -164,7 +169,8 @@ public void testSimpleOps() throws Exception { TestUtils.retry(() -> client.getSchemaById(id1)); - ParsedSchema schema2 = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecord2\",\"fields\":[{\"name\":\"f2\",\"type\":\"string\"}]}"); + ParsedSchema schema2 = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord2\",\"fields\":[{\"name\":\"f2\",\"type\":\"string\"}]}"); int id2 = client.register(subject, schema2); TestUtils.retry(() -> client.getSchemaById(id2)); @@ -218,7 +224,7 @@ public void testSerdeAvro() throws Exception { }); try (KafkaAvroSerializer serializer = new KafkaAvroSerializer(client); - KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(client)) { + KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(client)) { GenericData.Record record = new GenericData.Record(new Schema.Parser().parse(rawSchema)); record.put("bar", "somebar"); @@ -241,10 +247,13 @@ public void testSerdeJsonSchema() { final Properties config = new Properties(); config.put(KafkaJsonSchemaSerializerConfig.AUTO_REGISTER_SCHEMAS, true); - config.put(KafkaJsonSchemaSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:" + testPort + "/apis/ccompat/v7"); + config.put(KafkaJsonSchemaSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, + "http://localhost:" + testPort + "/apis/ccompat/v7"); - try (KafkaJsonSchemaSerializer serializer = new KafkaJsonSchemaSerializer(client, new HashMap(config)); - KafkaJsonSchemaDeserializer deserializer = new KafkaJsonSchemaDeserializer(client, config, SchemaContent.class)) { + try ( + KafkaJsonSchemaSerializer serializer = new KafkaJsonSchemaSerializer(client, new HashMap(config)); + KafkaJsonSchemaDeserializer deserializer = new KafkaJsonSchemaDeserializer(client, config, + SchemaContent.class)) { byte[] bytes = serializer.serialize(subject, schemaContent); Object deserialized = deserializer.deserialize(subject, bytes); @@ -271,7 +280,8 @@ public void testDelete() throws Exception { String subject = 
generateArtifactId(); - ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); + ParsedSchema schema = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); int id = client.register(subject, schema); client.reset(); @@ -308,7 +318,8 @@ public void testGlobalRule() throws Exception { clientV3.admin().rules().post(createRule); String subject = generateArtifactId(); - ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); + ParsedSchema schema = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); int id = client.register(subject, schema); client.reset(); @@ -327,66 +338,55 @@ public void testGlobalRule() throws Exception { }); } - @Test public void testConverter_PreRegisterSchema() { String subject = generateArtifactId(); String name = "myr" + ThreadLocalRandom.current().nextInt(0, Integer.MAX_VALUE); - testConverter( - subject, - name, - false, - (client) -> { - try { - ParsedSchema schema = new AvroSchema(String.format("{\"type\":\"record\",\"name\":\"%s\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}],\"connect.name\":\"%s\"}", name, name)); - int id = client.register(subject + "-value", schema); - client.reset(); - // can be async ... - ParsedSchema retry = retry(() -> client.getSchemaById(id)); - Assertions.assertNotNull(retry); - } catch (Exception e) { - throw new IllegalStateException(e); - } - }, - (c, b) -> { - } - ); + testConverter(subject, name, false, (client) -> { + try { + ParsedSchema schema = new AvroSchema(String.format( + "{\"type\":\"record\",\"name\":\"%s\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}],\"connect.name\":\"%s\"}", + name, name)); + int id = client.register(subject + "-value", schema); + client.reset(); + // can be async ... 
+ ParsedSchema retry = retry(() -> client.getSchemaById(id)); + Assertions.assertNotNull(retry); + } catch (Exception e) { + throw new IllegalStateException(e); + } + }, (c, b) -> { + }); } @Test public void testConverter_AutoRegisterSchema() { String name = "myr" + ThreadLocalRandom.current().nextInt(0, Integer.MAX_VALUE); - testConverter( - generateArtifactId(), - name, - true, - (c) -> { - }, - (client, bytes) -> { - try { - client.reset(); - ParsedSchema retry = retry(() -> { - ByteBuffer buffer = ByteBuffer.wrap(bytes); - buffer.get(); // magic-byte - int id = buffer.getInt(); - return client.getSchemaById(id); - }); - Assertions.assertNotNull(retry); - } catch (Exception e) { - throw new IllegalStateException(e); - } - } - ); + testConverter(generateArtifactId(), name, true, (c) -> { + }, (client, bytes) -> { + try { + client.reset(); + ParsedSchema retry = retry(() -> { + ByteBuffer buffer = ByteBuffer.wrap(bytes); + buffer.get(); // magic-byte + int id = buffer.getInt(); + return client.getSchemaById(id); + }); + Assertions.assertNotNull(retry); + } catch (Exception e) { + throw new IllegalStateException(e); + } + }); } - private void testConverter(String subject, String name, boolean autoRegister, Consumer pre, BiConsumer post) { + private void testConverter(String subject, String name, boolean autoRegister, + Consumer pre, BiConsumer post) { SchemaRegistryClient client = buildClient(); pre.accept(client); - org.apache.kafka.connect.data.Schema cs = - org.apache.kafka.connect.data.SchemaBuilder.struct() - .name(name).field("bar", org.apache.kafka.connect.data.Schema.STRING_SCHEMA); + org.apache.kafka.connect.data.Schema cs = org.apache.kafka.connect.data.SchemaBuilder.struct() + .name(name).field("bar", org.apache.kafka.connect.data.Schema.STRING_SCHEMA); Struct struct = new Struct(cs); struct.put("bar", "somebar"); @@ -451,10 +451,12 @@ public void testBasicAvro() throws Exception { String subject2 = "testBasic2"; int schemasInSubject1 = 10; List 
allVersionsInSubject1 = new ArrayList<>(); - List allSchemasInSubject1 = ConfluentTestUtils.getRandomCanonicalAvroString(schemasInSubject1); + List allSchemasInSubject1 = ConfluentTestUtils + .getRandomCanonicalAvroString(schemasInSubject1); int schemasInSubject2 = 5; List allVersionsInSubject2 = new ArrayList<>(); - List allSchemasInSubject2 = ConfluentTestUtils.getRandomCanonicalAvroString(schemasInSubject2); + List allSchemasInSubject2 = ConfluentTestUtils + .getRandomCanonicalAvroString(schemasInSubject2); List allSubjects = new ArrayList<>(); List schemaIds = new ArrayList<>(); @@ -462,13 +464,16 @@ public void testBasicAvro() throws Exception { // test getAllVersions with no existing data try { confluentClient.getAllVersions(subject1); - fail("Getting all versions from non-existing subject1 should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)"); + fail("Getting all versions from non-existing subject1 should fail with " + + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)"); } catch (RestClientException rce) { - assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), "Should get a 404 status for non-existing subject"); + assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), + "Should get a 404 status for non-existing subject"); } // test getAllSubjects with no existing data - assertEquals(allSubjects, confluentClient.getAllSubjects(), "Getting all subjects should return empty"); + assertEquals(allSubjects, confluentClient.getAllSubjects(), + "Getting all subjects should return empty"); // test registering and verifying new schemas in subject1 for (int i = 0; i < schemasInSubject1; i++) { @@ -485,7 +490,8 @@ public void testBasicAvro() throws Exception { for (int i = 0; i < schemasInSubject1; i++) { String schemaString = allSchemasInSubject1.get(i); int foundId = confluentClient.registerSchema(schemaString, subject1, true).getId(); - assertEquals((int) schemaIds.get(i), foundId, 
"Re-registering an existing schema should return the existing version"); + assertEquals((int) schemaIds.get(i), foundId, + "Re-registering an existing schema should return the existing version"); } // test registering schemas in subject2 @@ -498,11 +504,14 @@ public void testBasicAvro() throws Exception { allSubjects.add(subject2); // test getAllVersions with existing data - assertEquals(allVersionsInSubject1, confluentClient.getAllVersions(subject1), "Getting all versions from subject1 should match all registered versions"); - assertEquals(allVersionsInSubject2, confluentClient.getAllVersions(subject2), "Getting all versions from subject2 should match all registered versions"); + assertEquals(allVersionsInSubject1, confluentClient.getAllVersions(subject1), + "Getting all versions from subject1 should match all registered versions"); + assertEquals(allVersionsInSubject2, confluentClient.getAllVersions(subject2), + "Getting all versions from subject2 should match all registered versions"); // test getAllSubjects with existing data - assertEquals(allSubjects, confluentClient.getAllSubjects(), "Getting all subjects should match all registered subjects"); + assertEquals(allSubjects, confluentClient.getAllSubjects(), + "Getting all subjects should match all registered subjects"); } @Test @@ -510,15 +519,17 @@ public void testRegisterSameSchemaOnDifferentSubject() throws Exception { String schema = ConfluentTestUtils.getRandomCanonicalAvroString(1).get(0); int id1 = confluentClient.registerSchema(schema, "subject1", true).getId(); int id2 = confluentClient.registerSchema(schema, "subject2", true).getId(); - assertEquals(id1, id2, "Registering the same schema under different subjects should return the same id"); + assertEquals(id1, id2, + "Registering the same schema under different subjects should return the same id"); } @Test public void testRegisterInvalidSchemaBadType() throws Exception { String subject = "testRegisterInvalidSchemaBadType"; - //Invalid Field Type 
'str' - String badSchemaString = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"str\",\"name\":\"field1\"}]}"; + // Invalid Field Type 'str' + String badSchemaString = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"str\",\"name\":\"field1\"}]}"; try { new org.apache.avro.Schema.Parser().parse(badSchemaString); @@ -528,7 +539,8 @@ public void testRegisterInvalidSchemaBadType() throws Exception { try { confluentClient.registerSchema(badSchemaString, subject, true); - fail("Registering schema with invalid field type should fail with " + ErrorCode.INVALID_SCHEMA.value() + " (invalid schema)"); + fail("Registering schema with invalid field type should fail with " + + ErrorCode.INVALID_SCHEMA.value() + " (invalid schema)"); } catch (RestClientException rce) { assertEquals(ErrorCode.INVALID_SCHEMA.value(), rce.getErrorCode(), "Invalid schema"); } @@ -538,13 +550,16 @@ public void testRegisterInvalidSchemaBadType() throws Exception { public void testRegisterInvalidSchemaBadReference() throws Exception { String subject = "testRegisterInvalidSchemaBadReference"; - //Invalid Reference + // Invalid Reference SchemaReference invalidReference = new SchemaReference("invalid.schema", "badSubject", 1); - String schemaString = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":\"field1\"}]}"; + String schemaString = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"field1\"}]}"; try { - confluentClient.registerSchema(schemaString, "AVRO", Collections.singletonList(invalidReference), subject, true); - fail("Registering schema with invalid reference should fail with " + ErrorCode.INVALID_SCHEMA.value() + " (invalid schema)"); + confluentClient.registerSchema(schemaString, "AVRO", Collections.singletonList(invalidReference), + subject, true); + fail("Registering schema with invalid reference should fail with " 
+ + ErrorCode.INVALID_SCHEMA.value() + " (invalid schema)"); } catch (RestClientException rce) { assertEquals(ErrorCode.INVALID_SCHEMA.value(), rce.getErrorCode(), "Invalid schema"); } @@ -579,10 +594,12 @@ public void testIncompatibleSchemaLookupBySubject() throws Exception { String subject = "testSubject"; // Make two incompatible schemas - field 'f' has different types - String schema1String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; + String schema1String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; String schema1 = new AvroSchema(schema1String).canonicalString(); - String schema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"int\",\"name\":" + "\"f" + "\"}]}"; + String schema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"int\",\"name\":" + "\"f" + "\"}]}"; String schema2 = new AvroSchema(schema2String).canonicalString(); // ensure registering incompatible schemas will raise an error @@ -592,7 +609,8 @@ public void testIncompatibleSchemaLookupBySubject() throws Exception { // error response from Avro confluentClient.registerSchema(schema1, subject); int versionOfRegisteredSchema = confluentClient.lookUpSubjectVersion(schema1, subject).getVersion(); - boolean isCompatible = confluentClient.testCompatibility(schema2, subject, String.valueOf(versionOfRegisteredSchema)).isEmpty(); + boolean isCompatible = confluentClient + .testCompatibility(schema2, subject, String.valueOf(versionOfRegisteredSchema)).isEmpty(); assertFalse(isCompatible, "Schema should be incompatible with specified version"); } @@ -600,13 +618,16 @@ public void testIncompatibleSchemaLookupBySubject() throws Exception { public void testIncompatibleSchemaBySubject() throws Exception { String subject = "testSubject"; - String schema1String = 
"{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":\"f1\"},{\"type\":\"string\",\"name\":\"f2\"}]}"; + String schema1String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"},{\"type\":\"string\",\"name\":\"f2\"}]}"; String schema1 = new AvroSchema(schema1String).canonicalString(); - String schema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":\"f1\"}]}"; + String schema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}]}"; String schema2 = new AvroSchema(schema2String).canonicalString(); - String schema3String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":\"f1\"},{\"type\":\"string\",\"name\":\"f3\"}]}"; + String schema3String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"},{\"type\":\"string\",\"name\":\"f3\"}]}"; String schema3 = new AvroSchema(schema3String).canonicalString(); confluentClient.registerSchema(schema1, subject); @@ -614,7 +635,7 @@ public void testIncompatibleSchemaBySubject() throws Exception { confluentClient.updateCompatibility(CompatibilityLevel.FORWARD_TRANSITIVE.name, subject); - //schema3 is compatible with schema2, but not compatible with schema1 + // schema3 is compatible with schema2, but not compatible with schema1 boolean isCompatible = confluentClient.testCompatibility(schema3, subject, "latest").isEmpty(); assertTrue(isCompatible, "Schema is compatible with the latest version"); isCompatible = confluentClient.testCompatibility(schema3, subject, null).isEmpty(); @@ -623,7 +644,8 @@ public void testIncompatibleSchemaBySubject() throws Exception { confluentClient.registerSchema(schema3String, subject); fail("Schema register should fail since schema is incompatible"); } catch 
(RestClientException e) { - assertEquals(HTTP_CONFLICT, e.getErrorCode(), "Schema register should fail since schema is incompatible"); + assertEquals(HTTP_CONFLICT, e.getErrorCode(), + "Schema register should fail since schema is incompatible"); assertFalse(e.getMessage().isEmpty()); } } @@ -634,32 +656,41 @@ public void testSchemaRegistrationUnderDiffSubjects() throws Exception { String subject2 = "testSchemaRegistrationUnderDiffSubjects2"; // Make two incompatible schemas - field 'f' has different types - String schemaString1 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; + String schemaString1 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; String schema1 = new AvroSchema(schemaString1).canonicalString(); - String schemaString2 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"int\",\"name\":" + "\"foo" + "\"}]}"; + String schemaString2 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"int\",\"name\":" + "\"foo" + "\"}]}"; String schema2 = new AvroSchema(schemaString2).canonicalString(); confluentClient.registerSchema(schema1, subject1); - int versionOfRegisteredSchema1Subject1 = confluentClient.lookUpSubjectVersion(schema1, subject1).getVersion(); - assertEquals(1, versionOfRegisteredSchema1Subject1, "1st schema under subject1 should have version 1"); + int versionOfRegisteredSchema1Subject1 = confluentClient.lookUpSubjectVersion(schema1, subject1) + .getVersion(); + assertEquals(1, versionOfRegisteredSchema1Subject1, + "1st schema under subject1 should have version 1"); int idOfRegisteredSchema2Subject1 = confluentClient.registerSchema(schema2, subject1); - int versionOfRegisteredSchema2Subject1 = confluentClient.lookUpSubjectVersion(schema2, subject1).getVersion(); - assertEquals(2, versionOfRegisteredSchema2Subject1, "2nd schema under 
subject1 should have version 2"); + int versionOfRegisteredSchema2Subject1 = confluentClient.lookUpSubjectVersion(schema2, subject1) + .getVersion(); + assertEquals(2, versionOfRegisteredSchema2Subject1, + "2nd schema under subject1 should have version 2"); int idOfRegisteredSchema2Subject2 = confluentClient.registerSchema(schema2, subject2); - assertEquals(idOfRegisteredSchema2Subject1, idOfRegisteredSchema2Subject2, "Since schema is globally registered but not under subject2, id should not change"); + assertEquals(idOfRegisteredSchema2Subject1, idOfRegisteredSchema2Subject2, + "Since schema is globally registered but not under subject2, id should not change"); } @Test public void testConfigDefaults() throws Exception { - assertEquals(NONE.name, confluentClient.getConfig(null).getCompatibilityLevel(), "Default compatibility level should be none for this test instance"); + assertEquals(NONE.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "Default compatibility level should be none for this test instance"); // change it to forward confluentClient.updateCompatibility(CompatibilityLevel.FORWARD.name, null); - assertEquals(FORWARD.name, confluentClient.getConfig(null).getCompatibilityLevel(), "New compatibility level should be forward for this test instance"); + assertEquals(FORWARD.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "New compatibility level should be forward for this test instance"); } @Test @@ -670,59 +701,71 @@ public void testNonExistentSubjectConfigChange() throws Exception { } catch (RestClientException e) { fail("Changing config for an invalid subject should succeed"); } - assertEquals(FORWARD.name, confluentClient.getConfig(subject).getCompatibilityLevel(), "New compatibility level for this subject should be forward"); + assertEquals(FORWARD.name, confluentClient.getConfig(subject).getCompatibilityLevel(), + "New compatibility level for this subject should be forward"); } @Test public void testSubjectConfigChange() 
throws Exception { String subject = "testSubjectConfigChange"; - assertEquals(NONE.name, confluentClient.getConfig(null).getCompatibilityLevel(), "Default compatibility level should be none for this test instance"); + assertEquals(NONE.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "Default compatibility level should be none for this test instance"); // change subject compatibility to forward confluentClient.updateCompatibility(CompatibilityLevel.FORWARD.name, subject); - assertEquals(NONE.name, confluentClient.getConfig(null).getCompatibilityLevel(), "Global compatibility level should remain none for this test instance"); + assertEquals(NONE.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "Global compatibility level should remain none for this test instance"); - assertEquals(FORWARD.name, confluentClient.getConfig(subject).getCompatibilityLevel(), "New compatibility level for this subject should be forward"); + assertEquals(FORWARD.name, confluentClient.getConfig(subject).getCompatibilityLevel(), + "New compatibility level for this subject should be forward"); } @Test public void testGlobalConfigChange() throws Exception { - assertEquals(NONE.name, confluentClient.getConfig(null).getCompatibilityLevel(), "Default compatibility level should be none for this test instance"); + assertEquals(NONE.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "Default compatibility level should be none for this test instance"); // change subject compatibility to forward confluentClient.updateCompatibility(CompatibilityLevel.FORWARD.name, null); - assertEquals(FORWARD.name, confluentClient.getConfig(null).getCompatibilityLevel(), "New Global compatibility level should be forward"); + assertEquals(FORWARD.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "New Global compatibility level should be forward"); // change subject compatibility to backward confluentClient.updateCompatibility(BACKWARD.name, null); - 
assertEquals(BACKWARD.name, confluentClient.getConfig(null).getCompatibilityLevel(), "New Global compatibility level should be backward"); + assertEquals(BACKWARD.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "New Global compatibility level should be backward"); } @Test public void testGetSchemaNonExistingId() throws Exception { try { confluentClient.getId(Integer.MAX_VALUE); - fail("Schema lookup by missing id should fail with " + ErrorCode.SCHEMA_NOT_FOUND.value() + " (schema not found)"); + fail("Schema lookup by missing id should fail with " + ErrorCode.SCHEMA_NOT_FOUND.value() + + " (schema not found)"); } catch (RestClientException rce) { // this is expected. - assertEquals(ErrorCode.SCHEMA_NOT_FOUND.value(), rce.getErrorCode(), "Should get a 404 status for non-existing id"); + assertEquals(ErrorCode.SCHEMA_NOT_FOUND.value(), rce.getErrorCode(), + "Should get a 404 status for non-existing id"); } } @Test public void testGetSchemaTypes() throws Exception { - assertEquals(new HashSet<>(Arrays.asList("AVRO", "JSON", "PROTOBUF")), new HashSet<>(confluentClient.getSchemaTypes())); + assertEquals(new HashSet<>(Arrays.asList("AVRO", "JSON", "PROTOBUF")), + new HashSet<>(confluentClient.getSchemaTypes())); } @Test public void testListVersionsNonExistingSubject() throws Exception { try { confluentClient.getAllVersions("Invalid"); - fail("Getting all versions of missing subject should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)"); + fail("Getting all versions of missing subject should fail with " + + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)"); } catch (RestClientException rce) { // this is expected. 
- assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), "Should get a 404 status for non-existing subject"); + assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), + "Should get a 404 status for non-existing subject"); } } @@ -731,10 +774,12 @@ public void testGetVersionNonExistentSubject() throws Exception { // test getVersion on a non-existing subject try { confluentClient.getVersion("non-existing-subject", 1); - fail("Getting version of missing subject should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)"); + fail("Getting version of missing subject should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + + " (subject not found)"); } catch (RestClientException e) { // this is expected. - assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), e.getErrorCode(), "Unregistered subject shouldn't be found in getVersion()"); + assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), e.getErrorCode(), + "Unregistered subject shouldn't be found in getVersion()"); } } @@ -746,10 +791,12 @@ public void testGetNonExistingVersion() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schema, subject); try { confluentClient.getVersion(subject, 200); - fail("Getting unregistered version should fail with " + ErrorCode.VERSION_NOT_FOUND.value() + " (version not found)"); + fail("Getting unregistered version should fail with " + ErrorCode.VERSION_NOT_FOUND.value() + + " (version not found)"); } catch (RestClientException e) { // this is expected. 
- assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), e.getErrorCode(), "Unregistered version shouldn't be found"); + assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), e.getErrorCode(), + "Unregistered version shouldn't be found"); } } @@ -761,10 +808,12 @@ public void testGetInvalidVersion() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schema, subject); try { confluentClient.getVersion(subject, 0); - fail("Getting invalid version should fail with " + ErrorCode.INVALID_VERSION + " (invalid version)"); + fail("Getting invalid version should fail with " + ErrorCode.INVALID_VERSION + + " (invalid version)"); } catch (RestClientException e) { // this is expected. - assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), e.getErrorCode(), "Invalid version shouldn't be found"); + assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), e.getErrorCode(), + "Invalid version shouldn't be found"); } } @@ -775,10 +824,13 @@ public void testGetVersion() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(1), subject); - assertEquals(schemas.get(0), confluentClient.getVersion(subject, 1).getSchema(), "Version 1 schema should match"); + assertEquals(schemas.get(0), confluentClient.getVersion(subject, 1).getSchema(), + "Version 1 schema should match"); - assertEquals(schemas.get(1), confluentClient.getVersion(subject, 2).getSchema(), "Version 2 schema should match"); - assertEquals(schemas.get(1), confluentClient.getLatestVersion(subject).getSchema(), "Latest schema should be the same as version 2"); + assertEquals(schemas.get(1), confluentClient.getVersion(subject, 2).getSchema(), + "Version 2 schema should match"); + assertEquals(schemas.get(1), confluentClient.getLatestVersion(subject).getSchema(), + "Latest schema should be the same as version 2"); } @Test @@ -788,7 +840,8 @@ public void testGetLatestVersionSchemaOnly() throws 
Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(1), subject); - assertEquals(schemas.get(1), confluentClient.getLatestVersionSchemaOnly(subject), "Latest schema should be the same as version 2"); + assertEquals(schemas.get(1), confluentClient.getLatestVersionSchemaOnly(subject), + "Latest schema should be the same as version 2"); } @Test @@ -797,7 +850,8 @@ public void testGetVersionSchemaOnly() throws Exception { String subject = "test"; ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); - assertEquals(schemas.get(0), confluentClient.getVersionSchemaOnly(subject, 1), "Retrieved schema should be the same as version 1"); + assertEquals(schemas.get(0), confluentClient.getVersionSchemaOnly(subject, 1), + "Retrieved schema should be the same as version 1"); } @Test @@ -818,21 +872,25 @@ public void testSchemaReferences() throws Exception { // the newly registered schema should be immediately readable on the leader assertEquals(schemas.get(1), schemaString.getSchemaString(), "Registered schema should be found"); - assertEquals(Collections.singletonList(ref), schemaString.getReferences(), "Schema references should be found"); + assertEquals(Collections.singletonList(ref), schemaString.getReferences(), + "Schema references should be found"); List refs = confluentClient.getReferencedBy(subject, 1); assertEquals(registeredId, refs.get(0).intValue()); try { - confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, String.valueOf(1)); + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, + String.valueOf(1)); fail("Deleting reference should fail with " + ErrorCode.REFERENCE_EXISTS.value()); } catch (RestClientException rce) { assertEquals(ErrorCode.REFERENCE_EXISTS.value(), rce.getErrorCode(), "Reference found"); } - assertEquals((Integer) 1, 
confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, referrer, "1")); + assertEquals((Integer) 1, + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, referrer, "1")); - assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "1")); + assertEquals((Integer) 1, + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "1")); } @Test @@ -885,7 +943,8 @@ public void testSchemaReferencesMultipleLevels() throws Exception { } """; - ConfluentTestUtils.registerAndVerifySchema(confluentClient, new AvroSchema(sharedRef).canonicalString(), "shared"); + ConfluentTestUtils.registerAndVerifySchema(confluentClient, + new AvroSchema(sharedRef).canonicalString(), "shared"); RegisterSchemaRequest request = new RegisterSchemaRequest(); request.setSchema(ref1); @@ -910,7 +969,8 @@ public void testSchemaReferencesMultipleLevels() throws Exception { // the newly registered schema should be immediately readable on the leader assertEquals(root, schemaString.getSchemaString(), "Registered schema should be found"); - assertEquals(Arrays.asList(r1, r2), schemaString.getReferences(), "Schema references should be found"); + assertEquals(Arrays.asList(r1, r2), schemaString.getReferences(), + "Schema references should be found"); } @Test @@ -921,32 +981,45 @@ public void testSchemaMissingReferences() { request.setSchema(schemas.get(1)); request.setReferences(Collections.emptyList()); - assertThrows(RestClientException.class, () -> confluentClient.registerSchema(request, "referrer", false)); + assertThrows(RestClientException.class, + () -> confluentClient.registerSchema(request, "referrer", false)); } @Test public void testSchemaNormalization() throws Exception { String subject1 = "testSchemaNormalization"; - String reference1 = "{\"type\":\"record\"," + "\"name\":\"Subrecord1\"," + "\"namespace\":\"otherns\"," + "\"fields\":" + 
"[{\"name\":\"field1\",\"type\":\"string\"}]}"; + String reference1 = "{\"type\":\"record\"," + "\"name\":\"Subrecord1\"," + + "\"namespace\":\"otherns\"," + "\"fields\":" + + "[{\"name\":\"field1\",\"type\":\"string\"}]}"; ConfluentTestUtils.registerAndVerifySchema(confluentClient, reference1, "ref1"); - String reference2 = "{\"type\":\"record\"," + "\"name\":\"Subrecord2\"," + "\"namespace\":\"otherns\"," + "\"fields\":" + "[{\"name\":\"field2\",\"type\":\"string\"}]}"; + String reference2 = "{\"type\":\"record\"," + "\"name\":\"Subrecord2\"," + + "\"namespace\":\"otherns\"," + "\"fields\":" + + "[{\"name\":\"field2\",\"type\":\"string\"}]}"; ConfluentTestUtils.registerAndVerifySchema(confluentClient, reference2, "ref2"); SchemaReference ref1 = new SchemaReference("otherns.Subrecord1", "ref1", 1); SchemaReference ref2 = new SchemaReference("otherns.Subrecord2", "ref2", 1); // Two versions of same schema, the second one with extra spaces and line breaks - String schemaString1 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":{\"type\":\"int\"},\"name\":\"field0" + "\"}," + "{\"name\":\"field1\",\"type\":\"otherns.Subrecord1\"}," + "{\"name\":\"field2\",\"type\":\"otherns.Subrecord2\"}" + "]," + "\"extraMetadata\": {\"a\": 1, \"b\": 2}" + "}"; - String schemaString2 = "{\"type\":\"record\",\n" + "\"name\":\"myrecord\",\n" + "\"fields\":" + "[{\"type\":{\"type\":\"int\"},\"name\":\"field0" + "\"},\n" + "{\"name\":\"field1\",\"type\":\"otherns.Subrecord1\"}," + "{\"name\":\"field2\",\"type\":\"otherns.Subrecord2\"}" + "]," + "\"extraMetadata\": {\"a\": 1, \"b\": 2}" + "}"; + String schemaString1 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":{\"type\":\"int\"},\"name\":\"field0" + "\"}," + + "{\"name\":\"field1\",\"type\":\"otherns.Subrecord1\"}," + + "{\"name\":\"field2\",\"type\":\"otherns.Subrecord2\"}" + "]," + + "\"extraMetadata\": {\"a\": 1, \"b\": 2}" + "}"; + String schemaString2 = 
"{\"type\":\"record\",\n" + "\"name\":\"myrecord\",\n" + "\"fields\":" + + "[{\"type\":{\"type\":\"int\"},\"name\":\"field0" + "\"},\n" + + "{\"name\":\"field1\",\"type\":\"otherns.Subrecord1\"}," + + "{\"name\":\"field2\",\"type\":\"otherns.Subrecord2\"}" + "]," + + "\"extraMetadata\": {\"a\": 1, \"b\": 2}" + "}"; RegisterSchemaRequest registerRequest = new RegisterSchemaRequest(); registerRequest.setSchema(schemaString1); registerRequest.setReferences(Arrays.asList(ref1, ref2)); confluentClient.registerSchema(registerRequest, subject1, true); - int versionOfRegisteredSchema1Subject1 = confluentClient.lookUpSubjectVersion(registerRequest, subject1, false, false).getVersion(); - + int versionOfRegisteredSchema1Subject1 = confluentClient + .lookUpSubjectVersion(registerRequest, subject1, false, false).getVersion(); // send schema with all references resolved RegisterSchemaRequest lookUpRequest = new RegisterSchemaRequest(); @@ -955,11 +1028,15 @@ public void testSchemaNormalization() throws Exception { parser.parse(reference2); AvroSchema resolvedSchema = new AvroSchema(parser.parse(schemaString2)); lookUpRequest.setSchema(resolvedSchema.canonicalString()); - versionOfRegisteredSchema1Subject1 = confluentClient.lookUpSubjectVersion(lookUpRequest, subject1, true, false).getVersion(); - assertEquals(1, versionOfRegisteredSchema1Subject1, "1st schema under subject1 should have version 1"); - - - String recordInvalidDefaultSchema = "{\"namespace\": \"namespace\",\n" + " \"type\": \"record\",\n" + " \"name\": \"test\",\n" + " \"fields\": [\n" + " {\"name\": \"string_default\", \"type\": \"string\", \"default\": null}\n" + "]\n" + "}"; + versionOfRegisteredSchema1Subject1 = confluentClient + .lookUpSubjectVersion(lookUpRequest, subject1, true, false).getVersion(); + assertEquals(1, versionOfRegisteredSchema1Subject1, + "1st schema under subject1 should have version 1"); + + String recordInvalidDefaultSchema = "{\"namespace\": \"namespace\",\n" + " \"type\": \"record\",\n" 
+ + " \"name\": \"test\",\n" + " \"fields\": [\n" + + " {\"name\": \"string_default\", \"type\": \"string\", \"default\": null}\n" + "]\n" + + "}"; registerRequest = new RegisterSchemaRequest(); registerRequest.setSchema(recordInvalidDefaultSchema); try { @@ -976,24 +1053,29 @@ public void testBad() throws Exception { List allSubjects = new ArrayList<>(); // test getAllSubjects with no existing data - assertEquals(allSubjects, confluentClient.getAllSubjects(), "Getting all subjects should return empty"); + assertEquals(allSubjects, confluentClient.getAllSubjects(), + "Getting all subjects should return empty"); try { - ConfluentTestUtils.registerAndVerifySchema(confluentClient, ConfluentTestUtils.getBadSchema(), subject1); + ConfluentTestUtils.registerAndVerifySchema(confluentClient, ConfluentTestUtils.getBadSchema(), + subject1); fail("Registering bad schema should fail with " + ErrorCode.INVALID_SCHEMA.value()); } catch (RestClientException rce) { assertEquals(ErrorCode.INVALID_SCHEMA.value(), rce.getErrorCode(), "Invalid schema"); } try { - ConfluentTestUtils.registerAndVerifySchema(confluentClient, ConfluentTestUtils.getRandomCanonicalAvroString(1).get(0), List.of(new SchemaReference("bad", "bad", 100)), subject1); + ConfluentTestUtils.registerAndVerifySchema(confluentClient, + ConfluentTestUtils.getRandomCanonicalAvroString(1).get(0), + List.of(new SchemaReference("bad", "bad", 100)), subject1); fail("Registering bad reference should fail with " + ErrorCode.INVALID_SCHEMA.value()); } catch (RestClientException rce) { assertEquals(ErrorCode.INVALID_SCHEMA.value(), rce.getErrorCode(), "Invalid schema"); } // test getAllSubjects with existing data - assertEquals(allSubjects, confluentClient.getAllSubjects(), "Getting all subjects should match all registered subjects"); + assertEquals(allSubjects, confluentClient.getAllSubjects(), + "Getting all subjects should match all registered subjects"); } @Test @@ -1001,7 +1083,8 @@ public void 
testLookUpSchemaUnderNonExistentSubject() throws Exception { String schema = ConfluentTestUtils.getRandomCanonicalAvroString(1).get(0); try { confluentClient.lookUpSubjectVersion(schema, "non-existent-subject"); - fail("Looking up schema under missing subject should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)"); + fail("Looking up schema under missing subject should fail with " + + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)"); } catch (RestClientException rce) { assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), "Subject not found"); } @@ -1016,7 +1099,8 @@ public void testLookUpNonExistentSchemaUnderSubject() throws Exception { try { confluentClient.lookUpSubjectVersion(schemas.get(1), subject); - fail("Looking up missing schema under subject should fail with " + ErrorCode.SCHEMA_NOT_FOUND.value() + " (schema not found)"); + fail("Looking up missing schema under subject should fail with " + + ErrorCode.SCHEMA_NOT_FOUND.value() + " (schema not found)"); } catch (RestClientException rce) { assertEquals(ErrorCode.SCHEMA_NOT_FOUND.value(), rce.getErrorCode(), "Schema not found"); } @@ -1038,13 +1122,16 @@ public void testGetVersionsAssociatedWithSchemaId() throws Exception { assertTrue(associatedSubjects.contains(new SubjectVersion(subject1, 1))); assertTrue(associatedSubjects.contains(new SubjectVersion(subject2, 1))); - assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject2, "1"), "Deleting Schema Version Success"); + assertEquals((Integer) 1, + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject2, "1"), + "Deleting Schema Version Success"); associatedSubjects = confluentClient.getAllVersionsById(registeredSchemaId); assertEquals(associatedSubjects.size(), 1); assertTrue(associatedSubjects.contains(new SubjectVersion(subject1, 1))); - associatedSubjects = 
confluentClient.getAllVersionsById(RestService.DEFAULT_REQUEST_PROPERTIES, registeredSchemaId, null, true); + associatedSubjects = confluentClient.getAllVersionsById(RestService.DEFAULT_REQUEST_PROPERTIES, + registeredSchemaId, null, true); assertEquals(associatedSubjects.size(), 2); assertTrue(associatedSubjects.contains(new SubjectVersion(subject1, 1))); assertTrue(associatedSubjects.contains(new SubjectVersion(subject2, 1))); @@ -1057,7 +1144,8 @@ public void testCompatibilityNonExistentVersion() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schema, subject); try { confluentClient.testCompatibility(schema, subject, "100"); - fail("Testing compatibility for missing version should fail with " + ErrorCode.VERSION_NOT_FOUND.value() + " (version not found)"); + fail("Testing compatibility for missing version should fail with " + + ErrorCode.VERSION_NOT_FOUND.value() + " (version not found)"); } catch (RestClientException rce) { assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), rce.getErrorCode(), "Version not found"); } @@ -1070,7 +1158,8 @@ public void testCompatibilityInvalidVersion() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schema, subject); try { confluentClient.testCompatibility(schema, subject, "earliest"); - fail("Testing compatibility for invalid version should fail with " + ErrorCode.VERSION_NOT_FOUND.value() + " (version not found)"); + fail("Testing compatibility for invalid version should fail with " + + ErrorCode.VERSION_NOT_FOUND.value() + " (version not found)"); } catch (RestClientException rce) { assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), rce.getErrorCode(), "Version not found"); } @@ -1080,7 +1169,8 @@ public void testCompatibilityInvalidVersion() throws Exception { public void testGetConfigNonExistentSubject() throws Exception { try { confluentClient.getConfig("non-existent-subject"); - fail("Getting the configuration of a missing subject should fail with " + 
ErrorCode.SUBJECT_NOT_FOUND.value() + " error code (subject not found)"); + fail("Getting the configuration of a missing subject should fail with " + + ErrorCode.SUBJECT_NOT_FOUND.value() + " error code (subject not found)"); } catch (RestClientException rce) { assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), "Subject not found"); } @@ -1094,9 +1184,11 @@ public void testCanonicalization() throws Exception { int id = confluentClient.registerSchema(schema, subject); - assertEquals(id, confluentClient.registerSchema(schema, subject), "Registering the same schema should get back the same id"); + assertEquals(id, confluentClient.registerSchema(schema, subject), + "Registering the same schema should get back the same id"); - assertEquals(id, confluentClient.lookUpSubjectVersion(schema, subject).getId().intValue(), "Lookup the same schema should get back the same id"); + assertEquals(id, confluentClient.lookUpSubjectVersion(schema, subject).getId().intValue(), + "Lookup the same schema should get back the same id"); } @Test @@ -1107,34 +1199,42 @@ public void testDeleteSchemaVersionBasic() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(1), subject); - assertEquals((Integer) 2, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2"), "Deleting Schema Version Success"); + assertEquals((Integer) 2, + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2"), + "Deleting Schema Version Success"); assertEquals(Collections.singletonList(1), confluentClient.getAllVersions(subject)); try { confluentClient.getVersion(subject, 2); - fail(String.format("Getting Version %s for subject %s should fail with %s", "2", subject, ErrorCode.VERSION_NOT_FOUND.value())); + fail(String.format("Getting Version %s for subject %s should fail with %s", "2", subject, + 
ErrorCode.VERSION_NOT_FOUND.value())); } catch (RestClientException rce) { assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), rce.getErrorCode(), "Version not found"); } try { RegisterSchemaRequest request = new RegisterSchemaRequest(); request.setSchema(schemas.get(1)); - confluentClient.lookUpSubjectVersion(RestService.DEFAULT_REQUEST_PROPERTIES, request, subject, false, false); - fail(String.format("Lookup Subject Version %s for subject %s should fail with %s", "2", subject, ErrorCode.SCHEMA_NOT_FOUND.value())); + confluentClient.lookUpSubjectVersion(RestService.DEFAULT_REQUEST_PROPERTIES, request, subject, + false, false); + fail(String.format("Lookup Subject Version %s for subject %s should fail with %s", "2", subject, + ErrorCode.SCHEMA_NOT_FOUND.value())); } catch (RestClientException rce) { assertEquals(ErrorCode.SCHEMA_NOT_FOUND.value(), rce.getErrorCode(), "Schema not found"); } - assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "latest"), "Deleting Schema Version Success"); + assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, + subject, "latest"), "Deleting Schema Version Success"); try { List versions = confluentClient.getAllVersions(subject); - fail("Getting all versions from non-existing subject1 should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found). Got " + versions); + fail("Getting all versions from non-existing subject1 should fail with " + + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found). 
Got " + versions); } catch (RestClientException rce) { - assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), "Should get a 404 status for non-existing subject"); + assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), + "Should get a 404 status for non-existing subject"); } - //re-register twice and versions should be same + // re-register twice and versions should be same for (int i = 0; i < 2; i++) { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); assertEquals(Collections.singletonList(3), confluentClient.getAllVersions(subject)); @@ -1150,73 +1250,88 @@ public void testDeleteSchemaVersionPermanent() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(1), subject); - //permanent delete without soft delete first + // permanent delete without soft delete first try { confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2", true); fail("Permanent deleting first time should throw schemaVersionNotSoftDeletedException"); } catch (RestClientException rce) { - assertEquals(ErrorCode.SCHEMA_VERSION_NOT_SOFT_DELETED.value(), rce.getErrorCode(), "Schema version must be soft deleted first"); + assertEquals(ErrorCode.SCHEMA_VERSION_NOT_SOFT_DELETED.value(), rce.getErrorCode(), + "Schema version must be soft deleted first"); } - //soft delete - assertEquals((Integer) 2, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2"), "Deleting Schema Version Success"); + // soft delete + assertEquals((Integer) 2, + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2"), + "Deleting Schema Version Success"); assertEquals(Collections.singletonList(1), confluentClient.getAllVersions(subject)); - assertEquals(Arrays.asList(1, 2), 
confluentClient.getAllVersions(RestService.DEFAULT_REQUEST_PROPERTIES, subject, true)); - //soft delete again + assertEquals(Arrays.asList(1, 2), + confluentClient.getAllVersions(RestService.DEFAULT_REQUEST_PROPERTIES, subject, true)); + // soft delete again try { confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2"); fail("Soft deleting second time should throw schemaVersionSoftDeletedException"); } catch (RestClientException rce) { - assertEquals(ErrorCode.SCHEMA_VERSION_SOFT_DELETED.value(), rce.getErrorCode(), "Schema version already soft deleted"); + assertEquals(ErrorCode.SCHEMA_VERSION_SOFT_DELETED.value(), rce.getErrorCode(), + "Schema version already soft deleted"); } try { confluentClient.getVersion(subject, 2); - fail(String.format("Getting Version %s for subject %s should fail with %s", "2", subject, ErrorCode.VERSION_NOT_FOUND.value())); + fail(String.format("Getting Version %s for subject %s should fail with %s", "2", subject, + ErrorCode.VERSION_NOT_FOUND.value())); } catch (RestClientException rce) { assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), rce.getErrorCode(), "Version not found"); } - io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient.getVersion(subject, 2, true); + io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient + .getVersion(subject, 2, true); assertEquals((Integer) 2, schema.getVersion(), "Lookup Version Match"); try { RegisterSchemaRequest request = new RegisterSchemaRequest(); request.setSchema(schemas.get(1)); - confluentClient.lookUpSubjectVersion(RestService.DEFAULT_REQUEST_PROPERTIES, request, subject, false, false); - fail(String.format("Lookup Subject Version %s for subject %s should fail with %s", "2", subject, ErrorCode.SCHEMA_NOT_FOUND.value())); + confluentClient.lookUpSubjectVersion(RestService.DEFAULT_REQUEST_PROPERTIES, request, subject, + false, false); + fail(String.format("Lookup Subject Version %s for subject 
%s should fail with %s", "2", subject, + ErrorCode.SCHEMA_NOT_FOUND.value())); } catch (RestClientException rce) { assertEquals(ErrorCode.SCHEMA_NOT_FOUND.value(), rce.getErrorCode(), "Schema not found"); } // permanent delete - assertEquals((Integer) 2, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2", true), "Deleting Schema Version Success"); + assertEquals((Integer) 2, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, + subject, "2", true), "Deleting Schema Version Success"); // GET after permanent delete should give exception try { confluentClient.getVersion(subject, 2, true); - fail(String.format("Getting Version %s for subject %s should fail with %s", "2", subject, ErrorCode.VERSION_NOT_FOUND.value())); + fail(String.format("Getting Version %s for subject %s should fail with %s", "2", subject, + ErrorCode.VERSION_NOT_FOUND.value())); } catch (RestClientException rce) { assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), rce.getErrorCode(), "Version not found"); } - //permanent delete again + // permanent delete again try { confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2", true); - fail(String.format("Getting Version %s for subject %s should fail with %s", "2", subject, ErrorCode.VERSION_NOT_FOUND.value())); + fail(String.format("Getting Version %s for subject %s should fail with %s", "2", subject, + ErrorCode.VERSION_NOT_FOUND.value())); } catch (RestClientException rce) { assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), rce.getErrorCode(), "Version not found"); } - assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "latest"), "Deleting Schema Version Success"); + assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, + subject, "latest"), "Deleting Schema Version Success"); try { List versions = confluentClient.getAllVersions(subject); - 
fail("Getting all versions from non-existing subject1 should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found). Got " + versions); + fail("Getting all versions from non-existing subject1 should fail with " + + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found). Got " + versions); } catch (RestClientException rce) { - assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), "Should get a 404 status for non-existing subject"); + assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), + "Should get a 404 status for non-existing subject"); } - //re-register twice and versions should be same - //after permanent delete of 2, the new version coming up will be 2 + // re-register twice and versions should be same + // after permanent delete of 2, the new version coming up will be 2 for (int i = 0; i < 2; i++) { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); assertEquals(Collections.singletonList(2), confluentClient.getAllVersions(subject)); @@ -1229,7 +1344,8 @@ public void testDeleteSchemaVersionInvalidSubject() throws Exception { try { String subject = "testDeleteSchemaVersionInvalidSubject"; confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "1"); - fail("Deleting a non existent subject version should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + " error code (subject not found)"); + fail("Deleting a non existent subject version should fail with " + + ErrorCode.SUBJECT_NOT_FOUND.value() + " error code (subject not found)"); } catch (RestClientException rce) { assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), "Subject not found"); } @@ -1243,21 +1359,27 @@ public void testDeleteLatestVersion() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(1), subject); - assertEquals((Integer) 2, 
confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "latest"), "Deleting Schema Version Success"); + assertEquals((Integer) 2, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, + subject, "latest"), "Deleting Schema Version Success"); - io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient.getLatestVersion(subject); + io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient + .getLatestVersion(subject); assertEquals(schemas.get(0), schema.getSchema(), "Latest Version Schema"); - assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "latest"), "Deleting Schema Version Success"); + assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, + subject, "latest"), "Deleting Schema Version Success"); try { confluentClient.getLatestVersion(subject); - fail("Getting latest versions from non-existing subject should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)."); + fail("Getting latest versions from non-existing subject should fail with " + + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)."); } catch (RestClientException rce) { - assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), "Should get a 404 status for non-existing subject"); + assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), + "Should get a 404 status for non-existing subject"); } ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(2), subject); - assertEquals(schemas.get(2), confluentClient.getLatestVersion(subject).getSchema(), "Latest version available after subject re-registration"); + assertEquals(schemas.get(2), confluentClient.getLatestVersion(subject).getSchema(), + "Latest version available after subject re-registration"); } @Test @@ -1266,9 +1388,11 @@ public void 
testGetLatestVersionNonExistentSubject() throws Exception { try { confluentClient.getLatestVersion(subject); - fail("Getting latest versions from non-existing subject should fail with " + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)."); + fail("Getting latest versions from non-existing subject should fail with " + + ErrorCode.SUBJECT_NOT_FOUND.value() + " (subject not found)."); } catch (RestClientException rce) { - assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), "Should get a 404 status for non-existing subject"); + assertEquals(ErrorCode.SUBJECT_NOT_FOUND.value(), rce.getErrorCode(), + "Should get a 404 status for non-existing subject"); } } @@ -1280,10 +1404,14 @@ public void testGetLatestVersionDeleteOlder() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(1), subject); - assertEquals(schemas.get(1), confluentClient.getLatestVersion(subject).getSchema(), "Latest Version Schema"); + assertEquals(schemas.get(1), confluentClient.getLatestVersion(subject).getSchema(), + "Latest Version Schema"); - assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "1"), "Deleting Schema Older Version Success"); - assertEquals(schemas.get(1), confluentClient.getLatestVersion(subject).getSchema(), "Latest Version Schema Still Same"); + assertEquals((Integer) 1, + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "1"), + "Deleting Schema Older Version Success"); + assertEquals(schemas.get(1), confluentClient.getLatestVersion(subject).getSchema(), + "Latest Version Schema Still Same"); } @Test @@ -1295,7 +1423,8 @@ public void testDeleteInvalidVersion() throws Exception { try { confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2"); } catch (RestClientException rce) { - 
assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), rce.getErrorCode(), "Should get a 404 status for non-existing subject version"); + assertEquals(ErrorCode.VERSION_NOT_FOUND.value(), rce.getErrorCode(), + "Should get a 404 status for non-existing subject version"); } } @@ -1307,18 +1436,22 @@ public void testDeleteWithLookup() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(1), subject); - assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "1"), "Deleting Schema Version Success"); + assertEquals((Integer) 1, + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "1"), + "Deleting Schema Version Success"); try { confluentClient.lookUpSubjectVersion(schemas.get(0), subject, false); - fail(String.format("Lookup Subject Version %s for subject %s should fail with %s", "2", subject, ErrorCode.SCHEMA_NOT_FOUND.value())); + fail(String.format("Lookup Subject Version %s for subject %s should fail with %s", "2", subject, + ErrorCode.SCHEMA_NOT_FOUND.value())); } catch (RestClientException rce) { assertEquals(ErrorCode.SCHEMA_NOT_FOUND.value(), rce.getErrorCode(), "Schema not found"); } - //verify deleted schema - io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient.lookUpSubjectVersion(schemas.get(0), subject, true); + // verify deleted schema + io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient + .lookUpSubjectVersion(schemas.get(0), subject, true); assertEquals((Integer) 1, schema.getVersion(), "Lookup Version Match"); - //re-register schema again and verify we get latest version + // re-register schema again and verify we get latest version ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(0), subject); schema = confluentClient.lookUpSubjectVersion(schemas.get(0), 
subject, true); assertEquals((Integer) 3, schema.getVersion(), "Lookup Version Match"); @@ -1331,13 +1464,18 @@ public void testIncompatibleSchemaLookupBySubjectAfterDelete() throws Exception String subject = "testIncompatibleSchemaLookupBySubjectAfterDelete"; // Make two incompatible schemas - field 'g' has different types - String schema1String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; + String schema1String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; String schema1 = new AvroSchema(schema1String).canonicalString(); - String wrongSchema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}," + "{\"type\":\"string\",\"name\":" + "\"g\" , \"default\":\"d\"}" + "]}"; + String wrongSchema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}," + "{\"type\":\"string\",\"name\":" + + "\"g\" , \"default\":\"d\"}" + "]}"; String wrongSchema2 = new AvroSchema(wrongSchema2String).canonicalString(); - String correctSchema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}," + "{\"type\":\"int\",\"name\":" + "\"g\" , \"default\":0}" + "]}"; + String correctSchema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}," + "{\"type\":\"int\",\"name\":" + + "\"g\" , \"default\":0}" + "]}"; String correctSchema2 = new AvroSchema(correctSchema2String).canonicalString(); // ensure registering incompatible schemas will raise an error confluentClient.updateCompatibility(CompatibilityLevel.BACKWARD.name, subject); @@ -1366,7 +1504,9 @@ public void testIncompatibleSchemaLookupBySubjectAfterDelete() throws Exception 
confluentClient.registerSchema(correctSchema2, subject, true); - assertEquals((Integer) 3, confluentClient.lookUpSubjectVersion(correctSchema2String, subject, true, false).getVersion(), "Version is same"); + assertEquals((Integer) 3, + confluentClient.lookUpSubjectVersion(correctSchema2String, subject, true, false).getVersion(), + "Version is same"); } @@ -1374,10 +1514,13 @@ public void testIncompatibleSchemaLookupBySubjectAfterDelete() throws Exception public void testSubjectCompatibilityAfterDeletingAllVersions() throws Exception { String subject = "testSubjectCompatibilityAfterDeletingAllVersions"; - String schema1String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; + String schema1String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; String schema1 = new AvroSchema(schema1String).canonicalString(); - String schema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}," + "{\"type\":\"string\",\"name\":" + "\"g\" , \"default\":\"d\"}" + "]}"; + String schema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}," + "{\"type\":\"string\",\"name\":" + + "\"g\" , \"default\":\"d\"}" + "]}"; String schema2 = new AvroSchema(schema2String).canonicalString(); confluentClient.updateCompatibility(CompatibilityLevel.FULL.name, null); @@ -1387,15 +1530,19 @@ public void testSubjectCompatibilityAfterDeletingAllVersions() throws Exception confluentClient.registerSchema(schema2, subject); confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "1"); - assertEquals(BACKWARD.name, confluentClient.getConfig(subject).getCompatibilityLevel(), "Compatibility Level Exists"); - assertEquals(FULL.name, 
confluentClient.getConfig(null).getCompatibilityLevel(), "Top Compatibility Level Exists"); + assertEquals(BACKWARD.name, confluentClient.getConfig(subject).getCompatibilityLevel(), + "Compatibility Level Exists"); + assertEquals(FULL.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "Top Compatibility Level Exists"); confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject, "2"); try { confluentClient.getConfig(subject); } catch (RestClientException rce) { - assertEquals(ErrorCode.SUBJECT_COMPATIBILITY_NOT_CONFIGURED.value(), rce.getErrorCode(), "Compatibility Level doesn't exist"); + assertEquals(ErrorCode.SUBJECT_COMPATIBILITY_NOT_CONFIGURED.value(), rce.getErrorCode(), + "Compatibility Level doesn't exist"); } - assertEquals(FULL.name, confluentClient.getConfig(null).getCompatibilityLevel(), "Top Compatibility Level Exists"); + assertEquals(FULL.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "Top Compatibility Level Exists"); } @@ -1413,7 +1560,9 @@ public void testListSubjects() throws Exception { assertEquals(expectedResponse, confluentClient.getAllSubjects(), "Current Subjects"); List deletedResponse = new ArrayList<>(); deletedResponse.add(1); - assertEquals(deletedResponse, confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject2), "Versions Deleted Match"); + assertEquals(deletedResponse, + confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject2), + "Versions Deleted Match"); expectedResponse = new ArrayList<>(); expectedResponse.add(subject1); @@ -1424,7 +1573,9 @@ public void testListSubjects() throws Exception { expectedResponse.add(subject2); assertEquals(expectedResponse, confluentClient.getAllSubjects(true), "Current Subjects"); - assertEquals(deletedResponse, confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject2, true), "Versions Deleted Match"); + assertEquals(deletedResponse, + 
confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject2, true), + "Versions Deleted Match"); expectedResponse = new ArrayList<>(); expectedResponse.add(subject1); @@ -1440,14 +1591,21 @@ public void testListSoftDeletedSubjectsAndSchemas() throws Exception { ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(1), subject1); ConfluentTestUtils.registerAndVerifySchema(confluentClient, schemas.get(2), subject2); - assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject1, "1")); - assertEquals((Integer) 1, confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject2, "1")); + assertEquals((Integer) 1, + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject1, "1")); + assertEquals((Integer) 1, + confluentClient.deleteSchemaVersion(RestService.DEFAULT_REQUEST_PROPERTIES, subject2, "1")); - assertEquals(Collections.singletonList(2), confluentClient.getAllVersions(subject1), "List All Versions Match"); - assertEquals(Arrays.asList(1, 2), confluentClient.getAllVersions(RestService.DEFAULT_REQUEST_PROPERTIES, subject1, true), "List All Versions Include deleted Match"); + assertEquals(Collections.singletonList(2), confluentClient.getAllVersions(subject1), + "List All Versions Match"); + assertEquals(Arrays.asList(1, 2), + confluentClient.getAllVersions(RestService.DEFAULT_REQUEST_PROPERTIES, subject1, true), + "List All Versions Include deleted Match"); - assertEquals(Collections.singletonList(subject1), confluentClient.getAllSubjects(), "List All Subjects Match"); - assertEquals(Arrays.asList(subject1, subject2), confluentClient.getAllSubjects(true), "List All Subjects Include deleted Match"); + assertEquals(Collections.singletonList(subject1), confluentClient.getAllSubjects(), + "List All Subjects Match"); + assertEquals(Arrays.asList(subject1, subject2), confluentClient.getAllSubjects(true), + "List All Subjects Include 
deleted Match"); } @Test @@ -1459,7 +1617,9 @@ public void testDeleteSubjectBasic() throws Exception { List expectedResponse = new ArrayList<>(); expectedResponse.add(1); expectedResponse.add(2); - assertEquals(expectedResponse, confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject), "Versions Deleted Match"); + assertEquals(expectedResponse, + confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject), + "Versions Deleted Match"); try { confluentClient.getLatestVersion(subject); fail(String.format("Subject %s should not be found", subject)); @@ -1478,9 +1638,12 @@ public void testDeleteSubjectException() throws Exception { List expectedResponse = new ArrayList<>(); expectedResponse.add(1); expectedResponse.add(2); - assertEquals(expectedResponse, confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject), "Versions Deleted Match"); + assertEquals(expectedResponse, + confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject), + "Versions Deleted Match"); - io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient.lookUpSubjectVersion(schemas.get(0), subject, true); + io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient + .lookUpSubjectVersion(schemas.get(0), subject, true); assertEquals(1, (long) schema.getVersion()); schema = confluentClient.lookUpSubjectVersion(schemas.get(1), subject, true); assertEquals(2, (long) schema.getVersion()); @@ -1489,11 +1652,11 @@ public void testDeleteSubjectException() throws Exception { confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject); fail(String.format("Subject %s should not be found", subject)); } catch (RestClientException rce) { - assertEquals(ErrorCode.SUBJECT_SOFT_DELETED.value(), rce.getErrorCode(), "Subject exists in soft deleted format."); + assertEquals(ErrorCode.SUBJECT_SOFT_DELETED.value(), rce.getErrorCode(), + "Subject exists in soft deleted 
format."); } } - @Test public void testDeleteSubjectPermanent() throws Exception { List schemas = ConfluentTestUtils.getRandomCanonicalAvroString(2); @@ -1508,17 +1671,23 @@ public void testDeleteSubjectPermanent() throws Exception { confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject, true); fail("Delete permanent should not succeed"); } catch (RestClientException rce) { - assertEquals(ErrorCode.SUBJECT_NOT_SOFT_DELETED.value(), rce.getErrorCode(), "Subject '%s' was not deleted first before permanent delete"); + assertEquals(ErrorCode.SUBJECT_NOT_SOFT_DELETED.value(), rce.getErrorCode(), + "Subject '%s' was not deleted first before permanent delete"); } - assertEquals(expectedResponse, confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject), "Versions Deleted Match"); + assertEquals(expectedResponse, + confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject), + "Versions Deleted Match"); - io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient.lookUpSubjectVersion(schemas.get(0), subject, true); + io.confluent.kafka.schemaregistry.client.rest.entities.Schema schema = confluentClient + .lookUpSubjectVersion(schemas.get(0), subject, true); assertEquals(1, (long) schema.getVersion()); schema = confluentClient.lookUpSubjectVersion(schemas.get(1), subject, true); assertEquals(2, (long) schema.getVersion()); - assertEquals(expectedResponse, confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject, true), "Versions Deleted Match"); + assertEquals(expectedResponse, + confluentClient.deleteSubject(RestService.DEFAULT_REQUEST_PROPERTIES, subject, true), + "Versions Deleted Match"); for (Integer i : expectedResponse) { try { confluentClient.lookUpSubjectVersion(schemas.get(0), subject, false); @@ -1540,10 +1709,13 @@ public void testDeleteSubjectPermanent() throws Exception { public void testSubjectCompatibilityAfterDeletingSubject() throws Exception { 
String subject = "testSubjectCompatibilityAfterDeletingSubject"; - String schema1String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; + String schema1String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}]}"; String schema1 = new AvroSchema(schema1String).canonicalString(); - String schema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}," + "{\"type\":\"string\",\"name\":" + "\"g\" , \"default\":\"d\"}" + "]}"; + String schema2String = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + "\"}," + "{\"type\":\"string\",\"name\":" + + "\"g\" , \"default\":\"d\"}" + "]}"; String schema2 = new AvroSchema(schema2String).canonicalString(); confluentClient.updateCompatibility(CompatibilityLevel.FULL.name, null); @@ -1556,9 +1728,11 @@ public void testSubjectCompatibilityAfterDeletingSubject() throws Exception { try { confluentClient.getConfig(subject); } catch (RestClientException rce) { - assertEquals(ErrorCode.SUBJECT_COMPATIBILITY_NOT_CONFIGURED.value(), rce.getErrorCode(), "Compatibility Level doesn't exist"); + assertEquals(ErrorCode.SUBJECT_COMPATIBILITY_NOT_CONFIGURED.value(), rce.getErrorCode(), + "Compatibility Level doesn't exist"); } - assertEquals(FULL.name, confluentClient.getConfig(null).getCompatibilityLevel(), "Top Compatibility Level Exists"); + assertEquals(FULL.name, confluentClient.getConfig(null).getCompatibilityLevel(), + "Top Compatibility Level Exists"); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/ConfluentTestUtils.java b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/ConfluentTestUtils.java index 3b7c9c8342..5f2b1c9a0e 100644 --- 
a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/ConfluentTestUtils.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/ConfluentTestUtils.java @@ -17,39 +17,46 @@ public class ConfluentTestUtils { private static final Random random = new Random(); - /** * Helper method which checks the number of versions registered under the given subject. */ - public static void checkNumberOfVersions(RestService restService, int expected, String subject) throws IOException, RestClientException { + public static void checkNumberOfVersions(RestService restService, int expected, String subject) + throws IOException, RestClientException { List versions = restService.getAllVersions(subject); - assertEquals("Expected " + expected + " registered versions under subject " + subject + ", but found " + versions.size(), expected, versions.size()); + assertEquals("Expected " + expected + " registered versions under subject " + subject + ", but found " + + versions.size(), expected, versions.size()); } /** * Register a new schema and verify that it can be found on the expected version. 
*/ - public static int registerAndVerifySchema(RestService restService, String schemaString, String subject) throws IOException, RestClientException { + public static int registerAndVerifySchema(RestService restService, String schemaString, String subject) + throws IOException, RestClientException { int registeredId = restService.registerSchema(schemaString, subject); // the newly registered schema should be immediately readable on the leader - assertEquals("Registered schema should be found", schemaString, restService.getId(registeredId).getSchemaString()); + assertEquals("Registered schema should be found", schemaString, + restService.getId(registeredId).getSchemaString()); return registeredId; } - public static void registerAndVerifySchema(RestService restService, String schemaString, List references, String subject) throws IOException, RestClientException { - int registeredId = restService.registerSchema(schemaString, AvroSchema.TYPE, references, subject).getId(); + public static void registerAndVerifySchema(RestService restService, String schemaString, + List references, String subject) throws IOException, RestClientException { + int registeredId = restService.registerSchema(schemaString, AvroSchema.TYPE, references, subject) + .getId(); // the newly registered schema should be immediately readable on the leader - assertEquals("Registered schema should be found", schemaString, restService.getId(registeredId).getSchemaString()); + assertEquals("Registered schema should be found", schemaString, + restService.getId(registeredId).getSchemaString()); } public static List getRandomCanonicalAvroString(int num) { List avroStrings = new ArrayList(); for (int i = 0; i < num; i++) { - String schemaString = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + random.nextInt(Integer.MAX_VALUE) + "\"}]}"; + String schemaString = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + 
"[{\"type\":\"string\",\"name\":" + "\"f" + random.nextInt(Integer.MAX_VALUE) + "\"}]}"; avroStrings.add(new AvroSchema(ContentHandle.create(schemaString).content()).canonicalString()); } return avroStrings; @@ -57,15 +64,17 @@ public static List getRandomCanonicalAvroString(int num) { public static List getAvroSchemaWithReferences() { List schemas = new ArrayList<>(); - String reference = "{\"type\":\"record\"," + "\"name\":\"Subrecord\"," + "\"namespace\":\"otherns\"," + "\"fields\":" + "[{\"name\":\"field2\",\"type\":\"string\"}]}"; + String reference = "{\"type\":\"record\"," + "\"name\":\"Subrecord\"," + "\"namespace\":\"otherns\"," + + "\"fields\":" + "[{\"name\":\"field2\",\"type\":\"string\"}]}"; schemas.add(reference); - String schemaString = "{\"type\":\"record\"," + "\"name\":\"MyRecord\"," + "\"namespace\":\"ns\"," + "\"fields\":" + "[{\"name\":\"field1\",\"type\":\"otherns.Subrecord\"}]}"; + String schemaString = "{\"type\":\"record\"," + "\"name\":\"MyRecord\"," + "\"namespace\":\"ns\"," + + "\"fields\":" + "[{\"name\":\"field1\",\"type\":\"otherns.Subrecord\"}]}"; schemas.add(schemaString); return schemas; } - public static String getBadSchema() { - return "{\"type\":\"bad-record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":" + "\"f" + random.nextInt(Integer.MAX_VALUE) + "\"}]}"; + return "{\"type\":\"bad-record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":" + "\"f" + random.nextInt(Integer.MAX_VALUE) + "\"}]}"; } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/SubjectsResourceTest.java b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/SubjectsResourceTest.java index ada63dcf58..fffafeb370 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/SubjectsResourceTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/SubjectsResourceTest.java @@ -1,23 +1,19 @@ package 
io.apicurio.registry.noprofile.ccompat.rest.v7; -import static io.restassured.RestAssured.given; -import static org.hamcrest.CoreMatchers.anything; - -import org.junit.jupiter.api.Test; - import io.apicurio.registry.AbstractResourceTestBase; import io.quarkus.test.junit.QuarkusTest; +import org.junit.jupiter.api.Test; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.anything; @QuarkusTest public class SubjectsResourceTest extends AbstractResourceTestBase { @Test public void testListSubjectsEndpoint() { - given() - .when().contentType(CT_JSON).get("/ccompat/v7/subjects") - .then() - .statusCode(200) - .body(anything()); + given().when().contentType(CT_JSON).get("/ccompat/v7/subjects").then().statusCode(200) + .body(anything()); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/UpdateState.java b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/UpdateState.java index 2b715a08fe..9aa09a66cf 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/UpdateState.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/ccompat/rest/v7/UpdateState.java @@ -4,47 +4,27 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyDescription; import com.fasterxml.jackson.annotation.JsonPropertyOrder; - import io.apicurio.registry.types.ArtifactState; - /** * Root Type for UpdateState *

- * - * */ @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonPropertyOrder({ - "state" -}) +@JsonPropertyOrder({ "state" }) public class UpdateState { /** - * Describes the state of an artifact or artifact version. The following states - * are possible: - * - * * ENABLED - * * DISABLED - * * DEPRECATED - * - * (Required) - * + * Describes the state of an artifact or artifact version. The following states are possible: * ENABLED * + * DISABLED * DEPRECATED (Required) */ @JsonProperty("state") @JsonPropertyDescription("Describes the state of an artifact or artifact version. The following states\nare possible:\n\n* ENABLED\n* DISABLED\n* DEPRECATED\n") private ArtifactState state; /** - * Describes the state of an artifact or artifact version. The following states - * are possible: - * - * * ENABLED - * * DISABLED - * * DEPRECATED - * - * (Required) - * + * Describes the state of an artifact or artifact version. The following states are possible: * ENABLED * + * DISABLED * DEPRECATED (Required) */ @JsonProperty("state") public ArtifactState getState() { @@ -52,15 +32,8 @@ public ArtifactState getState() { } /** - * Describes the state of an artifact or artifact version. The following states - * are possible: - * - * * ENABLED - * * DISABLED - * * DEPRECATED - * - * (Required) - * + * Describes the state of an artifact or artifact version. 
The following states are possible: * ENABLED * + * DISABLED * DEPRECATED (Required) */ @JsonProperty("state") public void setState(ArtifactState state) { @@ -69,8 +42,6 @@ public void setState(ArtifactState state) { @Override public String toString() { - return "UpdateState{" + - "state=" + state + - '}'; + return "UpdateState{" + "state=" + state + '}'; } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/compatibility/CompatibilityRuleApplicationTest.java b/app/src/test/java/io/apicurio/registry/noprofile/compatibility/CompatibilityRuleApplicationTest.java index c140ebb2aa..5e0255647a 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/compatibility/CompatibilityRuleApplicationTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/compatibility/CompatibilityRuleApplicationTest.java @@ -40,90 +40,41 @@ private static TypedContent toTypedContent(String schema) { } private static final String SCHEMA_SIMPLE = "{\"type\": \"string\"}"; - private static final String SCHEMA_WITH_MAP = "{\r\n" + - " \"type\": \"record\",\r\n" + - " \"name\": \"userInfo\",\r\n" + - " \"namespace\": \"my.example\",\r\n" + - " \"fields\": [\r\n" + - " {\r\n" + - " \"name\": \"name\",\r\n" + - " \"type\": \"string\",\r\n" + - " \"default\": \"NONE\"\r\n" + - " },\r\n" + - " {\r\n" + - " \"name\": \"props\",\r\n" + - " \"type\": {\r\n" + - " \"type\": \"map\",\r\n" + - " \"values\": \"string\"\r\n" + - " }\r\n" + - " }\r\n" + - " ]\r\n" + - "}"; - private static final String INVALID_SCHEMA_WITH_MAP = "{\r\n" + - " \"type\": \"record\",\r\n" + - " \"name\": \"userInfo\",\r\n" + - " \"namespace\": \"my.example\",\r\n" + - " \"fields\": [\r\n" + - " {\r\n" + - " \"name\": \"name\",\r\n" + - " \"type\": \"string\",\r\n" + - " \"default\": \"NONE\"\r\n" + - " },\r\n" + - " {\r\n" + - " \"name\": \"props\",\r\n" + - " \"type\": {\r\n" + - " \"type\": \"map\",\r\n" + - " \"values\": \"string\"\r\n" + - " },\r\n" + - " \"default\": \"{}\"\r\n" + - " }\r\n" + - " 
]\r\n" + - "}"; - - private static final String citizenSchema = "{\n" + - " \"$id\": \"https://example.com/citizen.schema.json\",\n" + - " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + - " \"title\": \"Citizen\",\n" + - " \"type\": \"object\",\n" + - " \"properties\": {\n" + - " \"firstName\": {\n" + - " \"type\": \"string\",\n" + - " \"description\": \"The citizen's first name.\"\n" + - " },\n" + - " \"lastName\": {\n" + - " \"type\": \"string\",\n" + - " \"description\": \"The citizen's last name.\"\n" + - " },\n" + - " \"age\": {\n" + - " \"description\": \"Age in years which must be equal to or greater than zero.\",\n" + - " \"type\": \"integer\",\n" + - " \"minimum\": 0\n" + - " },\n" + - " \"city\": {\n" + - " \"$ref\": \"city.json\"\n" + - " }\n" + - " },\n" + - " \"required\": [\n" + - " \"city\"\n" + - " ]\n" + - "}"; - private static final String citySchema = "{\n" + - " \"$id\": \"https://example.com/city.schema.json\",\n" + - " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + - " \"title\": \"City\",\n" + - " \"type\": \"object\",\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"string\",\n" + - " \"description\": \"The city's name.\"\n" + - " },\n" + - " \"zipCode\": {\n" + - " \"type\": \"integer\",\n" + - " \"description\": \"The zip code.\",\n" + - " \"minimum\": 0\n" + - " }\n" + - " }\n" + - "}"; + private static final String SCHEMA_WITH_MAP = "{\r\n" + " \"type\": \"record\",\r\n" + + " \"name\": \"userInfo\",\r\n" + " \"namespace\": \"my.example\",\r\n" + + " \"fields\": [\r\n" + " {\r\n" + " \"name\": \"name\",\r\n" + + " \"type\": \"string\",\r\n" + " \"default\": \"NONE\"\r\n" + + " },\r\n" + " {\r\n" + " \"name\": \"props\",\r\n" + + " \"type\": {\r\n" + " \"type\": \"map\",\r\n" + + " \"values\": \"string\"\r\n" + " }\r\n" + " }\r\n" + + " ]\r\n" + "}"; + private static final String INVALID_SCHEMA_WITH_MAP = "{\r\n" + " \"type\": \"record\",\r\n" + + " \"name\": \"userInfo\",\r\n" + " 
\"namespace\": \"my.example\",\r\n" + + " \"fields\": [\r\n" + " {\r\n" + " \"name\": \"name\",\r\n" + + " \"type\": \"string\",\r\n" + " \"default\": \"NONE\"\r\n" + + " },\r\n" + " {\r\n" + " \"name\": \"props\",\r\n" + + " \"type\": {\r\n" + " \"type\": \"map\",\r\n" + + " \"values\": \"string\"\r\n" + " },\r\n" + + " \"default\": \"{}\"\r\n" + " }\r\n" + " ]\r\n" + "}"; + + private static final String citizenSchema = "{\n" + + " \"$id\": \"https://example.com/citizen.schema.json\",\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + " \"title\": \"Citizen\",\n" + + " \"type\": \"object\",\n" + " \"properties\": {\n" + " \"firstName\": {\n" + + " \"type\": \"string\",\n" + " \"description\": \"The citizen's first name.\"\n" + + " },\n" + " \"lastName\": {\n" + " \"type\": \"string\",\n" + + " \"description\": \"The citizen's last name.\"\n" + " },\n" + " \"age\": {\n" + + " \"description\": \"Age in years which must be equal to or greater than zero.\",\n" + + " \"type\": \"integer\",\n" + " \"minimum\": 0\n" + " },\n" + " \"city\": {\n" + + " \"$ref\": \"city.json\"\n" + " }\n" + " },\n" + " \"required\": [\n" + + " \"city\"\n" + " ]\n" + "}"; + private static final String citySchema = "{\n" + " \"$id\": \"https://example.com/city.schema.json\",\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + " \"title\": \"City\",\n" + + " \"type\": \"object\",\n" + " \"properties\": {\n" + " \"name\": {\n" + + " \"type\": \"string\",\n" + " \"description\": \"The city's name.\"\n" + " },\n" + + " \"zipCode\": {\n" + " \"type\": \"integer\",\n" + + " \"description\": \"The zip code.\",\n" + " \"minimum\": 0\n" + " }\n" + " }\n" + + "}"; private static final CreateArtifact createArtifact = new CreateArtifact(); static { @@ -177,55 +128,36 @@ public void testJsonSchemaCompatibility() { String v1Schema = JsonSchemas.jsonSchema; String v2Schema = JsonSchemas.incompatibleJsonSchema; - RuleViolationException ruleViolationException = 
Assertions.assertThrows(RuleViolationException.class, () -> { - RuleContext context = new RuleContext("TestGroup", "TestJson", ArtifactType.JSON, "FORWARD_TRANSITIVE", - Collections.singletonList(toTypedContent(v1Schema)), toTypedContent(v2Schema), - Collections.emptyList(), Collections.emptyMap()); - compatibility.execute(context); - }); + RuleViolationException ruleViolationException = Assertions.assertThrows(RuleViolationException.class, + () -> { + RuleContext context = new RuleContext("TestGroup", "TestJson", ArtifactType.JSON, + "FORWARD_TRANSITIVE", Collections.singletonList(toTypedContent(v1Schema)), + toTypedContent(v2Schema), Collections.emptyList(), Collections.emptyMap()); + compatibility.execute(context); + }); Set ruleViolationCauses = ruleViolationException.getCauses(); RuleViolation ageViolationCause = findCauseByContext(ruleViolationCauses, "/properties/age/type"); RuleViolation zipCodeViolationCause = findCauseByContext(ruleViolationCauses, "/properties/zipcode"); - /* Explanation for why the following diff type is not SUBSCHEMA_TYPE_CHANGED: + /* + * Explanation for why the following diff type is not SUBSCHEMA_TYPE_CHANGED: * - * Consider the following schemas, with FORWARD compatibility checking - * (i.e. B is newer, but is checked in a reverse order): - * A: - * ``` - * { - * "type": "object", - * "properties": { - * "age": { - * "type": "integer", - * "minimum": 0 - * } - * } - * } - * ``` - * B: - * ``` - * { - * "type": "object", - * "properties": { - * "age": { - * "type": "string", - * "minimum": 0 - * } - * } - * } - * ``` - * A is incompatible with B, because the `type` property has been changed from `string` to `integer`, - * however the `minimum` property, which is found in number schemas remained in B. - * The Everit library parses subschema of the `age` property in B not as a string schema with an extra property, - * but as a "synthetic" allOf combined schema of string and number. 
- * The compatibility checking then compares this synthetic number subschema to the number schema in A. + * Consider the following schemas, with FORWARD compatibility checking (i.e. B is newer, but is + * checked in a reverse order): A: ``` { "type": "object", "properties": { "age": { "type": "integer", + * "minimum": 0 } } } ``` B: ``` { "type": "object", "properties": { "age": { "type": "string", + * "minimum": 0 } } } ``` A is incompatible with B, because the `type` property has been changed from + * `string` to `integer`, however the `minimum` property, which is found in number schemas remained in + * B. The Everit library parses subschema of the `age` property in B not as a string schema with an + * extra property, but as a "synthetic" allOf combined schema of string and number. The compatibility + * checking then compares this synthetic number subschema to the number schema in A. */ Assertions.assertEquals("/properties/age/type", ageViolationCause.getContext()); - Assertions.assertEquals(DiffType.NUMBER_TYPE_INTEGER_REQUIRED_FALSE_TO_TRUE.getDescription(), ageViolationCause.getDescription()); + Assertions.assertEquals(DiffType.NUMBER_TYPE_INTEGER_REQUIRED_FALSE_TO_TRUE.getDescription(), + ageViolationCause.getDescription()); Assertions.assertEquals("/properties/zipcode", zipCodeViolationCause.getContext()); - Assertions.assertEquals(DiffType.SUBSCHEMA_TYPE_CHANGED.getDescription(), zipCodeViolationCause.getDescription()); + Assertions.assertEquals(DiffType.SUBSCHEMA_TYPE_CHANGED.getDescription(), + zipCodeViolationCause.getDescription()); } @@ -234,7 +166,8 @@ public void validateJsonSchemaEvolutionWithReferences() throws Exception { String groupId = TestUtils.generateGroupId(); String cityArtifactId = generateArtifactId(); - /*final Integer cityDependencyGlobalId = */createArtifact(groupId, cityArtifactId, ArtifactType.JSON, citySchema, ContentTypes.APPLICATION_JSON); + /* final Integer cityDependencyGlobalId = */createArtifact(groupId, cityArtifactId, 
ArtifactType.JSON, + citySchema, ContentTypes.APPLICATION_JSON); final io.apicurio.registry.rest.v3.beans.ArtifactReference cityReference = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); cityReference.setVersion("1"); @@ -244,12 +177,15 @@ public void validateJsonSchemaEvolutionWithReferences() throws Exception { String artifactId = generateArtifactId(); - /*final Integer globalId = */createArtifactWithReferences(groupId, artifactId, ArtifactType.JSON, citizenSchema, ContentTypes.APPLICATION_JSON, List.of(cityReference)); + /* final Integer globalId = */createArtifactWithReferences(groupId, artifactId, ArtifactType.JSON, + citizenSchema, ContentTypes.APPLICATION_JSON, List.of(cityReference)); - createArtifactRule(groupId, artifactId, io.apicurio.registry.types.RuleType.COMPATIBILITY, "BACKWARD"); + createArtifactRule(groupId, artifactId, io.apicurio.registry.types.RuleType.COMPATIBILITY, + "BACKWARD"); - //Try to create another version, it should be validated with no issues. - createArtifactVersionExtendedRaw(groupId, artifactId, citizenSchema, ContentTypes.APPLICATION_JSON, List.of(cityReference)); + // Try to create another version, it should be validated with no issues. 
+ createArtifactVersionExtendedRaw(groupId, artifactId, citizenSchema, ContentTypes.APPLICATION_JSON, + List.of(cityReference)); } private RuleViolation findCauseByContext(Set ruleViolations, String context) { @@ -268,7 +204,8 @@ public void testCompatibilityRuleApplication_Map() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig(CompatibilityLevel.FULL.name()); - clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).rules().post(createRule); + clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).rules().post(createRule); // This will result in org.apache.avro.AvroTypeException in the compatibility checker, // which is rethrown as UnprocessableSchemaException. @@ -286,7 +223,8 @@ public void testCompatibilityInvalidExitingContentRuleApplication_Map() throws E CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig(CompatibilityLevel.FULL.name()); - clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).rules().post(createRule); + clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).rules().post(createRule); // This will result in org.apache.avro.AvroTypeException in the compatibility checker, // which is rethrown as UnprocessableSchemaException. 
@@ -297,34 +235,35 @@ public void testCompatibilityInvalidExitingContentRuleApplication_Map() throws E Assertions.assertEquals(422, exception.getResponseStatusCode()); } - @Test public void testCompatibilityRuleApplication_FullTransitive() throws Exception { String artifactId = "testCompatibilityRuleApplication_FullTransitive"; - //Create artifact with 4 versions, where the first one is not compatible with the others + // Create artifact with 4 versions, where the first one is not compatible with the others createArtifact(artifactId, ArtifactType.AVRO, SCHEMA_SIMPLE, ContentTypes.APPLICATION_JSON); createArtifactVersion(artifactId, SCHEMA_WITH_MAP, ContentTypes.APPLICATION_JSON); createArtifactVersion(artifactId, SCHEMA_WITH_MAP, ContentTypes.APPLICATION_JSON); createArtifactVersion(artifactId, SCHEMA_WITH_MAP, ContentTypes.APPLICATION_JSON); createArtifactVersion(artifactId, SCHEMA_WITH_MAP, ContentTypes.APPLICATION_JSON); - //Activate compatibility rules + // Activate compatibility rules CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig(CompatibilityLevel.BACKWARD_TRANSITIVE.name()); - clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).rules().post(createRule); + clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).rules().post(createRule); - //Should fail, the new version is not compatible with the first one + // Should fail, the new version is not compatible with the first one Assertions.assertThrows(Exception.class, () -> { createArtifactVersion(artifactId, SCHEMA_WITH_MAP, ContentTypes.APPLICATION_JSON); }); - //Change rule to backward, should pass since the new version is compatible with the latest one + // Change rule to backward, should pass since the new version is compatible with the latest one Rule rule = new Rule(); rule.setRuleType(RuleType.COMPATIBILITY); 
rule.setConfig(CompatibilityLevel.BACKWARD.name()); - clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).rules().byRuleType(RuleType.COMPATIBILITY.getValue()).put(rule); + clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).rules().byRuleType(RuleType.COMPATIBILITY.getValue()).put(rule); createArtifactVersion(artifactId, SCHEMA_WITH_MAP, ContentTypes.APPLICATION_JSON); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/content/ContentCanonicalizerTest.java b/app/src/test/java/io/apicurio/registry/noprofile/content/ContentCanonicalizerTest.java index 8de94b5023..57d4b9ea7a 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/content/ContentCanonicalizerTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/content/ContentCanonicalizerTest.java @@ -9,9 +9,10 @@ import io.apicurio.registry.types.provider.ArtifactTypeUtilProvider; import io.apicurio.registry.types.provider.ArtifactTypeUtilProviderFactory; import io.quarkus.test.junit.QuarkusTest; +import jakarta.inject.Inject; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import jakarta.inject.Inject; + import java.util.Collections; @QuarkusTest @@ -36,20 +37,13 @@ private ContentCanonicalizer getContentCanonicalizer(String type) { @Test void testOpenAPI() { ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.OPENAPI); - - String before = "{\r\n" + - " \"openapi\": \"3.0.2\",\r\n" + - " \"info\": {\r\n" + - " \"title\": \"Empty 3.0 API\",\r\n" + - " \"version\": \"1.0.0\"\r\n" + - " },\r\n" + - " \"paths\": {\r\n" + - " \"/\": {}\r\n" + - " },\r\n" + - " \"components\": {}\r\n" + - "}"; + + String before = "{\r\n" + " \"openapi\": \"3.0.2\",\r\n" + " \"info\": {\r\n" + + " \"title\": \"Empty 3.0 API\",\r\n" + " \"version\": \"1.0.0\"\r\n" + + " },\r\n" + " \"paths\": {\r\n" + " \"/\": {}\r\n" + " },\r\n" + 
+ " \"components\": {}\r\n" + "}"; String expected = "{\"components\":{},\"info\":{\"title\":\"Empty 3.0 API\",\"version\":\"1.0.0\"},\"openapi\":\"3.0.2\",\"paths\":{\"/\":{}}}"; - + TypedContent content = toTypedContent(before); String actual = canonicalizer.canonicalize(content, Collections.emptyMap()).getContent().content(); Assertions.assertEquals(expected, actual); @@ -58,17 +52,12 @@ void testOpenAPI() { @Test void testAvro() { ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.AVRO); - - String before = "{\r\n" + - " \"type\": \"record\",\r\n" + - " \"namespace\": \"com.example\",\r\n" + - " \"name\": \"FullName\",\r\n" + - " \"fields\": [\r\n" + - " { \"name\": \"first\", \"type\": \"string\" },\r\n" + - " { \"name\": \"middle\", \"type\": \"string\" },\r\n" + - " { \"name\": \"last\", \"type\": \"string\" }\r\n" + - " ]\r\n" + - "} "; + + String before = "{\r\n" + " \"type\": \"record\",\r\n" + + " \"namespace\": \"com.example\",\r\n" + " \"name\": \"FullName\",\r\n" + + " \"fields\": [\r\n" + " { \"name\": \"first\", \"type\": \"string\" },\r\n" + + " { \"name\": \"middle\", \"type\": \"string\" },\r\n" + + " { \"name\": \"last\", \"type\": \"string\" }\r\n" + " ]\r\n" + "} "; String expected = "{\"type\":\"record\",\"name\":\"FullName\",\"namespace\":\"com.example\",\"doc\":\"\",\"fields\":[{\"name\":\"first\",\"type\":\"string\",\"doc\":\"\"},{\"name\":\"middle\",\"type\":\"string\",\"doc\":\"\"},{\"name\":\"last\",\"type\":\"string\",\"doc\":\"\"}]}"; TypedContent content = toTypedContent(before); @@ -80,20 +69,11 @@ void testAvro() { void testProtobuf() { ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.PROTOBUF); - String before = "message SearchRequest {\r\n" + - " required string query = 1;\r\n" + - " optional int32 page_number = 2;\r\n" + - " optional int32 result_per_page = 3;\r\n" + - "}"; - String expected = "// Proto schema formatted by Wire, do not edit.\n" - + "// Source: \n" - + "\n" - + 
"message SearchRequest {\n" - + " required string query = 1;\n" - + "\n" - + " optional int32 page_number = 2;\n" - + "\n" - + " optional int32 result_per_page = 3;\n" + String before = "message SearchRequest {\r\n" + " required string query = 1;\r\n" + + " optional int32 page_number = 2;\r\n" + " optional int32 result_per_page = 3;\r\n" + "}"; + String expected = "// Proto schema formatted by Wire, do not edit.\n" + "// Source: \n" + "\n" + + "message SearchRequest {\n" + " required string query = 1;\n" + "\n" + + " optional int32 page_number = 2;\n" + "\n" + " optional int32 result_per_page = 3;\n" + "}\n"; TypedContent content = toTypedContent(before, ContentTypes.APPLICATION_PROTOBUF); @@ -104,63 +84,31 @@ void testProtobuf() { @Test void testGraphQL() { ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.GRAPHQL); - - String before = "type Query {\r\n" + - " bookById(id: ID): Book \r\n" + - "}\r\n" + - "\r\n" + - "type Book {\r\n" + - " id: ID\r\n" + - " name: String\r\n" + - " pageCount: Int\r\n" + - " author: Author\r\n" + - "}\r\n" + - "\r\n" + - "type Author {\r\n" + - " id: ID\r\n\r\n" + - " firstName: String\r\n" + - " lastName: String\r\n" + - "}\r\n\r\n"; - String expected = "type Author {\n" + - " firstName: String\n" + - " id: ID\n" + - " lastName: String\n" + - "}\n" + - "\n" + - "type Book {\n" + - " author: Author\n" + - " id: ID\n" + - " name: String\n" + - " pageCount: Int\n" + - "}\n" + - "\n" + - "type Query {\n" + - " bookById(id: ID): Book\n" + - "}\n" + - ""; - + + String before = "type Query {\r\n" + " bookById(id: ID): Book \r\n" + "}\r\n" + "\r\n" + + "type Book {\r\n" + " id: ID\r\n" + " name: String\r\n" + " pageCount: Int\r\n" + + " author: Author\r\n" + "}\r\n" + "\r\n" + "type Author {\r\n" + " id: ID\r\n\r\n" + + " firstName: String\r\n" + " lastName: String\r\n" + "}\r\n\r\n"; + String expected = "type Author {\n" + " firstName: String\n" + " id: ID\n" + " lastName: String\n" + + "}\n" + "\n" + "type Book 
{\n" + " author: Author\n" + " id: ID\n" + " name: String\n" + + " pageCount: Int\n" + "}\n" + "\n" + "type Query {\n" + " bookById(id: ID): Book\n" + + "}\n" + ""; + TypedContent content = toTypedContent(before, ContentTypes.APPLICATION_GRAPHQL); String actual = canonicalizer.canonicalize(content, Collections.emptyMap()).getContent().content(); Assertions.assertEquals(expected, actual); } - + @Test void testKafkaConnect() { ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.KCONNECT); - - String before = "{\r\n" + - " \"type\": \"struct\",\r\n" + - " \"fields\": [\r\n" + - " {\r\n" + - " \"type\": \"string\",\r\n" + - " \"optional\": false,\r\n" + - " \"field\": \"bar\"\r\n" + - " }\r\n" + - " ],\r\n" + - " \"optional\": false\r\n" + - "}"; + + String before = "{\r\n" + " \"type\": \"struct\",\r\n" + " \"fields\": [\r\n" + " {\r\n" + + " \"type\": \"string\",\r\n" + " \"optional\": false,\r\n" + + " \"field\": \"bar\"\r\n" + " }\r\n" + " ],\r\n" + + " \"optional\": false\r\n" + "}"; String expected = "{\"fields\":[{\"field\":\"bar\",\"optional\":false,\"type\":\"string\"}],\"optional\":false,\"type\":\"struct\"}"; - + TypedContent content = toTypedContent(before); String actual = canonicalizer.canonicalize(content, Collections.emptyMap()).getContent().content(); Assertions.assertEquals(expected, actual); @@ -168,34 +116,34 @@ void testKafkaConnect() { @Test void testXsd() { - ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.XSD); + ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.XSD); + + TypedContent content = resourceToTypedContentHandle("xml-schema-before.xsd"); + String expected = resourceToString("xml-schema-expected.xsd"); - TypedContent content = resourceToTypedContentHandle("xml-schema-before.xsd"); - String expected = resourceToString("xml-schema-expected.xsd"); - - String actual = canonicalizer.canonicalize(content, Collections.emptyMap()).getContent().content(); - 
Assertions.assertEquals(expected, actual); + String actual = canonicalizer.canonicalize(content, Collections.emptyMap()).getContent().content(); + Assertions.assertEquals(expected, actual); } - - @Test - void testWsdl() { + + @Test + void testWsdl() { ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.WSDL); TypedContent content = resourceToTypedContentHandle("wsdl-before.wsdl"); String expected = resourceToString("wsdl-expected.wsdl"); - + + String actual = canonicalizer.canonicalize(content, Collections.emptyMap()).getContent().content(); + Assertions.assertEquals(expected, actual); + } + + @Test + void testXml() { + ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.XML); + + TypedContent content = resourceToTypedContentHandle("xml-before.xml"); + String expected = resourceToString("xml-expected.xml"); + String actual = canonicalizer.canonicalize(content, Collections.emptyMap()).getContent().content(); Assertions.assertEquals(expected, actual); - } - - @Test - void testXml() { - ContentCanonicalizer canonicalizer = getContentCanonicalizer(ArtifactType.XML); - - TypedContent content = resourceToTypedContentHandle("xml-before.xml"); - String expected = resourceToString("xml-expected.xml"); - - String actual = canonicalizer.canonicalize(content, Collections.emptyMap()).getContent().content(); - Assertions.assertEquals(expected, actual); - } + } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/content/ContentExtractorTest.java b/app/src/test/java/io/apicurio/registry/noprofile/content/ContentExtractorTest.java index 98460086ed..6cb01c41ff 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/content/ContentExtractorTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/content/ContentExtractorTest.java @@ -15,118 +15,69 @@ @QuarkusTest public class ContentExtractorTest extends AbstractResourceTestBase { - private static final String avroFormat = "{\r\n" + - " \"type\": \"record\",\r\n" + 
- " \"namespace\": \"com.example\",\r\n" + - " \"name\": \"%s\",\r\n" + - " \"fields\": [\r\n" + - " { \"name\": \"first\", \"type\": \"string\" },\r\n" + - " { \"name\": \"middle\", \"type\": \"string\" },\r\n" + - " { \"name\": \"last\", \"type\": \"string\" }\r\n" + - " ]\r\n" + - "} "; - - private static final String jsonFormat = "{\r\n" + - " \"$schema\": \"http://json-schema.org/draft-04/schema#\",\r\n" + - " \"title\": \"%s\",\r\n" + - " \"description\": \"%s\",\r\n" + - " \"type\": \"object\",\r\n" + - " \r\n" + - " \"properties\": {\r\n" + - " \r\n" + - " \"id\": {\r\n" + - " \"description\": \"The unique identifier for a product\",\r\n" + - " \"type\": \"integer\"\r\n" + - " },\r\n" + - " \r\n" + - " \"name\": {\r\n" + - " \"description\": \"Name of the product\",\r\n" + - " \"type\": \"string\"\r\n" + - " },\r\n" + - " \r\n" + - " \"price\": {\r\n" + - " \"type\": \"number\",\r\n" + - " \"minimum\": 0,\r\n" + - " \"exclusiveMinimum\": true\r\n" + - " }\r\n" + - " },\r\n" + - " \r\n" + - " \"required\": [\"id\", \"name\", \"price\"]\r\n" + - "}"; - - private static final String openapiFormat = "{\r\n" + - " \"openapi\": \"3.0.2\",\r\n" + - " \"info\": {\r\n" + - " \"title\": \"%s\",\r\n" + - " \"version\": \"1.0.0\",\r\n" + - " \"description\": \"%s\"\r\n" + - " }\r\n" + - "}"; - - private static final String asyncapiFormat = "{\r\n" + - " \"asyncapi\" : \"2.0.0\",\r\n" + - " \"info\" : {\r\n" + - " \"title\": \"%s\",\r\n" + - " \"description\": \"%s\",\r\n" + - " \"version\": \"1.0.1\"\r\n" + - " }\r\n" + - "}"; - - private static final String wsdlFormat = "\r\n" + - "\r\n" + - "\r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - "\r\n" + - ""; - - private static final String xsdFormat = "\r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " 
\r\n" + - "\r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - " \r\n" + - ""; + private static final String avroFormat = "{\r\n" + " \"type\": \"record\",\r\n" + + " \"namespace\": \"com.example\",\r\n" + " \"name\": \"%s\",\r\n" + + " \"fields\": [\r\n" + " { \"name\": \"first\", \"type\": \"string\" },\r\n" + + " { \"name\": \"middle\", \"type\": \"string\" },\r\n" + + " { \"name\": \"last\", \"type\": \"string\" }\r\n" + " ]\r\n" + "} "; + + private static final String jsonFormat = "{\r\n" + + " \"$schema\": \"http://json-schema.org/draft-04/schema#\",\r\n" + " \"title\": \"%s\",\r\n" + + " \"description\": \"%s\",\r\n" + " \"type\": \"object\",\r\n" + " \r\n" + + " \"properties\": {\r\n" + " \r\n" + " \"id\": {\r\n" + + " \"description\": \"The unique identifier for a product\",\r\n" + + " \"type\": \"integer\"\r\n" + " },\r\n" + " \r\n" + " \"name\": {\r\n" + + " \"description\": \"Name of the product\",\r\n" + " \"type\": \"string\"\r\n" + + " },\r\n" + " \r\n" + " \"price\": {\r\n" + + " \"type\": \"number\",\r\n" + " \"minimum\": 0,\r\n" + + " \"exclusiveMinimum\": true\r\n" + " }\r\n" + " },\r\n" + " \r\n" + + " \"required\": [\"id\", \"name\", \"price\"]\r\n" + "}"; + + private static final String openapiFormat = "{\r\n" + " \"openapi\": \"3.0.2\",\r\n" + + " \"info\": {\r\n" + " \"title\": \"%s\",\r\n" + " \"version\": \"1.0.0\",\r\n" + + " \"description\": \"%s\"\r\n" + " }\r\n" + "}"; + + private static final String asyncapiFormat = "{\r\n" + " \"asyncapi\" : \"2.0.0\",\r\n" + + " \"info\" : {\r\n" + " \"title\": \"%s\",\r\n" + " \"description\": \"%s\",\r\n" + + " \"version\": \"1.0.1\"\r\n" + " }\r\n" + "}"; + + private static final String wsdlFormat = "\r\n" + + "\r\n" + "\r\n" + " \r\n" + + " \r\n" + + " \r\n" + " \r\n" + + " \r\n" + + " \r\n" + + " \r\n" + " \r\n" + + " \r\n" + " \r\n" + + " \r\n" + " \r\n" + + " \r\n" + + " \r\n" + " \r\n" + + " \r\n" + " \r\n" + " \r\n" + "\r\n" 
+ + ""; + + private static final String xsdFormat = "\r\n" + + " \r\n" + + " \r\n" + " \r\n" + + " \r\n" + + " \r\n" + " \r\n" + + " \r\n" + " \r\n" + + "\r\n" + " \r\n" + " \r\n" + + " \r\n" + + " \r\n" + + " \r\n" + + " \r\n" + + " \r\n" + " \r\n" + + " \r\n" + + " \r\n" + ""; final String groupId = getClass().getSimpleName(); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/maven/DownloadRegistryMojoTest.java b/app/src/test/java/io/apicurio/registry/noprofile/maven/DownloadRegistryMojoTest.java index e463d58531..7fdc26151e 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/maven/DownloadRegistryMojoTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/maven/DownloadRegistryMojoTest.java @@ -32,8 +32,10 @@ public void testDownloadIds() throws Exception { String groupId = DownloadRegistryMojoTest.class.getName(); String artifactId = generateArtifactId(); - Schema schema = Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.NULL))); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, schema.toString(), ContentTypes.APPLICATION_JSON); + Schema schema = Schema.createUnion( + Arrays.asList(Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.NULL))); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, + schema.toString(), ContentTypes.APPLICATION_JSON); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); List artifacts = new ArrayList<>(); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegisterRegistryMojoTest.java b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegisterRegistryMojoTest.java index a2f12ea315..40468183db 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegisterRegistryMojoTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegisterRegistryMojoTest.java @@ -27,8 +27,10 @@ public void createMojo() { 
public void testRegister() throws IOException, MojoFailureException, MojoExecutionException { super.testRegister(mojo, groupId); - Assertions.assertNotNull(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(KEY_SUBJECT).get()); - Assertions.assertNotNull(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(VALUE_SUBJECT).get()); + Assertions.assertNotNull( + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(KEY_SUBJECT).get()); + Assertions.assertNotNull( + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(VALUE_SUBJECT).get()); } @Test @@ -36,7 +38,9 @@ public void testSkipRegister() throws IOException, MojoFailureException, MojoExe this.mojo.setSkip(true); super.testRegister(mojo, groupId); - Assertions.assertThrows(Exception.class, () -> clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(KEY_SUBJECT).get()); - Assertions.assertThrows(Exception.class, () -> clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(VALUE_SUBJECT).get()); + Assertions.assertThrows(Exception.class, + () -> clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(KEY_SUBJECT).get()); + Assertions.assertThrows(Exception.class, + () -> clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(VALUE_SUBJECT).get()); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoTestBase.java b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoTestBase.java index 3d7b298006..200b6305bc 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoTestBase.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoTestBase.java @@ -48,9 +48,11 @@ protected void writeContent(File outputPath, byte[] content) throws IOException } } - protected void testRegister(RegisterRegistryMojo mojo, String groupId) throws IOException, MojoFailureException, MojoExecutionException { + protected void testRegister(RegisterRegistryMojo mojo, String groupId) + throws 
IOException, MojoFailureException, MojoExecutionException { Schema keySchema = Schema.create(Schema.Type.STRING); - Schema valueSchema = Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.NULL))); + Schema valueSchema = Schema.createUnion( + Arrays.asList(Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.NULL))); File keySchemaFile = new File(this.tempDirectory, KEY_SUBJECT + ".avsc"); File valueSchemaFile = new File(this.tempDirectory, VALUE_SUBJECT + ".avsc"); writeContent(keySchemaFile, keySchema.toString(true).getBytes(StandardCharsets.UTF_8)); @@ -65,7 +67,8 @@ protected void testRegister(RegisterRegistryMojo mojo, String groupId) throws IO mojo.execute(); } - private static List createArtifacts(String groupId, File keySchemaFile, File valueSchemaFile) { + private static List createArtifacts(String groupId, File keySchemaFile, + File valueSchemaFile) { List artifacts = new ArrayList<>(); RegisterArtifact keySchemaArtifact = new RegisterArtifact(); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithAutoReferencesTest.java b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithAutoReferencesTest.java index df4fc9a569..77e4c47936 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithAutoReferencesTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithAutoReferencesTest.java @@ -32,7 +32,6 @@ public class RegistryMojoWithAutoReferencesTest extends RegistryMojoTestBase { private static final String AVSC_SCHEMA_EXTENSION = ".avsc"; private static final String JSON_SCHEMA_EXTENSION = ".json"; - RegisterRegistryMojo registerMojo; DownloadRegistryMojo downloadMojo; @@ -52,7 +51,8 @@ public void autoRegisterAvroWithReferences() throws Exception { File tradeRawFile = new File(getClass().getResource("TradeRawArray.avsc").getFile()); - Set avroFiles = 
Arrays.stream(Objects.requireNonNull(tradeRawFile.getParentFile().listFiles((dir, name) -> name.endsWith(AVSC_SCHEMA_EXTENSION)))) + Set avroFiles = Arrays.stream(Objects.requireNonNull( + tradeRawFile.getParentFile().listFiles((dir, name) -> name.endsWith(AVSC_SCHEMA_EXTENSION)))) .map(file -> { FileInputStream fis = null; try { @@ -60,8 +60,7 @@ public void autoRegisterAvroWithReferences() throws Exception { } catch (FileNotFoundException e) { } return IoUtil.toString(fis); - }) - .collect(Collectors.toSet()); + }).collect(Collectors.toSet()); RegisterArtifact tradeRawArtifact = new RegisterArtifact(); tradeRawArtifact.setGroupId(groupId); @@ -74,28 +73,27 @@ public void autoRegisterAvroWithReferences() throws Exception { registerMojo.setArtifacts(Collections.singletonList(tradeRawArtifact)); registerMojo.execute(); - //Assertions + // Assertions validateStructure(groupId, artifactId, 1, 3, avroFiles); } @Test public void autoRegisterProtoWithReferences() throws Exception { - //Preparation + // Preparation String groupId = "autoRegisterProtoWithReferences"; String artifactId = "tableNotification"; File tableNotificationFile = new File(getClass().getResource("table_notification.proto").getFile()); - Set protoFiles = Arrays.stream(Objects.requireNonNull(tableNotificationFile.getParentFile().listFiles((dir, name) -> name.endsWith(PROTO_SCHEMA_EXTENSION)))) - .map(file -> { + Set protoFiles = Arrays.stream(Objects.requireNonNull(tableNotificationFile.getParentFile() + .listFiles((dir, name) -> name.endsWith(PROTO_SCHEMA_EXTENSION)))).map(file -> { FileInputStream fis = null; try { fis = new FileInputStream(file); } catch (FileNotFoundException e) { } return IoUtil.toString(fis).trim(); - }) - .collect(Collectors.toSet()); + }).collect(Collectors.toSet()); RegisterArtifact tableNotification = new RegisterArtifact(); tableNotification.setGroupId(groupId); @@ -107,22 +105,23 @@ public void autoRegisterProtoWithReferences() throws Exception { 
registerMojo.setArtifacts(Collections.singletonList(tableNotification)); - //Execution + // Execution registerMojo.execute(); - //Assertions + // Assertions validateStructure(groupId, artifactId, 2, 4, protoFiles); } @Test public void autoRegisterJsonSchemaWithReferences() throws Exception { - //Preparation + // Preparation String groupId = "autoRegisterJsonSchemaWithReferences"; String artifactId = "citizen"; File citizenFile = new File(getClass().getResource("citizen.json").getFile()); - Set protoFiles = Arrays.stream(Objects.requireNonNull(citizenFile.getParentFile().listFiles((dir, name) -> name.endsWith(JSON_SCHEMA_EXTENSION)))) + Set protoFiles = Arrays.stream(Objects.requireNonNull( + citizenFile.getParentFile().listFiles((dir, name) -> name.endsWith(JSON_SCHEMA_EXTENSION)))) .map(file -> { FileInputStream fis = null; try { @@ -130,8 +129,7 @@ public void autoRegisterJsonSchemaWithReferences() throws Exception { } catch (FileNotFoundException e) { } return IoUtil.toString(fis).trim(); - }) - .collect(Collectors.toSet()); + }).collect(Collectors.toSet()); RegisterArtifact citizen = new RegisterArtifact(); citizen.setGroupId(groupId); @@ -143,67 +141,52 @@ public void autoRegisterJsonSchemaWithReferences() throws Exception { registerMojo.setArtifacts(Collections.singletonList(citizen)); - //Execution + // Execution registerMojo.execute(); - //Assertions + // Assertions validateStructure(groupId, artifactId, 3, 4, protoFiles); } - private void validateStructure(String groupId, String artifactId, int expectedMainReferences, int expectedTotalArtifacts, Set originalContents) throws Exception { - final VersionMetaData artifactWithReferences = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); - final String mainContent = - new String( - clientV3 - .groups() - .byGroupId(groupId) - .artifacts() - .byArtifactId(artifactId) - .versions() - 
.byVersionExpression(artifactWithReferences.getVersion()) - .content() - .get() - .readAllBytes(), StandardCharsets.UTF_8); - - Assertions.assertTrue(originalContents.contains(mainContent)); //The main content has been registered as-is. - - final List mainArtifactReferences = clientV3.ids().globalIds().byGlobalId(artifactWithReferences.getGlobalId()).references().get(); - - //The main artifact has the expected number of references + private void validateStructure(String groupId, String artifactId, int expectedMainReferences, + int expectedTotalArtifacts, Set originalContents) throws Exception { + final VersionMetaData artifactWithReferences = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + final String mainContent = new String(clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression(artifactWithReferences.getVersion()) + .content().get().readAllBytes(), StandardCharsets.UTF_8); + + Assertions.assertTrue(originalContents.contains(mainContent)); // The main content has been registered + // as-is. + + final List mainArtifactReferences = clientV3.ids().globalIds() + .byGlobalId(artifactWithReferences.getGlobalId()).references().get(); + + // The main artifact has the expected number of references Assertions.assertEquals(expectedMainReferences, mainArtifactReferences.size()); - //Validate all the contents are registered as they are in the file system. + // Validate all the contents are registered as they are in the file system. validateReferences(mainArtifactReferences, originalContents); - //The total number of artifacts for the directory structure is the expected. - Assertions.assertEquals(expectedTotalArtifacts, clientV3.groups().byGroupId(groupId).artifacts().get().getCount().intValue()); + // The total number of artifacts for the directory structure is the expected. 
+ Assertions.assertEquals(expectedTotalArtifacts, + clientV3.groups().byGroupId(groupId).artifacts().get().getCount().intValue()); } - private void validateReferences(List artifactReferences, Set loadedContents) throws Exception { + private void validateReferences(List artifactReferences, Set loadedContents) + throws Exception { for (ArtifactReference artifactReference : artifactReferences) { - String referenceContent = new String( - clientV3 - .groups() - .byGroupId(artifactReference.getGroupId()) - .artifacts() - .byArtifactId(artifactReference.getArtifactId()) - .versions() - .byVersionExpression(artifactReference.getVersion()) - .content() - .get() - .readAllBytes(), StandardCharsets.UTF_8); - VersionMetaData referenceMetadata = clientV3 - .groups() - .byGroupId(artifactReference.getGroupId()) - .artifacts() - .byArtifactId(artifactReference.getArtifactId()) - .versions() - .byVersionExpression("branch=latest") - .get() - ; + String referenceContent = new String(clientV3.groups().byGroupId(artifactReference.getGroupId()) + .artifacts().byArtifactId(artifactReference.getArtifactId()).versions() + .byVersionExpression(artifactReference.getVersion()).content().get().readAllBytes(), + StandardCharsets.UTF_8); + VersionMetaData referenceMetadata = clientV3.groups().byGroupId(artifactReference.getGroupId()) + .artifacts().byArtifactId(artifactReference.getArtifactId()).versions() + .byVersionExpression("branch=latest").get(); Assertions.assertTrue(loadedContents.contains(referenceContent.trim())); - List nestedReferences = clientV3.ids().globalIds().byGlobalId(referenceMetadata.getGlobalId()).references().get(); + List nestedReferences = clientV3.ids().globalIds() + .byGlobalId(referenceMetadata.getGlobalId()).references().get(); if (!nestedReferences.isEmpty()) { validateReferences(nestedReferences, loadedContents); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithMinifyTest.java 
b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithMinifyTest.java index b052917d5d..94d10308b3 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithMinifyTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithMinifyTest.java @@ -47,19 +47,24 @@ public void testMinify() throws Exception { registerMojo.execute(); // Wait for the artifact to be created. - InputStream artifactInputStream = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("userInfoMinified").versions().byVersionExpression("branch=latest").content().get(); + InputStream artifactInputStream = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId("userInfoMinified").versions().byVersionExpression("branch=latest").content() + .get(); String artifactContent = new String(artifactInputStream.readAllBytes(), StandardCharsets.UTF_8); - Assertions.assertEquals("{\"type\":\"record\",\"name\":\"userInfo\",\"namespace\":\"my.example\",\"fields\":[{\"name\":\"age\",\"type\":\"int\"}]}", artifactContent); + Assertions.assertEquals( + "{\"type\":\"record\",\"name\":\"userInfo\",\"namespace\":\"my.example\",\"fields\":[{\"name\":\"age\",\"type\":\"int\"}]}", + artifactContent); // Wait for the artifact to be created. 
- artifactInputStream = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("userInfoNotMinified").versions().byVersionExpression("branch=latest").content().get(); + artifactInputStream = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId("userInfoNotMinified").versions().byVersionExpression("branch=latest").content() + .get(); artifactContent = new String(artifactInputStream.readAllBytes(), StandardCharsets.UTF_8); - Assertions.assertEquals("{\n" + - " \"type\" : \"record\",\n" + - " \"name\" : \"userInfo\",\n" + - " \"namespace\" : \"my.example\",\n" + - " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\n" + - "}", artifactContent); + Assertions.assertEquals( + "{\n" + " \"type\" : \"record\",\n" + " \"name\" : \"userInfo\",\n" + + " \"namespace\" : \"my.example\",\n" + + " \"fields\" : [{\"name\" : \"age\", \"type\" : \"int\"}]\n" + "}", + artifactContent); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithReferencesTest.java b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithReferencesTest.java index 7837cdf7c4..b2c7b64ff4 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithReferencesTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/maven/RegistryMojoWithReferencesTest.java @@ -64,11 +64,9 @@ public void testMojosWithReferences() throws IOException, MojoFailureException, tradeKeyArtifact.setReferences(Collections.singletonList(exchangeArtifact)); tradeRawArtifact.setReferences(Collections.singletonList(tradeKeyArtifact)); - registerMojo.setArtifacts(Collections.singletonList(tradeRawArtifact)); registerMojo.execute(); - DownloadArtifact tradeRawDownload = new DownloadArtifact(); tradeRawDownload.setArtifactId("tradeRaw"); tradeRawDownload.setGroupId(groupId); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/maven/TestUpdateRegistryMojoTest.java 
b/app/src/test/java/io/apicurio/registry/noprofile/maven/TestUpdateRegistryMojoTest.java index 3b64f39295..750f0921d1 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/maven/TestUpdateRegistryMojoTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/maven/TestUpdateRegistryMojoTest.java @@ -36,15 +36,12 @@ public void testCompatibility() throws Exception { String groupId = TestUpdateRegistryMojoTest.class.getName(); String artifactId = generateArtifactId(); - Schema schema = new Schema.Parser().parse("{\"namespace\": \"example.avro\"," + - " \"type\": \"record\"," + - " \"name\": \"user\"," + - " \"fields\": [" + - " {\"name\": \"name\", \"type\": \"string\"}," + - " {\"name\": \"favorite_number\", \"type\": \"int\"}" + - " ]" + - "}"); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, schema.toString(), ContentTypes.APPLICATION_JSON); + Schema schema = new Schema.Parser() + .parse("{\"namespace\": \"example.avro\"," + " \"type\": \"record\"," + " \"name\": \"user\"," + + " \"fields\": [" + " {\"name\": \"name\", \"type\": \"string\"}," + + " {\"name\": \"favorite_number\", \"type\": \"int\"}" + " ]" + "}"); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, + schema.toString(), ContentTypes.APPLICATION_JSON); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); CreateRule createRule = new CreateRule(); @@ -54,20 +51,18 @@ public void testCompatibility() throws Exception { // Wait for the rule configuration to be set. 
TestUtils.retry(() -> { - Rule rconfig = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().byRuleType(RuleType.COMPATIBILITY.getValue()).get(); + Rule rconfig = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules() + .byRuleType(RuleType.COMPATIBILITY.getValue()).get(); Assertions.assertEquals("BACKWARD", rconfig.getConfig()); }); // add new field - Schema schema2 = new Schema.Parser().parse("{\"namespace\": \"example.avro\"," + - " \"type\": \"record\"," + - " \"name\": \"user\"," + - " \"fields\": [" + - " {\"name\": \"name\", \"type\": \"string\"}," + - " {\"name\": \"favorite_number\", \"type\": \"string\"}," + - " {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}" + - " ]" + - "}"); + Schema schema2 = new Schema.Parser() + .parse("{\"namespace\": \"example.avro\"," + " \"type\": \"record\"," + " \"name\": \"user\"," + + " \"fields\": [" + " {\"name\": \"name\", \"type\": \"string\"}," + + " {\"name\": \"favorite_number\", \"type\": \"string\"}," + + " {\"name\": \"favorite_color\", \"type\": \"string\", \"default\": \"green\"}" + + " ]" + "}"); File file = new File(tempDirectory, artifactId + ".avsc"); writeContent(file, schema2.toString().getBytes(StandardCharsets.UTF_8)); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/resolver/SchemaResolverTest.java b/app/src/test/java/io/apicurio/registry/noprofile/resolver/SchemaResolverTest.java index d812ed2b0a..d8543f642b 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/resolver/SchemaResolverTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/resolver/SchemaResolverTest.java @@ -48,7 +48,8 @@ public void testDynamicStrategy() throws Exception { resolver.configure(config, new SchemaParser() { @Override - public Schema parseSchema(byte[] rawSchema, Map> resolvedReferences) { + public Schema parseSchema(byte[] rawSchema, + Map> resolvedReferences) { return null; } @@ -76,13 +77,15 @@ public 
String artifactType() { } }); - Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); + Schema schema = new Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); String artifactId = TestUtils.generateArtifactId(); createArtifact(artifactId, ArtifactType.AVRO, schema.toString(), ContentTypes.APPLICATION_JSON); GenericRecord avroRecord = new GenericData.Record(schema); avroRecord.put("bar", "somebar"); - Record record = new CustomResolverRecord(avroRecord, ArtifactReference.builder().groupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifactId(artifactId).build()); + Record record = new CustomResolverRecord(avroRecord, ArtifactReference.builder() + .groupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifactId(artifactId).build()); var lookup = resolver.resolveSchema(record); assertNull(lookup.getGroupId()); @@ -90,8 +93,12 @@ public String artifactType() { assertEquals(schema.toString(), new String(lookup.getParsedSchema().getRawSchema())); assertNull(lookup.getParsedSchema().getParsedSchema()); - var runtimeException = Assertions.assertThrows(RuntimeException.class, () -> resolver.resolveSchema(new CustomResolverRecord(avroRecord, ArtifactReference.builder().groupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifactId("foo").build()))); - io.apicurio.registry.rest.client.models.Error error = (io.apicurio.registry.rest.client.models.Error) runtimeException.getCause(); + var runtimeException = Assertions.assertThrows(RuntimeException.class, + () -> resolver.resolveSchema(new CustomResolverRecord(avroRecord, + ArtifactReference.builder().groupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()) + .artifactId("foo").build()))); + io.apicurio.registry.rest.client.models.Error error = (io.apicurio.registry.rest.client.models.Error) runtimeException + .getCause(); 
assertEquals("VersionNotFoundException", error.getName()); assertEquals(404, error.getErrorCode()); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v2/LegacyV2ApiTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v2/LegacyV2ApiTest.java index 0da91d4560..3e05e02ee7 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v2/LegacyV2ApiTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v2/LegacyV2ApiTest.java @@ -25,33 +25,22 @@ public void testLegacyLabels() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); String artifactId = "testLegacyLabels"; - this.createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + this.createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Update the artifact meta-data List labels = List.of("one", "two", "three"); EditableMetaData metaData = new EditableMetaData(); metaData.setName(artifactId); metaData.setLabels(labels); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(metaData) - .put("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta") - .then() + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(metaData).put("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta").then() .statusCode(204); - + // Get the (updated) artifact meta-data - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta") - .then() - .statusCode(200) - .body("id", equalTo(artifactId)) - .body("version", anything()) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta").then().statusCode(200) + .body("id", 
equalTo(artifactId)).body("version", anything()) .body("labels", equalToObject(labels)); } @@ -60,33 +49,22 @@ public void testLegacyProperties() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); String artifactId = "testLegacyProperties"; - this.createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + this.createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Update the artifact meta-data Map properties = Map.of("one", "one-value", "two", "two-value"); EditableMetaData metaData = new EditableMetaData(); metaData.setName(artifactId); metaData.setProperties(properties); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(metaData) - .put("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta") - .then() + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(metaData).put("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta").then() .statusCode(204); - + // Get the (updated) artifact meta-data - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta") - .then() - .statusCode(200) - .body("id", equalTo(artifactId)) - .body("version", anything()) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta").then().statusCode(200) + .body("id", equalTo(artifactId)).body("version", anything()) .body("properties", equalToObject(properties)); } @@ -95,38 +73,27 @@ public void testLegacyPropertiesWithLabels() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); String artifactId = "testLegacyPropertiesWithLabels"; - this.createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, 
artifactContent, ContentTypes.APPLICATION_JSON); + this.createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); List labels = List.of("label-one", "label-two"); - Map properties = Map.of("property-one", "property-one-value", "property-two", "property-two-value"); + Map properties = Map.of("property-one", "property-one-value", "property-two", + "property-two-value"); // Update the artifact meta-data EditableMetaData metaData = new EditableMetaData(); metaData.setName(artifactId); metaData.setLabels(labels); metaData.setProperties(properties); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(metaData) - .put("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta") - .then() + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(metaData).put("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta").then() .statusCode(204); - + // Get the (updated) artifact meta-data - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta") - .then() - .statusCode(200) - .body("id", equalTo(artifactId)) - .body("version", anything()) - .body("labels", equalToObject(labels)) - .body("properties", equalToObject(properties)); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v2/groups/{groupId}/artifacts/{artifactId}/meta").then().statusCode(200) + .body("id", equalTo(artifactId)).body("version", anything()) + .body("labels", equalToObject(labels)).body("properties", equalToObject(properties)); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/AllYamlTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/AllYamlTest.java index d6c602106d..9dce6d7ba1 100644 --- 
a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/AllYamlTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/AllYamlTest.java @@ -62,40 +62,41 @@ public class AllYamlTest extends AbstractResourceTestBase { operationId: test """; private static String JSON_CONTENT = """ - { - "openapi": "3.0.2", - "info": { - "title": "Empty API", - "version": "1.0.0", - "description": "Just an empty API." - }, - "paths": { - "/test": { - "get": { - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "type": "string" - } - } - }, - "description": "Success." - } - }, - "operationId": "test" - } - } - } - }"""; + { + "openapi": "3.0.2", + "info": { + "title": "Empty API", + "version": "1.0.0", + "description": "Just an empty API." + }, + "paths": { + "/test": { + "get": { + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "type": "string" + } + } + }, + "description": "Success." + } + }, + "operationId": "test" + } + } + } + }"""; @Test public void testCreateYamlArtifact() throws Exception { String groupId = TestUtils.generateGroupId(); String artifactId = TestUtils.generateArtifactId(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, YAML_CONTENT, ContentTypes.APPLICATION_YAML); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + YAML_CONTENT, ContentTypes.APPLICATION_YAML); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); } @@ -104,7 +105,8 @@ public void testCreateYamlArtifactDiscoverType() throws Exception { String groupId = TestUtils.generateGroupId(); String artifactId = TestUtils.generateArtifactId(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, null, YAML_CONTENT, ContentTypes.APPLICATION_YAML); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, null, YAML_CONTENT, + ContentTypes.APPLICATION_YAML); 
clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); } @@ -119,7 +121,8 @@ public void testCreateYamlArtifactWithValidity() throws Exception { clientV3.admin().rules().post(createRule); try { - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, YAML_CONTENT, ContentTypes.APPLICATION_YAML); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + YAML_CONTENT, ContentTypes.APPLICATION_YAML); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); } catch (Error e) { System.out.println("ERROR: " + e.getDetail()); @@ -133,11 +136,14 @@ public void testUpdateYamlArtifact() throws Exception { String groupId = TestUtils.generateGroupId(); String artifactId = TestUtils.generateArtifactId(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, YAML_CONTENT, ContentTypes.APPLICATION_YAML); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + YAML_CONTENT, ContentTypes.APPLICATION_YAML); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); - CreateVersion createVersion = TestUtils.clientCreateVersion(YAML_CONTENT_V2, ContentTypes.APPLICATION_YAML); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + CreateVersion createVersion = TestUtils.clientCreateVersion(YAML_CONTENT_V2, + ContentTypes.APPLICATION_YAML); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); } @Test @@ -145,7 +151,8 @@ public void testUpdateYamlArtifactWithCompatibility() throws Exception { String groupId = TestUtils.generateGroupId(); String artifactId = TestUtils.generateArtifactId(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, YAML_CONTENT, ContentTypes.APPLICATION_YAML); + CreateArtifact createArtifact = 
TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + YAML_CONTENT, ContentTypes.APPLICATION_YAML); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); // Enable the compatibility rule for the artifact @@ -155,8 +162,10 @@ public void testUpdateYamlArtifactWithCompatibility() throws Exception { clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().post(createRule); // Now create a new version - CreateVersion createVersion = TestUtils.clientCreateVersion(YAML_CONTENT_V2, ContentTypes.APPLICATION_YAML); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + CreateVersion createVersion = TestUtils.clientCreateVersion(YAML_CONTENT_V2, + ContentTypes.APPLICATION_YAML); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); } @Test @@ -164,7 +173,8 @@ public void testUpdateYamlArtifactWithIntegrity() throws Exception { String groupId = TestUtils.generateGroupId(); String artifactId = TestUtils.generateArtifactId(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, YAML_CONTENT, ContentTypes.APPLICATION_YAML); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + YAML_CONTENT, ContentTypes.APPLICATION_YAML); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); // Enable the compatibility rule for the artifact @@ -174,9 +184,11 @@ public void testUpdateYamlArtifactWithIntegrity() throws Exception { clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().post(createRule); // Now create a new version with a missing $ref - CreateVersion createVersion = TestUtils.clientCreateVersion(YAML_CONTENT_WITH_REF, ContentTypes.APPLICATION_YAML); + CreateVersion createVersion = TestUtils.clientCreateVersion(YAML_CONTENT_WITH_REF, + ContentTypes.APPLICATION_YAML); 
Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); }); } @@ -189,41 +201,58 @@ public void testCanonicalContent() throws Exception { String groupId = TestUtils.generateGroupId(); String artifactId = TestUtils.generateArtifactId(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, yamlContent, ContentTypes.APPLICATION_YAML); - CreateArtifactResponse response = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + yamlContent, ContentTypes.APPLICATION_YAML); + CreateArtifactResponse response = clientV3.groups().byGroupId(groupId).artifacts() + .post(createArtifact); // Search for the version by its content as YAML - VersionSearchResults results = clientV3.search().versions().post(IoUtil.toStream(yamlContent), ContentTypes.APPLICATION_YAML); + VersionSearchResults results = clientV3.search().versions().post(IoUtil.toStream(yamlContent), + ContentTypes.APPLICATION_YAML); Assertions.assertNotNull(results); Assertions.assertEquals(1, results.getCount()); - Assertions.assertEquals(response.getVersion().getArtifactId(), results.getVersions().get(0).getArtifactId()); - Assertions.assertEquals(response.getVersion().getGroupId(), results.getVersions().get(0).getGroupId()); - Assertions.assertEquals(response.getVersion().getVersion(), results.getVersions().get(0).getVersion()); - Assertions.assertEquals(response.getVersion().getGlobalId(), results.getVersions().get(0).getGlobalId()); + Assertions.assertEquals(response.getVersion().getArtifactId(), + results.getVersions().get(0).getArtifactId()); + Assertions.assertEquals(response.getVersion().getGroupId(), + 
results.getVersions().get(0).getGroupId()); + Assertions.assertEquals(response.getVersion().getVersion(), + results.getVersions().get(0).getVersion()); + Assertions.assertEquals(response.getVersion().getGlobalId(), + results.getVersions().get(0).getGlobalId()); // Search for the version by its canonical content as YAML - results = clientV3.search().versions().post(IoUtil.toStream(yamlContent), ContentTypes.APPLICATION_YAML, config -> { - config.queryParameters.canonical = true; - config.queryParameters.artifactType = ArtifactType.OPENAPI; - }); + results = clientV3.search().versions().post(IoUtil.toStream(yamlContent), + ContentTypes.APPLICATION_YAML, config -> { + config.queryParameters.canonical = true; + config.queryParameters.artifactType = ArtifactType.OPENAPI; + }); Assertions.assertNotNull(results); Assertions.assertEquals(1, results.getCount()); - Assertions.assertEquals(response.getVersion().getArtifactId(), results.getVersions().get(0).getArtifactId()); - Assertions.assertEquals(response.getVersion().getGroupId(), results.getVersions().get(0).getGroupId()); - Assertions.assertEquals(response.getVersion().getVersion(), results.getVersions().get(0).getVersion()); - Assertions.assertEquals(response.getVersion().getGlobalId(), results.getVersions().get(0).getGlobalId()); + Assertions.assertEquals(response.getVersion().getArtifactId(), + results.getVersions().get(0).getArtifactId()); + Assertions.assertEquals(response.getVersion().getGroupId(), + results.getVersions().get(0).getGroupId()); + Assertions.assertEquals(response.getVersion().getVersion(), + results.getVersions().get(0).getVersion()); + Assertions.assertEquals(response.getVersion().getGlobalId(), + results.getVersions().get(0).getGlobalId()); // Search for the version again by its canonical content as JSON - results = clientV3.search().versions().post(IoUtil.toStream(jsonContent), ContentTypes.APPLICATION_JSON, config -> { - config.queryParameters.canonical = true; - 
config.queryParameters.artifactType = ArtifactType.OPENAPI; - }); + results = clientV3.search().versions().post(IoUtil.toStream(jsonContent), + ContentTypes.APPLICATION_JSON, config -> { + config.queryParameters.canonical = true; + config.queryParameters.artifactType = ArtifactType.OPENAPI; + }); Assertions.assertNotNull(results); Assertions.assertEquals(1, results.getCount()); - Assertions.assertEquals(response.getVersion().getArtifactId(), results.getVersions().get(0).getArtifactId()); - Assertions.assertEquals(response.getVersion().getGroupId(), results.getVersions().get(0).getGroupId()); - Assertions.assertEquals(response.getVersion().getVersion(), results.getVersions().get(0).getVersion()); - Assertions.assertEquals(response.getVersion().getGlobalId(), results.getVersions().get(0).getGlobalId()); + Assertions.assertEquals(response.getVersion().getArtifactId(), + results.getVersions().get(0).getArtifactId()); + Assertions.assertEquals(response.getVersion().getGroupId(), + results.getVersions().get(0).getGroupId()); + Assertions.assertEquals(response.getVersion().getVersion(), + results.getVersions().get(0).getVersion()); + Assertions.assertEquals(response.getVersion().getGlobalId(), + results.getVersions().get(0).getGlobalId()); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/BranchesTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/BranchesTest.java index 00229c2c1c..897b27d676 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/BranchesTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/BranchesTest.java @@ -30,12 +30,14 @@ public void testLatestBranch() throws Exception { createArtifact(groupId, artifactId, ArtifactType.JSON, "{}", ContentTypes.APPLICATION_JSON); createArtifactVersion(groupId, artifactId, "{}", ContentTypes.APPLICATION_JSON); - BranchMetaData latest = 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("latest").get(); + BranchMetaData latest = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .branches().byBranchId("latest").get(); Assertions.assertNotNull(latest); Assertions.assertEquals("latest", latest.getBranchId()); Assertions.assertEquals(true, latest.getSystemDefined()); - VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("latest").versions().get(); + VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).branches().byBranchId("latest").versions().get(); Assertions.assertEquals(2, versions.getCount()); } @@ -49,7 +51,8 @@ public void testCreateBranch() throws Exception { CreateBranch createBranch = new CreateBranch(); createBranch.setBranchId("1.x"); createBranch.setDescription("Version 1.x"); - BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .branches().post(createBranch); Assertions.assertNotNull(branch); Assertions.assertEquals(groupId, branch.getGroupId()); @@ -71,7 +74,8 @@ public void testCreateBranchWithVersions() throws Exception { createBranch.setBranchId("1.x"); createBranch.setDescription("Version 1.x"); createBranch.setVersions(List.of("1", "2")); - BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .branches().post(createBranch); Assertions.assertNotNull(branch); Assertions.assertEquals(groupId, branch.getGroupId()); @@ -79,7 +83,8 @@ public void testCreateBranchWithVersions() throws Exception { Assertions.assertEquals("1.x", 
branch.getBranchId()); Assertions.assertEquals("Version 1.x", branch.getDescription()); - VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1.x").versions().get(); + VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).branches().byBranchId("1.x").versions().get(); Assertions.assertEquals(2, versions.getCount()); // Try to create branch with versions that do not exist. @@ -102,10 +107,12 @@ public void testGetBranch() throws Exception { CreateBranch createBranch = new CreateBranch(); createBranch.setBranchId("1.x"); createBranch.setDescription("Version 1.x"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Fetch that branch and assert - BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1.x").get(); + BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .branches().byBranchId("1.x").get(); Assertions.assertNotNull(branch); Assertions.assertEquals(groupId, branch.getGroupId()); Assertions.assertEquals(artifactId, branch.getArtifactId()); @@ -114,12 +121,14 @@ public void testGetBranch() throws Exception { // Get a branch that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("invalid").get(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("invalid").get(); }); // Get a branch from an artifact that doesn't exist. 
Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches().byBranchId("1.x").get(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches() + .byBranchId("1.x").get(); }); } @@ -135,19 +144,23 @@ public void testGetBranches() throws Exception { CreateBranch createBranch = new CreateBranch(); createBranch.setBranchId("1.x"); createBranch.setDescription("Version 1.x"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); createBranch = new CreateBranch(); createBranch.setBranchId("2.x"); createBranch.setDescription("Version 2.x"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); createBranch = new CreateBranch(); createBranch.setBranchId("3.x"); createBranch.setDescription("Version 3.x"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); - BranchSearchResults results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().get(); + BranchSearchResults results = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).branches().get(); Assertions.assertNotNull(results); // There should be FOUR: latest, 1.x, 2.x, 3.x Assertions.assertEquals(4, results.getCount()); @@ -174,10 +187,12 @@ public void testUpdateBranch() throws Exception { CreateBranch createBranch = new CreateBranch(); createBranch.setBranchId("1.x"); createBranch.setDescription("Version 1.x"); - 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Make sure it really exists - BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1.x").get(); + BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .branches().byBranchId("1.x").get(); Assertions.assertNotNull(branch); Assertions.assertEquals(groupId, branch.getGroupId()); Assertions.assertEquals(artifactId, branch.getArtifactId()); @@ -187,10 +202,12 @@ public void testUpdateBranch() throws Exception { // Update it EditableBranchMetaData update = new EditableBranchMetaData(); update.setDescription("Updated version 1.x branch."); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1.x").put(update); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1.x") + .put(update); // Check it now - branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1.x").get(); + branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("1.x").get(); Assertions.assertNotNull(branch); Assertions.assertEquals(groupId, branch.getGroupId()); Assertions.assertEquals(artifactId, branch.getArtifactId()); @@ -198,12 +215,14 @@ public void testUpdateBranch() throws Exception { // Update a branch that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("invalid").put(update); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("invalid").put(update); }); // Update a branch in an artifact that does not exist 
Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches().byBranchId("1.x").put(update); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches() + .byBranchId("1.x").put(update); }); } @@ -220,28 +239,34 @@ public void testDeleteBranch() throws Exception { CreateBranch createBranch = new CreateBranch(); createBranch.setBranchId("1.x"); createBranch.setDescription("Version 1.x"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Create a branch createBranch = new CreateBranch(); createBranch.setBranchId("2.x"); createBranch.setDescription("Version 2.x"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Create a branch createBranch = new CreateBranch(); createBranch.setBranchId("3.x"); createBranch.setDescription("Version 3.x"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); - var results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().get(); + var results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .get(); Assertions.assertEquals(4, results.getCount()); - var bmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2.x").get(); + var bmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("2.x").get(); Assertions.assertEquals("2.x", bmd.getBranchId()); // Now delete branch 2.x - 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2.x").delete(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2.x") + .delete(); // Assert that it's gone results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().get(); @@ -249,17 +274,20 @@ public void testDeleteBranch() throws Exception { // Try to get the branch that was deleted Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2.x").get(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("2.x").get(); }); // Try to delete a branch that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("invalid").delete(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("invalid").delete(); }); // Try to delete a branch of an artifact that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches().byBranchId("2.x").delete(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches() + .byBranchId("2.x").delete(); }); } @@ -273,65 +301,77 @@ public void testGetVersionsInBranch() throws Exception { // Create v2 CreateVersion createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v2"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v3 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); 
createVersion.setName("v3"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v4 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v4"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v5 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v5"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create a branch CreateBranch createBranch = new CreateBranch(); createBranch.setBranchId("2-3"); createBranch.setDescription("Contains versions 2, 3"); createBranch.setVersions(List.of("2", "3")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Create a branch createBranch = new CreateBranch(); createBranch.setBranchId("2-4"); createBranch.setDescription("Contains versions 2, 3, 4"); createBranch.setVersions(List.of("2", "3", "4")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Create a branch createBranch = new CreateBranch(); createBranch.setBranchId("5"); createBranch.setDescription("Contains versions 2, 3, 4"); createBranch.setVersions(List.of("5")); - 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Now make sure the branches contain the versions we think. - var results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2-3").versions().get(); + var results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("2-3").versions().get(); Assertions.assertEquals(2, results.getCount()); Assertions.assertEquals("v3", results.getVersions().get(0).getName()); Assertions.assertEquals("v2", results.getVersions().get(1).getName()); - results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2-4").versions().get(); + results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("2-4").versions().get(); Assertions.assertEquals(3, results.getCount()); Assertions.assertEquals("v4", results.getVersions().get(0).getName()); Assertions.assertEquals("v3", results.getVersions().get(1).getName()); Assertions.assertEquals("v2", results.getVersions().get(2).getName()); - results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("5").versions().get(); + results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("5").versions().get(); Assertions.assertEquals(1, results.getCount()); Assertions.assertEquals("v5", results.getVersions().get(0).getName()); // Get versions in a branch that does not exist. 
Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("invalid").versions().get(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("invalid").versions().get(); }); // Get versions in a branch of an artifact that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches().byBranchId("2-4").versions().get(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches() + .byBranchId("2-4").versions().get(); }); } @@ -347,28 +387,34 @@ public void testReplaceBranchVersions() throws Exception { // Create v2 CreateVersion createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v2"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v3 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v3"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v4 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v4"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v5 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v5"); - 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create a branch CreateBranch createBranch = new CreateBranch(); createBranch.setBranchId("test-branch"); createBranch.setVersions(List.of("2", "3", "4")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Make sure the branch has 2,3,4 on it. - var results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("test-branch").versions().get(); + var results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("test-branch").versions().get(); Assertions.assertEquals(3, results.getCount()); Assertions.assertEquals("v4", results.getVersions().get(0).getName()); Assertions.assertEquals("v3", results.getVersions().get(1).getName()); @@ -376,10 +422,12 @@ public void testReplaceBranchVersions() throws Exception { // Now replace the versions on the branch ReplaceBranchVersions newVersions = replaceVersions("1", "3", "5"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("test-branch").versions().put(newVersions); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("test-branch").versions().put(newVersions); // Make sure the branch now has 1,3,5 on it. 
- results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("test-branch").versions().get(); + results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("test-branch").versions().get(); Assertions.assertEquals(3, results.getCount()); Assertions.assertEquals("v5", results.getVersions().get(0).getName()); Assertions.assertEquals("v3", results.getVersions().get(1).getName()); @@ -387,12 +435,14 @@ public void testReplaceBranchVersions() throws Exception { // Replace versions in a branch that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("invalid-branch").versions().put(newVersions); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("invalid-branch").versions().put(newVersions); }); // Replace versions in a branch of an artifact that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches().byBranchId("test-branch").versions().put(newVersions); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches() + .byBranchId("test-branch").versions().put(newVersions); }); } @@ -409,62 +459,81 @@ public void testAppendVersionToBranch() throws Exception { // Create v2 CreateVersion createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v2"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v3 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v3"); - 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v4 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v4"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v5 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v5"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create a branch CreateBranch createBranch = new CreateBranch(); createBranch.setBranchId("1-4"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Create a branch createBranch = new CreateBranch(); createBranch.setBranchId("2-3"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Create a branch createBranch = new CreateBranch(); createBranch.setBranchId("3-5"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Append some versions to the branches - 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1-4").versions().post(addVersion("1")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1-4").versions().post(addVersion("2")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1-4").versions().post(addVersion("3")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1-4").versions().post(addVersion("4")); - - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2-3").versions().post(addVersion("2")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2-3").versions().post(addVersion("3")); - - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("3-5").versions().post(addVersion("3")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("3-5").versions().post(addVersion("4")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("3-5").versions().post(addVersion("5")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1-4") + .versions().post(addVersion("1")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1-4") + .versions().post(addVersion("2")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1-4") + .versions().post(addVersion("3")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1-4") + .versions().post(addVersion("4")); + + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2-3") + .versions().post(addVersion("2")); + 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2-3") + .versions().post(addVersion("3")); + + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("3-5") + .versions().post(addVersion("3")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("3-5") + .versions().post(addVersion("4")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("3-5") + .versions().post(addVersion("5")); // Check the results - make sure the versions are on the branches - var results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("1-4").versions().get(); + var results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("1-4").versions().get(); Assertions.assertEquals(4, results.getCount()); Assertions.assertEquals("v4", results.getVersions().get(0).getName()); Assertions.assertEquals("v3", results.getVersions().get(1).getName()); Assertions.assertEquals("v2", results.getVersions().get(2).getName()); Assertions.assertEquals("v1", results.getVersions().get(3).getName()); - results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("2-3").versions().get(); + results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("2-3").versions().get(); Assertions.assertEquals(2, results.getCount()); Assertions.assertEquals("v3", results.getVersions().get(0).getName()); Assertions.assertEquals("v2", results.getVersions().get(1).getName()); - results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("3-5").versions().get(); + results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("3-5").versions().get(); Assertions.assertEquals(3, 
results.getCount()); Assertions.assertEquals("v5", results.getVersions().get(0).getName()); Assertions.assertEquals("v4", results.getVersions().get(1).getName()); @@ -472,12 +541,14 @@ public void testAppendVersionToBranch() throws Exception { // Append a version to a branch that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("invalid").versions().post(addVersion("3")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("invalid").versions().post(addVersion("3")); }); // Append a version to a branch of an artifact that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches().byBranchId("2-3").versions().post(addVersion("3")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("invalid").branches() + .byBranchId("2-3").versions().post(addVersion("3")); }); } @@ -493,59 +564,75 @@ public void testGetMostRecentVersionFromBranch() throws Exception { // Create v2 CreateVersion createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v2"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v3 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v3"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v4 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v4"); - 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v5 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v5"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create v6 createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setName("v6"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); // Create a branch CreateBranch createBranch = new CreateBranch(); createBranch.setBranchId("evens"); createBranch.setVersions(List.of("2", "4")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Create a branch createBranch = new CreateBranch(); createBranch.setBranchId("odds"); createBranch.setVersions(List.of("1", "3")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .post(createBranch); // Get the most recent version from each branch - VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=evens").get(); + VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=evens").get(); 
Assertions.assertEquals("v4", vmd.getName()); - vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=odds").get(); + vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=odds").get(); Assertions.assertEquals("v3", vmd.getName()); - vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").get(); Assertions.assertEquals("v6", vmd.getName()); // Append versions to the branches. - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("evens").versions().post(addVersion("6")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches().byBranchId("odds").versions().post(addVersion("5")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("evens").versions().post(addVersion("6")); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).branches() + .byBranchId("odds").versions().post(addVersion("5")); // Get the most recent version from each branch - vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=evens").get(); + vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=evens").get(); Assertions.assertEquals("v6", vmd.getName()); - vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=odds").get(); + vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=odds").get(); Assertions.assertEquals("v5", vmd.getName()); - vmd = 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").get(); Assertions.assertEquals("v6", vmd.getName()); // Get the most recent version from a branch that does not exist Assertions.assertThrows(Exception.class, () -> { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=invalid").get(); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=invalid").get(); }); } @@ -555,7 +642,7 @@ private static AddVersionToBranch addVersion(String version) { return rval; } - private static ReplaceBranchVersions replaceVersions(String ... versions) { + private static ReplaceBranchVersions replaceVersions(String... versions) { ReplaceBranchVersions rval = new ReplaceBranchVersions(); rval.setVersions(List.of(versions)); return rval; diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/CustomizeDateFormatTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/CustomizeDateFormatTest.java index 6a38b99f04..47822242b6 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/CustomizeDateFormatTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/CustomizeDateFormatTest.java @@ -1,6 +1,5 @@ package io.apicurio.registry.noprofile.rest.v3; - import io.apicurio.registry.AbstractResourceTestBase; import io.apicurio.registry.types.ArtifactType; import io.apicurio.registry.types.ContentTypes; @@ -35,17 +34,13 @@ public void testOpenApiCompliantDateFormat() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create OpenAPI artifact - createArtifact(GROUP, "testGetArtifactMetaData/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + 
createArtifact(GROUP, "testGetArtifactMetaData/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Get the artifact meta-data - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest") - .then() - .statusCode(200) - .body("createdOn", new BaseMatcher() { + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest").then() + .statusCode(200).body("createdOn", new BaseMatcher() { @Override public void describeTo(Description description) { diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/EmptyArtifactTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/EmptyArtifactTest.java index a6e6976200..4b928db9a7 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/EmptyArtifactTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/EmptyArtifactTest.java @@ -27,10 +27,12 @@ public void testCreateEmptyArtifact() throws Exception { clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); - ArtifactMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); + ArtifactMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .get(); Assertions.assertNotNull(amd); - VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().get(); + VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().get(); Assertions.assertNotNull(versions); Assertions.assertEquals(0, versions.getCount()); Assertions.assertEquals(0, versions.getVersions().size()); @@ -48,13 +50,16 @@ public void testCreateFirstVersion() throws Exception { 
clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); CreateVersion createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); - VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("1").get(); + VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("1").get(); Assertions.assertNotNull(vmd); Assertions.assertEquals("1", vmd.getVersion()); - vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").get(); Assertions.assertNotNull(vmd); Assertions.assertEquals("1", vmd.getVersion()); } @@ -72,13 +77,16 @@ public void testCreateFirstCustomVersion() throws Exception { CreateVersion createVersion = TestUtils.clientCreateVersion("{}", ContentTypes.APPLICATION_JSON); createVersion.setVersion("1.0"); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); - VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("1.0").get(); + VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("1.0").get(); Assertions.assertNotNull(vmd); Assertions.assertEquals("1.0", vmd.getVersion()); - vmd = 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").get(); Assertions.assertNotNull(vmd); Assertions.assertEquals("1.0", vmd.getVersion()); } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/GroupMetaDataTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/GroupMetaDataTest.java index dfacee6235..ba4ce23dcb 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/GroupMetaDataTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/GroupMetaDataTest.java @@ -14,12 +14,12 @@ @QuarkusTest public class GroupMetaDataTest extends AbstractResourceTestBase { - + @Test public void createGroupWithMetadata() throws Exception { String groupId = UUID.randomUUID().toString(); Map labels = Map.of("label-1", "value-1", "label-2", "value-2"); - + Labels l = new Labels(); l.setAdditionalData(labels); @@ -28,7 +28,7 @@ public void createGroupWithMetadata() throws Exception { body.setDescription("My favorite test group."); body.setLabels(l); GroupMetaData gmd = clientV3.groups().post(body); - + Assertions.assertEquals(groupId, gmd.getGroupId()); Assertions.assertEquals("My favorite test group.", gmd.getDescription()); Assertions.assertEquals(labels, gmd.getLabels().getAdditionalData()); @@ -38,7 +38,7 @@ public void createGroupWithMetadata() throws Exception { public void getGroupMetadata() throws Exception { String groupId = UUID.randomUUID().toString(); Map labels = Map.of("label-1", "value-1", "label-2", "value-2"); - + Labels l = new Labels(); l.setAdditionalData(labels); @@ -47,10 +47,10 @@ public void getGroupMetadata() throws Exception { body.setDescription("My favorite test group."); body.setLabels(l); clientV3.groups().post(body); - + // Now fetch the metadata GroupMetaData gmd = 
clientV3.groups().byGroupId(groupId).get(); - + Assertions.assertEquals(groupId, gmd.getGroupId()); Assertions.assertEquals("My favorite test group.", gmd.getDescription()); Assertions.assertEquals(labels, gmd.getLabels().getAdditionalData()); @@ -60,8 +60,9 @@ public void getGroupMetadata() throws Exception { public void updateGroupMetadata() throws Exception { String groupId = UUID.randomUUID().toString(); Map labels1 = Map.of("label-1", "value-1", "label-2", "value-2"); - Map labels2 = Map.of("label-5", "value-5", "label-6", "value-6", "label-7", "value-7"); - + Map labels2 = Map.of("label-5", "value-5", "label-6", "value-6", "label-7", + "value-7"); + Labels l = new Labels(); l.setAdditionalData(labels1); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/GroupsResourceTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/GroupsResourceTest.java index 8637661be6..157d128548 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/GroupsResourceTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/GroupsResourceTest.java @@ -79,131 +79,97 @@ public void testDefaultGroup() throws Exception { String group = "testDefaultGroup"; // Create artifacts in null (default) group - createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/1", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/2", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/3", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/4", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/5", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/1", 
ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/2", ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/3", ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/4", ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(defaultGroup, "testDefaultGroup/EmptyAPI/5", ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); // Create 2 artifacts in other group - createArtifact(group, "testDefaultGroup/EmptyAPI/1", ArtifactType.OPENAPI, jsonArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group, "testDefaultGroup/EmptyAPI/2", ArtifactType.OPENAPI, jsonArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group, "testDefaultGroup/EmptyAPI/1", ArtifactType.OPENAPI, jsonArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group, "testDefaultGroup/EmptyAPI/2", ArtifactType.OPENAPI, jsonArtifactContent, + ContentTypes.APPLICATION_JSON); // Search each group to ensure the correct # of artifacts. 
- given() - .when() - .queryParam("groupId", defaultGroup) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", greaterThanOrEqualTo(5)); - given() - .when() - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(2)); + given().when().queryParam("groupId", defaultGroup).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", greaterThanOrEqualTo(5)); + given().when().queryParam("groupId", group).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(2)); // Get the artifact content - given() - .when() - .pathParam("groupId", defaultGroup) + given().when().pathParam("groupId", defaultGroup) .pathParam("artifactId", "testDefaultGroup/EmptyAPI/1") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) + .then().statusCode(200).body("openapi", equalTo("3.0.2")) .body("info.title", equalTo("Empty API")); } @Test public void testCreateArtifactRule() throws Exception { String oaiArtifactContent = resourceToString("openapi-empty.json"); - createArtifact("testCreateArtifactRule", "testCreateArtifactRule/EmptyAPI/1", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact("testCreateArtifactRule", "testCreateArtifactRule/EmptyAPI/1", ArtifactType.OPENAPI, + oaiArtifactContent, ContentTypes.APPLICATION_JSON); - //Test Rule type null + // Test Rule type null CreateRule nullType = new CreateRule(); nullType.setRuleType(null); nullType.setConfig("TestConfig"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testCreateArtifactRule") - .pathParam("artifactId", "testCreateArtifactRule/EmptyAPI/1") - .body(nullType) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(400); - - //Test Rule config null + 
given().when().contentType(CT_JSON).pathParam("groupId", "testCreateArtifactRule") + .pathParam("artifactId", "testCreateArtifactRule/EmptyAPI/1").body(nullType) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(400); + + // Test Rule config null CreateRule nullConfig = new CreateRule(); nullConfig.setRuleType(RuleType.VALIDITY); nullConfig.setConfig(null); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testCreateArtifactRule") - .pathParam("artifactId", "testCreateArtifactRule/EmptyAPI/1") - .body(nullConfig) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(400); - - //Test Rule config empty + given().when().contentType(CT_JSON).pathParam("groupId", "testCreateArtifactRule") + .pathParam("artifactId", "testCreateArtifactRule/EmptyAPI/1").body(nullConfig) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(400); + + // Test Rule config empty CreateRule emptyConfig = new CreateRule(); emptyConfig.setRuleType(RuleType.VALIDITY); emptyConfig.setConfig(""); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testCreateArtifactRule") - .pathParam("artifactId", "testCreateArtifactRule/EmptyAPI/1") - .body(emptyConfig) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(400); + given().when().contentType(CT_JSON).pathParam("groupId", "testCreateArtifactRule") + .pathParam("artifactId", "testCreateArtifactRule/EmptyAPI/1").body(emptyConfig) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(400); } @Test public void testUpdateArtifactOwner() throws Exception { String oaiArtifactContent = resourceToString("openapi-empty.json"); - createArtifact("testUpdateArtifactOwner", "testUpdateArtifactOwner/EmptyAPI/1", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact("testUpdateArtifactOwner", 
"testUpdateArtifactOwner/EmptyAPI/1", ArtifactType.OPENAPI, + oaiArtifactContent, ContentTypes.APPLICATION_JSON); EditableArtifactMetaData body = new EditableArtifactMetaData(); body.setOwner("newOwner"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testUpdateArtifactOwner") - .pathParam("artifactId", "testUpdateArtifactOwner/EmptyAPI/1") - .body(body) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(204); - + given().when().contentType(CT_JSON).pathParam("groupId", "testUpdateArtifactOwner") + .pathParam("artifactId", "testUpdateArtifactOwner/EmptyAPI/1").body(body) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(204); + // TODO verify that the owner was changed. } @Test public void testUpdateEmptyArtifactOwner() throws Exception { String oaiArtifactContent = resourceToString("openapi-empty.json"); - createArtifact("testUpdateEmptyArtifactOwner", "testUpdateEmptyArtifactOwner/EmptyAPI/1", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact("testUpdateEmptyArtifactOwner", "testUpdateEmptyArtifactOwner/EmptyAPI/1", + ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); EditableArtifactMetaData body = new EditableArtifactMetaData(); body.setOwner(""); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testUpdateEmptyArtifactOwner") - .pathParam("artifactId", "testUpdateEmptyArtifactOwner/EmptyAPI/1") - .body(body) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(400); + given().when().contentType(CT_JSON).pathParam("groupId", "testUpdateEmptyArtifactOwner") + .pathParam("artifactId", "testUpdateEmptyArtifactOwner/EmptyAPI/1").body(body) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(400); } @Test @@ -215,92 +181,58 @@ public void testMultipleGroups() throws Exception { String group2 = "testMultipleGroups_2"; // Create 5 
artifacts in Group 1 - createArtifact(group1, "testMultipleGroups/EmptyAPI/1", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group1, "testMultipleGroups/EmptyAPI/2", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group1, "testMultipleGroups/EmptyAPI/3", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group1, "testMultipleGroups/EmptyAPI/4", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group1, "testMultipleGroups/EmptyAPI/5", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group1, "testMultipleGroups/EmptyAPI/1", ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group1, "testMultipleGroups/EmptyAPI/2", ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group1, "testMultipleGroups/EmptyAPI/3", ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group1, "testMultipleGroups/EmptyAPI/4", ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group1, "testMultipleGroups/EmptyAPI/5", ArtifactType.OPENAPI, oaiArtifactContent, + ContentTypes.APPLICATION_JSON); // Create 2 artifacts in Group 2 - createArtifact(group2, "testMultipleGroups/EmptyAPI/1", ArtifactType.OPENAPI, jsonArtifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group2, "testMultipleGroups/EmptyAPI/2", ArtifactType.OPENAPI, jsonArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group2, "testMultipleGroups/EmptyAPI/1", ArtifactType.OPENAPI, jsonArtifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group2, "testMultipleGroups/EmptyAPI/2", ArtifactType.OPENAPI, jsonArtifactContent, + ContentTypes.APPLICATION_JSON); // Get group 1 metadata - given() - .when() - .pathParam("groupId", group1) - 
.get("/registry/v3/groups/{groupId}") - .then() - .statusCode(200) - .body("groupId", equalTo("testMultipleGroups_1")); + given().when().pathParam("groupId", group1).get("/registry/v3/groups/{groupId}").then() + .statusCode(200).body("groupId", equalTo("testMultipleGroups_1")); // Get group 2 metadata - given() - .when() - .pathParam("groupId", group2) - .get("/registry/v3/groups/{groupId}") - .then() - .statusCode(200) - .body("groupId", equalTo("testMultipleGroups_2")); + given().when().pathParam("groupId", group2).get("/registry/v3/groups/{groupId}").then() + .statusCode(200).body("groupId", equalTo("testMultipleGroups_2")); // Search each group to ensure the correct # of artifacts. - given() - .when() - .queryParam("groupId", group1) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(5)); - given() - .when() - .queryParam("groupId", group2) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(2)); + given().when().queryParam("groupId", group1).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(5)); + given().when().queryParam("groupId", group2).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(2)); // Get the artifact content - given() - .when() - .pathParam("groupId", group1) - .pathParam("artifactId", "testMultipleGroups/EmptyAPI/1") + given().when().pathParam("groupId", group1).pathParam("artifactId", "testMultipleGroups/EmptyAPI/1") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) + .then().statusCode(200).body("openapi", equalTo("3.0.2")) .body("info.title", equalTo("Empty API")); - //Test delete group operations + // Test delete group operations // Delete group 1 metadata - given() - .when() - .pathParam("groupId", group1) - .delete("/registry/v3/groups/{groupId}") - .then() + 
given().when().pathParam("groupId", group1).delete("/registry/v3/groups/{groupId}").then() .statusCode(204); // Delete group 2 metadata - given() - .when() - .pathParam("groupId", group2) - .delete("/registry/v3/groups/{groupId}") - .then() + given().when().pathParam("groupId", group2).delete("/registry/v3/groups/{groupId}").then() .statusCode(204); // Get group 1 metadata again, should return 404 - given() - .when() - .pathParam("groupId", group1) - .get("/registry/v3/groups/{groupId}") - .then() + given().when().pathParam("groupId", group1).get("/registry/v3/groups/{groupId}").then() .statusCode(404); // Get group 1 metadata again, should return 404 - given() - .when() - .pathParam("groupId", group2) - .get("/registry/v3/groups/{groupId}") - .then() + given().when().pathParam("groupId", group2).get("/registry/v3/groups/{groupId}").then() .statusCode(404); } @@ -309,115 +241,73 @@ public void testCreateArtifact() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create OpenAPI artifact - indicate the type via a header param - createArtifact(GROUP, "testCreateArtifact/EmptyAPI/1", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testCreateArtifact/EmptyAPI/1", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Create OpenAPI artifact - indicate the type via the content-type - io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI/2", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .body("artifact.groupId", equalTo(GROUP)) - .body("version.version", equalTo("1")) + io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact( + 
"testCreateArtifact/EmptyAPI/2", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) + .body("artifact.groupId", equalTo(GROUP)).body("version.version", equalTo("1")) .body("artifact.artifactId", equalTo("testCreateArtifact/EmptyAPI/2")) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)); // Try to create a duplicate artifact ID (should fail) - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(409) - .body("error_code", equalTo(409)) - .body("message", equalTo("An artifact with ID 'testCreateArtifact/EmptyAPI/2' in group 'GroupsResourceTest' already exists.")); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(409) + .body("error_code", equalTo(409)).body("message", equalTo( + "An artifact with ID 'testCreateArtifact/EmptyAPI/2' in group 'GroupsResourceTest' already exists.")); // Try to create an artifact with an invalid artifact type - createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI/invalidArtifactType", "INVALID_ARTIFACT_TYPE", artifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(artifactContent) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(400); + createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI/invalidArtifactType", + "INVALID_ARTIFACT_TYPE", artifactContent, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(artifactContent) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(400); // Create OpenAPI artifact - don't 
provide the artifact type - createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI/detect", null, artifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) + createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI/detect", null, + artifactContent, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) .body("artifact.artifactId", equalTo("testCreateArtifact/EmptyAPI/detect")) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)); // Create artifact with empty content (should fail) - createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyContent", null, "", ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(400); + createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyContent", null, "", + ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(400); // Create OpenAPI artifact - provide a custom version # - createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI-customVersion", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI-customVersion", + ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); createArtifact.getFirstVersion().setVersion("1.0.2"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - 
.post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .body("artifact.groupId", equalTo(GROUP)) - .body("version.version", equalTo("1.0.2")) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) + .body("artifact.groupId", equalTo(GROUP)).body("version.version", equalTo("1.0.2")) .body("artifact.artifactId", equalTo("testCreateArtifact/EmptyAPI-customVersion")) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)); // Create OpenAPI artifact - provide a custom name String customName = "CUSTOM NAME"; - createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI-customName", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI-customName", + ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); createArtifact.setName(customName); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .body("artifact.groupId", equalTo(GROUP)) - .body("artifact.name", equalTo(customName)) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) + .body("artifact.groupId", equalTo(GROUP)).body("artifact.name", equalTo(customName)) .body("artifact.artifactId", equalTo("testCreateArtifact/EmptyAPI-customName")) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)); // Create OpenAPI artifact - provide a custom description String customDescription = "CUSTOM DESCRIPTION"; - createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI-customDescription", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact = 
TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI-customDescription", + ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); createArtifact.setDescription(customDescription); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) .body("artifact.groupId", equalTo(GROUP)) .body("artifact.description", equalTo(customDescription)) .body("artifact.artifactId", equalTo("testCreateArtifact/EmptyAPI-customDescription")) @@ -432,16 +322,12 @@ public void testCreateArtifactNoAscii() { // Create OpenAPI artifact - provide a custom No-ASCII name String customNoASCIIName = "CUSTOM NAME with NO-ASCII char č"; - io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI-customNameEncoded", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact( + "testCreateArtifact/EmptyAPI-customNameEncoded", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); createArtifact.setName(customNoASCIIName); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) .body("artifact.groupId", equalTo(GROUP)) .body("artifact.artifactId", equalTo("testCreateArtifact/EmptyAPI-customNameEncoded")) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)) @@ -449,16 +335,12 @@ public void testCreateArtifactNoAscii() { 
// Create OpenAPI artifact - provide a custom No-ASCII description String customNoASCIIDescription = "CUSTOM DESCRIPTION with NO-ASCII char č"; - createArtifact = TestUtils.serverCreateArtifact("testCreateArtifact/EmptyAPI-customDescriptionEncoded", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact = TestUtils.serverCreateArtifact( + "testCreateArtifact/EmptyAPI-customDescriptionEncoded", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); createArtifact.setDescription(customNoASCIIDescription); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) .body("artifact.groupId", equalTo(GROUP)) .body("artifact.artifactId", equalTo("testCreateArtifact/EmptyAPI-customDescriptionEncoded")) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)) @@ -470,29 +352,20 @@ public void testGetArtifact() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create OpenAPI artifact - createArtifact(GROUP, "testGetArtifact/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testGetArtifact/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Get the artifact content - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifact/EmptyAPI") + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testGetArtifact/EmptyAPI") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) + .then().statusCode(200).body("openapi", equalTo("3.0.2")) .body("info.title", equalTo("Empty 
API")); // Try to get artifact content for an artifact that doesn't exist. - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifact/MissingAPI") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest") - .then() - .statusCode(404) - .body("error_code", equalTo(404)) - .body("message", equalTo("No version '' found for artifact with ID 'testGetArtifact/MissingAPI' in group 'GroupsResourceTest'.")); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testGetArtifact/MissingAPI") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest").then() + .statusCode(404).body("error_code", equalTo(404)).body("message", equalTo( + "No version '' found for artifact with ID 'testGetArtifact/MissingAPI' in group 'GroupsResourceTest'.")); } @Test @@ -501,69 +374,42 @@ public void testUpdateArtifact() throws Exception { String updatedArtifactContent = artifactContent.replace("Empty API", "Empty API (Updated)"); // Create OpenAPI artifact - createArtifact(GROUP, "testUpdateArtifact/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testUpdateArtifact/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Update OpenAPI artifact (new version) - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testUpdateArtifact/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + .serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + 
.pathParam("artifactId", "testUpdateArtifact/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) .body("artifactId", equalTo("testUpdateArtifact/EmptyAPI")) .body("artifactType", equalTo(ArtifactType.OPENAPI)); // Get the artifact content (should be the updated content) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testUpdateArtifact/EmptyAPI") + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testUpdateArtifact/EmptyAPI") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) + .then().statusCode(200).body("openapi", equalTo("3.0.2")) .body("info.title", equalTo("Empty API (Updated)")); // Try to update an artifact that doesn't exist. createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ArtifactType.OPENAPI); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testUpdateArtifact/MissingAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(404); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testUpdateArtifact/MissingAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(404); // Try to update an artifact with empty content createVersion = TestUtils.serverCreateVersion("", ArtifactType.OPENAPI); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testUpdateArtifact/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(400); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testUpdateArtifact/EmptyAPI").body(createVersion) + 
.post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(400); // Update OpenAPI artifact with a custom version createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ArtifactType.OPENAPI); createVersion.setVersion("3.0.0.Final"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testUpdateArtifact/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testUpdateArtifact/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) .body("version", equalTo("3.0.0.Final")) .body("artifactId", equalTo("testUpdateArtifact/EmptyAPI")) .body("artifactType", equalTo(ArtifactType.OPENAPI)); @@ -572,32 +418,19 @@ public void testUpdateArtifact() throws Exception { String customName = "CUSTOM NAME"; createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ArtifactType.OPENAPI); createVersion.setName(customName); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testUpdateArtifact/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("name", equalTo(customName)) - .body("artifactId", equalTo("testUpdateArtifact/EmptyAPI")) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testUpdateArtifact/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) + .body("name", equalTo(customName)).body("artifactId", equalTo("testUpdateArtifact/EmptyAPI")) .body("artifactType", equalTo(ArtifactType.OPENAPI)); // Update OpenAPI artifact with a custom description String customDescription = 
"CUSTOM DESCRIPTION"; createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ArtifactType.OPENAPI); createVersion.setDescription(customDescription); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testUpdateArtifact/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testUpdateArtifact/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) .body("description", equalTo(customDescription)) .body("artifactId", equalTo("testUpdateArtifact/EmptyAPI")) .body("artifactType", equalTo(ArtifactType.OPENAPI)); @@ -607,88 +440,60 @@ public void testUpdateArtifact() throws Exception { @Test public void testUpdateVersionState() throws Exception { String oaiArtifactContent = resourceToString("openapi-empty.json"); - createArtifact("testUpdateVersionState", "testUpdateVersionState/EmptyAPI/1", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact("testUpdateVersionState", "testUpdateVersionState/EmptyAPI/1", ArtifactType.OPENAPI, + oaiArtifactContent, ContentTypes.APPLICATION_JSON); EditableVersionMetaData body = new EditableVersionMetaData(); body.setState(VersionState.DEPRECATED); // Update the artifact state to DEPRECATED. 
- given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testUpdateVersionState") - .pathParam("artifactId", "testUpdateVersionState/EmptyAPI/1") - .body(body) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest") - .then() + given().when().contentType(CT_JSON).pathParam("groupId", "testUpdateVersionState") + .pathParam("artifactId", "testUpdateVersionState/EmptyAPI/1").body(body) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest").then() .statusCode(204); // Update the artifact state to DEPRECATED again. - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testUpdateVersionState") - .pathParam("artifactId", "testUpdateVersionState/EmptyAPI/1") - .body(body) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest") - .then() + given().when().contentType(CT_JSON).pathParam("groupId", "testUpdateVersionState") + .pathParam("artifactId", "testUpdateVersionState/EmptyAPI/1").body(body) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest").then() .statusCode(204); // Send a GET request to check if the artifact state is DEPRECATED. 
- given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testUpdateVersionState") + given().when().contentType(CT_JSON).pathParam("groupId", "testUpdateVersionState") .pathParam("artifactId", "testUpdateVersionState/EmptyAPI/1") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .header("X-Registry-Deprecated", "true"); + .then().statusCode(200).header("X-Registry-Deprecated", "true"); } @Test public void testUpdateArtifactVersionState() throws Exception { String oaiArtifactContent = resourceToString("openapi-empty.json"); - createArtifact("testUpdateArtifactVersionState", "testUpdateArtifactVersionState/EmptyAPI", ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact("testUpdateArtifactVersionState", "testUpdateArtifactVersionState/EmptyAPI", + ArtifactType.OPENAPI, oaiArtifactContent, ContentTypes.APPLICATION_JSON); EditableVersionMetaData body = new EditableVersionMetaData(); body.setState(VersionState.DEPRECATED); // Update the artifact state to DEPRECATED. - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testUpdateArtifactVersionState") + given().when().contentType(CT_JSON).pathParam("groupId", "testUpdateArtifactVersionState") .pathParam("artifactId", "testUpdateArtifactVersionState/EmptyAPI") - .pathParam("versionId", "1") - .body(body) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{versionId}") - .then() + .pathParam("versionId", "1").body(body) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{versionId}").then() .statusCode(204); // Update the artifact state to DEPRECATED again. 
- given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testUpdateArtifactVersionState") + given().when().contentType(CT_JSON).pathParam("groupId", "testUpdateArtifactVersionState") .pathParam("artifactId", "testUpdateArtifactVersionState/EmptyAPI") - .pathParam("versionId", "1") - .body(body) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{versionId}") - .then() + .pathParam("versionId", "1").body(body) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{versionId}").then() .statusCode(204); // Send a GET request to check if the artifact state is DEPRECATED. - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", "testUpdateArtifactVersionState") + given().when().contentType(CT_JSON).pathParam("groupId", "testUpdateArtifactVersionState") .pathParam("artifactId", "testUpdateArtifactVersionState/EmptyAPI") .pathParam("versionId", "1") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{versionId}/content") - .then() - .statusCode(200) - .header("X-Registry-Deprecated", "true"); + .then().statusCode(200).header("X-Registry-Deprecated", "true"); } @Test @@ -699,21 +504,17 @@ public void testUpdateArtifactNoAscii() throws Exception { String updatedArtifactContent = artifactContent.replace("Empty API", "Empty API (Updated)"); // Create OpenAPI artifact - createArtifact(GROUP, "testUpdateArtifactNoAscii/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testUpdateArtifactNoAscii/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Update OpenAPI artifact with a custom no-ascii name String customNoASCIIName = "CUSTOM NAME with NO-ASCII char ě"; - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + 
.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); createVersion.setName(customNoASCIIName); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testUpdateArtifactNoAscii/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testUpdateArtifactNoAscii/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) .body("name", equalTo(customNoASCIIName)) .body("artifactId", equalTo("testUpdateArtifactNoAscii/EmptyAPI")) .body("artifactType", equalTo(ArtifactType.OPENAPI)); @@ -722,17 +523,12 @@ public void testUpdateArtifactNoAscii() throws Exception { String customNoASCIIDescription = "CUSTOM DESCRIPTION with NO-ASCII char ě"; createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); createVersion.setDescription(customNoASCIIDescription); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) .header("X-Registry-ArtifactType", ArtifactType.OPENAPI) - .header("X-Registry-Description-Encoded", Base64.encode(customNoASCIIDescription.getBytes(StandardCharsets.UTF_8))) - .pathParam("artifactId", "testUpdateArtifactNoAscii/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) + .header("X-Registry-Description-Encoded", + Base64.encode(customNoASCIIDescription.getBytes(StandardCharsets.UTF_8))) + .pathParam("artifactId", "testUpdateArtifactNoAscii/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) .body("description", equalTo(customNoASCIIDescription)) .body("artifactId", 
equalTo("testUpdateArtifactNoAscii/EmptyAPI")) .body("artifactType", equalTo(ArtifactType.OPENAPI)); @@ -743,47 +539,28 @@ public void testDeleteArtifact() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create OpenAPI artifact - createArtifact(GROUP, "testDeleteArtifact/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testDeleteArtifact/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Make sure we can get the artifact content - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifact/EmptyAPI") + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testDeleteArtifact/EmptyAPI") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) + .then().statusCode(200).body("openapi", equalTo("3.0.2")) .body("info.title", equalTo("Empty API")); // Delete the artifact - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifact/EmptyAPI") - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(204); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testDeleteArtifact/EmptyAPI") + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(204); // Try to get artifact for an artifact that doesn't exist. 
- given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifact/EmptyAPI") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(404) - .body("error_code", equalTo(404)) - .body("message", equalTo("No artifact with ID 'testDeleteArtifact/EmptyAPI' in group 'GroupsResourceTest' was found.")); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testDeleteArtifact/EmptyAPI") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(404) + .body("error_code", equalTo(404)).body("message", equalTo( + "No artifact with ID 'testDeleteArtifact/EmptyAPI' in group 'GroupsResourceTest' was found.")); // Try to delete an artifact that doesn't exist. - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifact/MissingAPI") - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(404); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testDeleteArtifact/MissingAPI") + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(404); } @Test @@ -792,109 +569,67 @@ public void testDeleteArtifactVersion() throws Exception { String updatedArtifactContent = artifactContent.replace("Empty API", "Empty API (Updated)"); // Create OpenAPI artifact - createArtifact(GROUP, "testDeleteArtifactVersion/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testDeleteArtifactVersion/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Make sure we can get the artifact content - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) + 
.then().statusCode(200).body("openapi", equalTo("3.0.2")) .body("info.title", equalTo("Empty API")); // Create a new version of the artifact - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("version", equalTo("2")) - .body("artifactType", equalTo(ArtifactType.OPENAPI)); - - //Get the artifact version 1 - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") - .pathParam("version", "1") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) - .body("info.title", equalTo("Empty API")); + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + .serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) + .body("version", equalTo("2")).body("artifactType", equalTo(ArtifactType.OPENAPI)); + + // Get the artifact version 1 + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").pathParam("version", "1") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/content").then() + .statusCode(200).body("openapi", equalTo("3.0.2")).body("info.title", equalTo("Empty API")); // Delete the artifact version 1 - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", 
"testDeleteArtifactVersion/EmptyAPI") - .pathParam("version", "1") - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").pathParam("version", "1") + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() .statusCode(204); // Try to get artifact version 1 that doesn't exist. - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") - .pathParam("version", "1") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() - .statusCode(404) - .body("error_code", equalTo(404)) - .body("message", equalTo("No version '1' found for artifact with ID 'testDeleteArtifactVersion/EmptyAPI' in group 'GroupsResourceTest'.")); - - //Get the artifact version 2 - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") - .pathParam("version", "2") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").pathParam("version", "1") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() + .statusCode(404).body("error_code", equalTo(404)).body("message", equalTo( + "No version '1' found for artifact with ID 'testDeleteArtifactVersion/EmptyAPI' in group 'GroupsResourceTest'.")); + + // Get the artifact version 2 + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").pathParam("version", "2") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/content").then() + .statusCode(200).body("openapi", equalTo("3.0.2")) .body("info.title", 
equalTo("Empty API (Updated)")); // Delete the artifact version 2 - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") - .pathParam("version", "2") - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").pathParam("version", "2") + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() .statusCode(204); // Try to get artifact version 2 that doesn't exist. - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") - .pathParam("version", "2") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() - .statusCode(404) - .body("error_code", equalTo(404)) - .body("message", equalTo("No version '2' found for artifact with ID 'testDeleteArtifactVersion/EmptyAPI' in group 'GroupsResourceTest'.")); + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").pathParam("version", "2") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() + .statusCode(404).body("error_code", equalTo(404)).body("message", equalTo( + "No version '2' found for artifact with ID 'testDeleteArtifactVersion/EmptyAPI' in group 'GroupsResourceTest'.")); // Try to delete an artifact version 2 that doesn't exist. 
- given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") - .pathParam("version", "2") - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").pathParam("version", "2") + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() .statusCode(404); } @@ -904,35 +639,24 @@ public void testDeleteArtifactsInGroup() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create several artifacts in the group. - createArtifact(group, "EmptyAPI-1", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group, "EmptyAPI-2", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group, "EmptyAPI-3", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-1", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-2", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-3", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Make sure we can search for all three artifacts in the group. 
- given() - .when() - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(3)); + given().when().queryParam("groupId", group).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(3)); // Delete the artifacts in the group - given() - .when() - .pathParam("groupId", group) - .delete("/registry/v3/groups/{groupId}/artifacts") - .then() + given().when().pathParam("groupId", group).delete("/registry/v3/groups/{groupId}/artifacts").then() .statusCode(204); // Verify that all 3 artifacts were deleted - given() - .when() - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(0)); + given().when().queryParam("groupId", group).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(0)); } @Test @@ -941,35 +665,24 @@ public void testDeleteGroupWithArtifacts() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create several artifacts in the group. - createArtifact(group, "EmptyAPI-1", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group, "EmptyAPI-2", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group, "EmptyAPI-3", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-1", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-2", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-3", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Make sure we can search for all three artifacts in the group. 
- given() - .when() - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(3)); + given().when().queryParam("groupId", group).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(3)); // Delete the *group* (should delete all artifacts) - given() - .when() - .pathParam("groupId", group) - .delete("/registry/v3/groups/{groupId}") - .then() + given().when().pathParam("groupId", group).delete("/registry/v3/groups/{groupId}").then() .statusCode(204); // Verify that all 3 artifacts were deleted - given() - .when() - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(0)); + given().when().queryParam("groupId", group).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(0)); } @Test @@ -978,41 +691,32 @@ public void testListArtifactsInGroup() throws Exception { String group = "testListArtifactsInGroup"; // Create several artifacts in a group. 
- createArtifact(group, "EmptyAPI-1", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group, "EmptyAPI-2", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group, "EmptyAPI-3", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-1", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-2", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-3", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // List the artifacts in the group - given() - .when() - .pathParam("groupId", group) - .get("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(3)); + given().when().pathParam("groupId", group).get("/registry/v3/groups/{groupId}/artifacts").then() + .statusCode(200).body("count", equalTo(3)); // Add two more artifacts to the group. 
- createArtifact(group, "EmptyAPI-4", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - createArtifact(group, "EmptyAPI-5", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-4", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + createArtifact(group, "EmptyAPI-5", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // List the artifacts in the group again - given() - .when() - .pathParam("groupId", group) - .get("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(5)); + given().when().pathParam("groupId", group).get("/registry/v3/groups/{groupId}/artifacts").then() + .statusCode(200).body("count", equalTo(5)); // Try to list artifacts for a group that doesn't exist // List the artifacts in the group - given() - .when() - .pathParam("groupId", group + "-doesnotexist") - .get("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) + given().when().pathParam("groupId", group + "-doesnotexist") + .get("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) .body("count", equalTo(0)); } @@ -1023,45 +727,31 @@ public void testListArtifactVersions() throws Exception { String artifactId = "testListArtifactVersions/EmptyAPI"; // Create an artifact - createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Update the artifact 5 times for (int idx = 0; idx < 5; idx++) { String versionContent = artifactContent.replace("Empty API", "Empty API (Update " + idx + ")"); - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(versionContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - 
.body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("artifactId", equalTo(artifactId)) + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + .serverCreateVersion(versionContent, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", artifactId).body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then() + .statusCode(200).body("artifactId", equalTo(artifactId)) .body("artifactType", equalTo(ArtifactType.OPENAPI)); } // List the artifact versions - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() -// .log().all() - .statusCode(200) - .body("count", equalTo(6)) - .body("versions[0].version", notNullValue()); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then() + // .log().all() + .statusCode(200).body("count", equalTo(6)).body("versions[0].version", notNullValue()); // Try to list artifact versions for an artifact that doesn't exist. 
- given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testListArtifactVersions/MissingAPI") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(404); + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(404); } @@ -1071,102 +761,61 @@ public void testCreateArtifactVersion() throws Exception { String updatedArtifactContent = artifactContent.replace("Empty API", "Empty API (Updated)"); // Create OpenAPI artifact - createArtifact(GROUP, "testCreateArtifactVersion/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testCreateArtifactVersion/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Create a new version of the artifact - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("version", equalTo("2")) - .body("artifactType", equalTo(ArtifactType.OPENAPI)); + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + .serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) + .body("version", equalTo("2")).body("artifactType", equalTo(ArtifactType.OPENAPI)); // Get the artifact content (should be the updated content) - given() - .when() - .pathParam("groupId", GROUP) + 
given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) + .then().statusCode(200).body("openapi", equalTo("3.0.2")) .body("info.title", equalTo("Empty API (Updated)")); // Try to create a new version of an artifact that doesn't exist. createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testCreateArtifactVersion/MissingAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(404); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testCreateArtifactVersion/MissingAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(404); // Try to create a new version of the artifact with empty content createVersion = TestUtils.serverCreateVersion("", ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(400); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(400); // Create another new version of the artifact with a custom version # createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); createVersion.setVersion("3.0.0.Final"); - given() - .when() - .contentType(CT_JSON) - 
.pathParam("groupId", GROUP) - .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("version", equalTo("3.0.0.Final")) - .body("artifactType", equalTo(ArtifactType.OPENAPI)); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) + .body("version", equalTo("3.0.0.Final")).body("artifactType", equalTo(ArtifactType.OPENAPI)); // Create another new version of the artifact with a custom name String customName = "CUSTOM NAME"; createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); createVersion.setName(customName); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .header("X-Registry-Name", customName) - .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).header("X-Registry-Name", customName) + .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) .body("name", equalTo(customName)); // Create another new version of the artifact with a custom description String customDescription = "CUSTOM DESCRIPTION"; createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); createVersion.setDescription(customDescription); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) .header("X-Registry-Description", customDescription) - 
.pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) + .pathParam("artifactId", "testCreateArtifactVersion/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) .body("description", equalTo(customDescription)); } @@ -1179,37 +828,28 @@ public void testCreateArtifactVersionNoAscii() throws Exception { String updatedArtifactContent = artifactContent.replace("Empty API", "Empty API (Updated)"); // Create OpenAPI artifact - createArtifact(GROUP, "testCreateArtifactVersionNoAscii/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testCreateArtifactVersionNoAscii/EmptyAPI", ArtifactType.OPENAPI, + artifactContent, ContentTypes.APPLICATION_JSON); // Create another new version of the artifact with a custom No-ASCII name and description String customNameNoASCII = "CUSTOM NAME WITH NO-ASCII CHAR ě"; String customDescriptionNoASCII = "CUSTOM DESCRIPTION WITH NO-ASCII CHAR ě"; - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + .serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); createVersion.setName(customNameNoASCII); createVersion.setDescription(customDescriptionNoASCII); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testCreateArtifactVersionNoAscii/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testCreateArtifactVersionNoAscii/EmptyAPI").body(createVersion) + 
.post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) .body("name", equalTo(customNameNoASCII)) .body("description", equalTo(customDescriptionNoASCII)); // Get artifact metadata (should have the custom name and description) - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) .pathParam("artifactId", "testCreateArtifactVersionNoAscii/EmptyAPI") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest") - .then() - .statusCode(200) - .body("name", equalTo(customNameNoASCII)) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest").then() + .statusCode(200).body("name", equalTo(customNameNoASCII)) .body("description", equalTo(customDescriptionNoASCII)); } @@ -1224,41 +864,23 @@ public void testCreateArtifactVersionValidityRuleViolation() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createRule) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createRule) .pathParam("artifactId", artifactId) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(204) .body(anything()); // Verify the rule was added - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("VALIDITY")) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY").then() + 
.statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("VALIDITY")) .body("config", equalTo("FULL")); // Create a new version of the artifact with invalid syntax - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(artifactContentInvalidSyntax, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(409) - .body("error_code", equalTo(409)) + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + .serverCreateVersion(artifactContentInvalidSyntax, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(createVersion).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") + .then().statusCode(409).body("error_code", equalTo(409)) .body("message", startsWith("Syntax or semantic violation for JSON Schema artifact.")); } @@ -1273,43 +895,24 @@ public void testCreateArtifactVersionCompatibilityRuleViolation() throws Excepti CreateRule rule = new CreateRule(); rule.setRuleType(RuleType.COMPATIBILITY); rule.setConfig("BACKWARD"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(rule) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(rule).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then() + .statusCode(204).body(anything()); // Verify the rule was added - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - 
.get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("COMPATIBILITY")) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("COMPATIBILITY")) .body("config", equalTo("BACKWARD")); // Create a new version of the artifact with invalid syntax - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(artifactContentInvalidSyntax, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(409) - .body("error_code", equalTo(409)) - .body("message", startsWith("Incompatible artifact: testCreateArtifact/ValidJson [JSON], num" + - " of incompatible diffs: {1}, list of diff types: [SUBSCHEMA_TYPE_CHANGED at /properties/age]")) + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + .serverCreateVersion(artifactContentInvalidSyntax, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(createVersion).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") + .then().statusCode(409).body("error_code", equalTo(409)) + .body("message", startsWith("Incompatible artifact: testCreateArtifact/ValidJson [JSON], num" + + " of incompatible diffs: {1}, list of diff types: [SUBSCHEMA_TYPE_CHANGED at /properties/age]")) .body("causes[0].description", equalTo(DiffType.SUBSCHEMA_TYPE_CHANGED.getDescription())) .body("causes[0].context", equalTo("/properties/age")); @@ -1320,24 +923,20 @@ public void testGetArtifactVersion() 
throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create an artifact - createArtifact(GROUP, "testGetArtifactVersion/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testGetArtifactVersion/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Update the artifact 5 times List versions = new ArrayList<>(); for (int idx = 0; idx < 5; idx++) { - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(artifactContent.replace("Empty API", "Empty API (Update " + idx + ")"), ContentTypes.APPLICATION_JSON); - String version = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactVersion/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("artifactId", equalTo("testGetArtifactVersion/EmptyAPI")) - .body("artifactType", equalTo(ArtifactType.OPENAPI)) - .extract().body().path("version"); + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion( + artifactContent.replace("Empty API", "Empty API (Update " + idx + ")"), + ContentTypes.APPLICATION_JSON); + String version = given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testGetArtifactVersion/EmptyAPI").body(createVersion) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then() + .statusCode(200).body("artifactId", equalTo("testGetArtifactVersion/EmptyAPI")) + .body("artifactType", equalTo(ArtifactType.OPENAPI)).extract().body().path("version"); versions.add(version); } @@ -1345,35 +944,22 @@ public void testGetArtifactVersion() throws Exception { for (int idx = 0; idx < 5; idx++) { String version = versions.get(idx); String expected = "Empty API (Update " + idx + ")"; - given() - .when() - 
.pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactVersion/EmptyAPI") - .pathParam("version", version) + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testGetArtifactVersion/EmptyAPI").pathParam("version", version) .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/content") - .then() - .statusCode(200) - .body("info.title", equalTo(expected)); + .then().statusCode(200).body("info.title", equalTo(expected)); } // Now get a version that doesn't exist. - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactVersion/EmptyAPI") + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testGetArtifactVersion/EmptyAPI") .pathParam("version", 12345) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() .statusCode(404); // Now get a version of an artifact that doesn't exist. 
- given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactVersion/MissingAPI") - .pathParam("version", "1") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testGetArtifactVersion/MissingAPI").pathParam("version", "1") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() .statusCode(404); } @@ -1383,184 +969,100 @@ public void testArtifactRules() throws Exception { String artifactId = "testArtifactRules/EmptyAPI"; // Create an artifact - createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Add a rule CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createRule) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createRule) .pathParam("artifactId", artifactId) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(204) .body(anything()); // Verify the rule was added - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("VALIDITY")) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("VALIDITY")) .body("config", equalTo("FULL")); // Try to add 
the rule again - should get a 409 - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(createRule) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(409) - .body("error_code", equalTo(409)) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(createRule).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then() + .statusCode(409).body("error_code", equalTo(409)) .body("message", equalTo("A rule named 'VALIDITY' already exists.")); // Add another rule createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig("BACKWARD"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(createRule) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(createRule).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then() + .statusCode(204).body(anything()); // Verify the rule was added - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("COMPATIBILITY")) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("COMPATIBILITY")) .body("config", equalTo("BACKWARD")); // Get the list of rules (should be 2 of them) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - 
.get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(200) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(200) .contentType(ContentType.JSON) .body("[0]", anyOf(equalTo("VALIDITY"), equalTo("COMPATIBILITY"))) - .body("[1]", anyOf(equalTo("VALIDITY"), equalTo("COMPATIBILITY"))) - .body("[2]", nullValue()); + .body("[1]", anyOf(equalTo("VALIDITY"), equalTo("COMPATIBILITY"))).body("[2]", nullValue()); // Update a rule's config Rule updateRule = new Rule(); updateRule.setRuleType(RuleType.COMPATIBILITY); updateRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) .body(updateRule) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("COMPATIBILITY")) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("COMPATIBILITY")) .body("config", equalTo("FULL")); // Get a single (updated) rule by name - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("COMPATIBILITY")) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("COMPATIBILITY")) .body("config", equalTo("FULL")); // Delete a rule - given() - .when() - 
.pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY") - .then() - .statusCode(204) - .body(anything()); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY").then() + .statusCode(204).body(anything()); // Get a single (deleted) rule by name (should fail with a 404) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY") - .then() - .statusCode(404) - .contentType(ContentType.JSON) - .body("error_code", equalTo(404)) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/COMPATIBILITY").then() + .statusCode(404).contentType(ContentType.JSON).body("error_code", equalTo(404)) .body("message", equalTo("No rule named 'COMPATIBILITY' was found.")); // Get the list of rules (should be 1 of them) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() -// .log().all() - .statusCode(200) - .contentType(ContentType.JSON) - .body("[0]", anyOf(equalTo("VALIDITY"), equalTo("COMPATIBILITY"))) - .body("[1]", nullValue()); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then() + // .log().all() + .statusCode(200).contentType(ContentType.JSON) + .body("[0]", anyOf(equalTo("VALIDITY"), equalTo("COMPATIBILITY"))).body("[1]", nullValue()); // Delete all rules - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204); + 
given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(204); // Get the list of rules (no rules now) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("[0]", nullValue()); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(200) + .contentType(ContentType.JSON).body("[0]", nullValue()); // Add a rule to an artifact that doesn't exist. createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "MissingArtifact") - .body(createRule) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(404) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "MissingArtifact").body(createRule) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(404) .body(anything()); } @@ -1570,113 +1072,60 @@ public void testDeleteAllArtifactRules() throws Exception { String artifactId = "testDeleteAllArtifactRules/EmptyAPI"; // Create an artifact - createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Add the Validity rule CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createRule) + 
given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createRule) .pathParam("artifactId", artifactId) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(204) .body(anything()); // Add the Integrity rule createRule.setRuleType(RuleType.INTEGRITY); createRule.setConfig(IntegrityLevel.NO_DUPLICATES.name()); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createRule) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createRule) .pathParam("artifactId", artifactId) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(204) .body(anything()); // Verify the rules were added - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("VALIDITY")) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("VALIDITY")) .body("config", equalTo("FULL")); - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/INTEGRITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("INTEGRITY")) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/INTEGRITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", 
equalTo("INTEGRITY")) .body("config", equalTo("NO_DUPLICATES")); // Get the list of rules (should be 2 of them) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("[0]", anyOf(equalTo("VALIDITY"), equalTo("INTEGRITY"))) - .body("[1]", anyOf(equalTo("VALIDITY"), equalTo("INTEGRITY"))) - .body("[2]", nullValue()); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(200) + .contentType(ContentType.JSON).body("[0]", anyOf(equalTo("VALIDITY"), equalTo("INTEGRITY"))) + .body("[1]", anyOf(equalTo("VALIDITY"), equalTo("INTEGRITY"))).body("[2]", nullValue()); // Delete all rules - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(204); // Make sure the rules were deleted - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY") - .then() - .statusCode(404) - .contentType(ContentType.JSON) - .body("error_code", equalTo(404)) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY").then() + .statusCode(404).contentType(ContentType.JSON).body("error_code", equalTo(404)) .body("message", equalTo("No rule named 'VALIDITY' was found.")); - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - 
.get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/INTEGRITY") - .then() - .statusCode(404) - .contentType(ContentType.JSON) - .body("error_code", equalTo(404)) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/INTEGRITY").then() + .statusCode(404).contentType(ContentType.JSON).body("error_code", equalTo(404)) .body("message", equalTo("No rule named 'INTEGRITY' was found.")); // Get the list of rules (no rules now) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("[0]", nullValue()); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(200) + .contentType(ContentType.JSON).body("[0]", nullValue()); } @Test @@ -1684,98 +1133,62 @@ public void testArtifactMetaData() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create OpenAPI artifact - createArtifact(GROUP, "testGetArtifactMetaData/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON, (ca) -> { - ca.setName("Empty API"); - ca.setDescription("An example API design using OpenAPI."); - }); + createArtifact(GROUP, "testGetArtifactMetaData/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON, (ca) -> { + ca.setName("Empty API"); + ca.setDescription("An example API design using OpenAPI."); + }); // Get the artifact meta-data - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(200) - .body("artifactId", equalTo("testGetArtifactMetaData/EmptyAPI")) - .body("version", anything()) - 
.body("artifactType", equalTo(ArtifactType.OPENAPI)) - .body("createdOn", anything()) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(200) + .body("artifactId", equalTo("testGetArtifactMetaData/EmptyAPI")).body("version", anything()) + .body("artifactType", equalTo(ArtifactType.OPENAPI)).body("createdOn", anything()) .body("name", equalTo("Empty API")) - .body("description", equalTo("An example API design using OpenAPI.")) - .extract() + .body("description", equalTo("An example API design using OpenAPI.")).extract() .as(ArtifactMetaData.class); // Try to get artifact meta-data for an artifact that doesn't exist. - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testGetArtifactMetaData/MissingAPI") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(404) - .body("error_code", equalTo(404)) - .body("message", equalTo("No artifact with ID 'testGetArtifactMetaData/MissingAPI' in group 'GroupsResourceTest' was found.")); + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(404) + .body("error_code", equalTo(404)).body("message", equalTo( + "No artifact with ID 'testGetArtifactMetaData/MissingAPI' in group 'GroupsResourceTest' was found.")); // Update the artifact meta-data - EditableArtifactMetaData amd = EditableArtifactMetaData.builder() - .name("Empty API Name") + EditableArtifactMetaData amd = EditableArtifactMetaData.builder().name("Empty API Name") .description("Empty API description.") - .labels(Map.of("additionalProp1", "Empty API additional property")) - .build(); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") - .body(amd) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - 
.statusCode(204); - + .labels(Map.of("additionalProp1", "Empty API additional property")).build(); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI").body(amd) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(204); // Get the (updated) artifact meta-data Map expectedLabels = new HashMap<>(); expectedLabels.put("additionalProp1", "Empty API additional property"); - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(200) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(200) .body("artifactId", equalTo("testGetArtifactMetaData/EmptyAPI")) .body("name", equalTo("Empty API Name")) .body("description", equalTo("Empty API description.")) .body("labels", equalToObject(expectedLabels)); - // Update the artifact content (new version) and then make sure the name/description meta-data is still available + // Update the artifact content (new version) and then make sure the name/description meta-data is + // still available String updatedArtifactContent = artifactContent.replace("Empty API", "Empty API (Updated)"); - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .header("X-Registry-ArtifactType", ArtifactType.OPENAPI) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("artifactId", equalTo("testGetArtifactMetaData/EmptyAPI")) + 
io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + .serverCreateVersion(updatedArtifactContent, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).header("X-Registry-ArtifactType", ArtifactType.OPENAPI) + .pathParam("groupId", GROUP).pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") + .body(createVersion).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") + .then().statusCode(200).body("artifactId", equalTo("testGetArtifactMetaData/EmptyAPI")) .body("artifactType", equalTo(ArtifactType.OPENAPI)); // Verify the artifact meta-data name and description are still set. - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(200) - .body("artifactId", equalTo("testGetArtifactMetaData/EmptyAPI")) - .body("version", anything()) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testGetArtifactMetaData/EmptyAPI") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(200) + .body("artifactId", equalTo("testGetArtifactMetaData/EmptyAPI")).body("version", anything()) .body("name", equalTo("Empty API Name")) .body("description", equalTo("Empty API description.")); } @@ -1788,7 +1201,8 @@ public void testLabelWithNullValue() throws Exception { int idx = 0; String title = "Empty API " + idx; String artifactId = "Empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); Map labels = new HashMap<>(); labels.put("test-key", null); @@ -1798,28 +1212,16 @@ public void testLabelWithNullValue() throws Exception { metaData.setName(title); metaData.setDescription("Some description of an 
API"); metaData.setLabels(labels); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", group) - .pathParam("artifactId", artifactId) - .body(metaData) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() + given().when().contentType(CT_JSON).pathParam("groupId", group).pathParam("artifactId", artifactId) + .body(metaData).put("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then() .statusCode(204); - + // Get the (updated) artifact meta-data Map expectedLabels = new HashMap<>(); expectedLabels.put("test-key", null); - given() - .when() - .pathParam("groupId", group) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(200) - .body("artifactId", equalTo(artifactId)) - .body("version", anything()) + given().when().pathParam("groupId", group).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(200) + .body("artifactId", equalTo(artifactId)).body("version", anything()) .body("labels", equalToObject(expectedLabels)); } @@ -1831,110 +1233,73 @@ public void testArtifactVersionMetaData() throws Exception { String updatedArtifactContent_v3 = artifactContent.replace("Empty API", "Empty API (VERSION 3)"); // Create OpenAPI artifact - createArtifact(GROUP, "testArtifactVersionMetaData/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testArtifactVersionMetaData/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Create a new version of the artifact - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion_v2 = TestUtils.serverCreateVersion(updatedArtifactContent_v2, ContentTypes.APPLICATION_JSON); + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion_v2 = TestUtils + .serverCreateVersion(updatedArtifactContent_v2, ContentTypes.APPLICATION_JSON); createVersion_v2.setName("Empty API (VERSION 
2)"); createVersion_v2.setDescription("An example API design using OpenAPI."); - String version2 = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI") - .body(createVersion_v2) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("version", notNullValue()) - .body("artifactType", equalTo(ArtifactType.OPENAPI)) - .extract().body().path("version"); + String version2 = given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI").body(createVersion_v2) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) + .body("version", notNullValue()).body("artifactType", equalTo(ArtifactType.OPENAPI)).extract() + .body().path("version"); // Create another new version of the artifact - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion_v3 = TestUtils.serverCreateVersion(updatedArtifactContent_v3, ContentTypes.APPLICATION_JSON); + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion_v3 = TestUtils + .serverCreateVersion(updatedArtifactContent_v3, ContentTypes.APPLICATION_JSON); createVersion_v3.setName("Empty API (VERSION 3)"); createVersion_v3.setDescription("An example API design using OpenAPI."); - String version3 = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI") - .body(createVersion_v3) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("version", notNullValue()) - .body("artifactType", equalTo(ArtifactType.OPENAPI)) - .extract().body().path("version"); + String version3 = given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI").body(createVersion_v3) + 
.post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) + .body("version", notNullValue()).body("artifactType", equalTo(ArtifactType.OPENAPI)).extract() + .body().path("version"); // Get meta-data for v2 - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI") .pathParam("version", version2) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() - .statusCode(200) - .body("version", equalTo(version2)) - .body("artifactType", equalTo(ArtifactType.OPENAPI)) - .body("createdOn", anything()) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() + .statusCode(200).body("version", equalTo(version2)) + .body("artifactType", equalTo(ArtifactType.OPENAPI)).body("createdOn", anything()) .body("name", equalTo("Empty API (VERSION 2)")) - .body("description", equalTo("An example API design using OpenAPI.")) - .extract() + .body("description", equalTo("An example API design using OpenAPI.")).extract() .as(VersionMetaData.class); // Update the version meta-data String metaData = "{\"name\": \"Updated Name\", \"description\": \"Updated description.\"}"; - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI") - .pathParam("version", version2) - .body(metaData) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() + .pathParam("version", version2).body(metaData) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() .statusCode(204); // Get the (updated) artifact meta-data - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI") .pathParam("version", 
version2) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() - .statusCode(200) - .body("version", equalTo(version2)) - .body("artifactType", equalTo(ArtifactType.OPENAPI)) - .body("createdOn", anything()) - .body("name", equalTo("Updated Name")) - .body("description", equalTo("Updated description.")); + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() + .statusCode(200).body("version", equalTo(version2)) + .body("artifactType", equalTo(ArtifactType.OPENAPI)).body("createdOn", anything()) + .body("name", equalTo("Updated Name")).body("description", equalTo("Updated description.")); // Get the version meta-data for the version we **didn't** update - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI") .pathParam("version", version3) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() - .statusCode(200) - .body("version", equalTo(version3)) - .body("artifactType", equalTo(ArtifactType.OPENAPI)) - .body("createdOn", anything()) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() + .statusCode(200).body("version", equalTo(version3)) + .body("artifactType", equalTo(ArtifactType.OPENAPI)).body("createdOn", anything()) .body("name", equalTo("Empty API (VERSION 3)")) .body("description", equalTo("An example API design using OpenAPI.")); // Get the version meta-data for a non-existent version - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI") - .pathParam("version", 12345) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testArtifactVersionMetaData/EmptyAPI").pathParam("version", 12345) + 
.get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() .statusCode(404); } @@ -1946,35 +1311,23 @@ public void testYamlContentType() throws Exception { String artifactContent = resourceToString("openapi-empty.yaml"); // Create OpenAPI artifact (from YAML) - io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_YAML); + io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact( + artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_YAML); createArtifact.setName("Empty API"); createArtifact.setDescription("An example API design using OpenAPI."); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .body("artifact.artifactId", equalTo(artifactId)) - .body("artifact.name", equalTo("Empty API")) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) + .body("artifact.artifactId", equalTo(artifactId)).body("artifact.name", equalTo("Empty API")) .body("artifact.description", equalTo("An example API design using OpenAPI.")) .body("artifact.artifactType", equalTo(artifactType)); // Get the artifact content (should still be YAML) RestAssured.registerParser("application/x-yaml", Parser.JSON); - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testYamlContentType") + given().when().pathParam("groupId", GROUP).pathParam("artifactId", "testYamlContentType") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .header("Content-Type", Matchers.containsString(CT_YAML)); + .then().statusCode(200).header("Content-Type", 
Matchers.containsString(CT_YAML)); } - @Test public void testWsdlArtifact() throws Exception { String artifactId = "testWsdlArtifact"; @@ -1982,27 +1335,17 @@ public void testWsdlArtifact() throws Exception { String artifactContent = resourceToString("sample.wsdl"); // Create WSDL artifact - io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.WSDL, artifactContent, ContentTypes.APPLICATION_XML); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) + io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact( + artifactId, ArtifactType.WSDL, artifactContent, ContentTypes.APPLICATION_XML); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) .body("artifact.artifactId", equalTo(artifactId)) .body("artifact.artifactType", equalTo(artifactType)); // Get the artifact content (should be XML) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .header("Content-Type", Matchers.containsString(CT_XML)); + .then().statusCode(200).header("Content-Type", Matchers.containsString(CT_XML)); } @Test @@ -2015,86 +1358,62 @@ public void testCreateAlreadyExistingArtifact() throws Exception { final String artifactDescription = "ArtifactDescriptionFromHeader"; // Create OpenAPI artifact - indicate the type via a header param - Long globalId1 = createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON).getVersion().getGlobalId(); + Long globalId1 = 
createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON).getVersion().getGlobalId(); // Try to create the same artifact ID (should fail) - io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(409) - .body("error_code", equalTo(409)) - .body("message", equalTo("An artifact with ID '" + artifactId + "' in group 'GroupsResourceTest' already exists.")); - - // Try to create the same artifact ID with FIND_OR_CREATE_VERSION for if exists (should return same artifact) - createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .queryParam("ifExists", IfArtifactExists.FIND_OR_CREATE_VERSION) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) + io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact( + artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(409) + .body("error_code", equalTo(409)).body("message", equalTo("An artifact with ID '" + artifactId + + "' in group 'GroupsResourceTest' already exists.")); + + // Try to create the same artifact ID with FIND_OR_CREATE_VERSION for if exists (should return same + // artifact) + createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + 
given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .queryParam("ifExists", IfArtifactExists.FIND_OR_CREATE_VERSION).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)) - .body("version.version", equalTo("1")) - .body("artifact.createdOn", anything()); + .body("version.version", equalTo("1")).body("artifact.createdOn", anything()); // Try to create the same artifact ID with CREATE_VERSION for if exists (should create a new version) - createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, updatedArtifactContent, ContentTypes.APPLICATION_JSON); - ValidatableResponse resp = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .queryParam("ifExists", IfArtifactExists.CREATE_VERSION) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) + createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, + updatedArtifactContent, ContentTypes.APPLICATION_JSON); + ValidatableResponse resp = given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .queryParam("ifExists", IfArtifactExists.CREATE_VERSION).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)) - .body("artifact.createdOn", anything()) - .body("version.version", equalTo("2")); - /*Integer globalId2 = */ + .body("artifact.createdOn", anything()).body("version.version", equalTo("2")); + /* Integer globalId2 = */ resp.extract().body().path("globalId"); - // Try to create the same artifact ID with FIND_OR_CREATE_VERSION - should return v1 (matching content) - createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - resp = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - 
.queryParam("ifExists", IfArtifactExists.FIND_OR_CREATE_VERSION) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) + // Try to create the same artifact ID with FIND_OR_CREATE_VERSION - should return v1 (matching + // content) + createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); + resp = given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .queryParam("ifExists", IfArtifactExists.FIND_OR_CREATE_VERSION).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)); Integer globalId3 = resp.extract().body().path("version.globalId"); assertEquals(globalId1, globalId3.longValue()); - // Try to create the same artifact ID with FIND_OR_CREATE_VERSION and updated content - should create a new version + // Try to create the same artifact ID with FIND_OR_CREATE_VERSION and updated content - should create + // a new version // and use name and description from headers - createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, v3ArtifactContent, ContentTypes.APPLICATION_JSON); + createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, v3ArtifactContent, + ContentTypes.APPLICATION_JSON); createArtifact.getFirstVersion().setName(artifactName); createArtifact.getFirstVersion().setDescription(artifactDescription); - resp = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .queryParam("ifExists", IfArtifactExists.FIND_OR_CREATE_VERSION) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .body("version.version", equalTo("3")) - .body("version.name", equalTo(artifactName)) + resp = given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .queryParam("ifExists", 
IfArtifactExists.FIND_OR_CREATE_VERSION).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) + .body("version.version", equalTo("3")).body("version.name", equalTo(artifactName)) .body("version.description", equalTo(artifactDescription)) .body("artifact.artifactType", equalTo(ArtifactType.OPENAPI)); } @@ -2105,86 +1424,50 @@ public void testDeleteArtifactWithRule() throws Exception { String artifactId = "testDeleteArtifactWithRule/EmptyAPI"; // Create an artifact - createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Add a rule CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(createRule) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(createRule).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then() + .statusCode(204).body(anything()); // Get a single rule by name - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("VALIDITY")) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("VALIDITY")) .body("config", equalTo("FULL")); // Delete the artifact - given() - .when() - .pathParam("groupId", 
GROUP) - .pathParam("artifactId", artifactId) - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(204); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(204); // Get a single rule by name (should be 404 because the artifact is gone) // Also try to get the artifact itself (should be 404) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY") - .then() - .statusCode(404); - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY").then() .statusCode(404); + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(404); // Re-create the artifact - createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Get a single rule by name (should be 404 because the artifact is gone) - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY") - .then() + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/VALIDITY").then() .statusCode(404); // Add the same rule - should work because the old rule was deleted when the artifact was deleted. 
createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(createRule) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .body(createRule).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then() + .statusCode(204).body(anything()); } @Test @@ -2195,103 +1478,57 @@ public void testCustomArtifactVersion() throws Exception { String artifactId = "MyVersionedAPI"; // Create OpenAPI artifact version 1.0.0 - io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + io.apicurio.registry.rest.v3.beans.CreateArtifact createArtifact = TestUtils.serverCreateArtifact( + artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); createArtifact.getFirstVersion().setVersion("1.0.0"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", groupId) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .body("artifact.artifactId", equalTo(artifactId)) - .body("artifact.groupId", equalTo(groupId)) + given().when().contentType(CT_JSON).pathParam("groupId", groupId).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200) + .body("artifact.artifactId", equalTo(artifactId)).body("artifact.groupId", equalTo(groupId)) .body("version.version", equalTo("1.0.0")); // Make sure we can get the artifact content by version - given() - .when() - .pathParam("groupId", groupId) - .pathParam("artifactId", artifactId) + given().when().pathParam("groupId", groupId).pathParam("artifactId", artifactId) .pathParam("version", "1.0.0") - 
.get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) - .body("info.title", equalTo("Empty API")); + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/content").then() + .statusCode(200).body("openapi", equalTo("3.0.2")).body("info.title", equalTo("Empty API")); // Make sure we can get the artifact meta-data by version - given() - .when() - .pathParam("groupId", groupId) - .pathParam("artifactId", artifactId) + given().when().pathParam("groupId", groupId).pathParam("artifactId", artifactId) .pathParam("version", "1.0.0") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() - .statusCode(200) - .body("artifactId", equalTo(artifactId)) - .body("groupId", equalTo(groupId)) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() + .statusCode(200).body("artifactId", equalTo(artifactId)).body("groupId", equalTo(groupId)) .body("version", equalTo("1.0.0")); // Add version 1.0.1 String updatedContent = artifactContent.replace("Empty API", "Empty API (Version 1.0.1)"); - io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils.serverCreateVersion(updatedContent, ContentTypes.APPLICATION_JSON); + io.apicurio.registry.rest.v3.beans.CreateVersion createVersion = TestUtils + .serverCreateVersion(updatedContent, ContentTypes.APPLICATION_JSON); createVersion.setVersion("1.0.1"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", groupId) - .pathParam("artifactId", artifactId) - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("artifactId", equalTo(artifactId)) - .body("version", equalTo("1.0.1")) - .body("artifactType", equalTo(ArtifactType.OPENAPI)); + given().when().contentType(CT_JSON).pathParam("groupId", groupId).pathParam("artifactId", artifactId) + 
.body(createVersion).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") + .then().statusCode(200).body("artifactId", equalTo(artifactId)) + .body("version", equalTo("1.0.1")).body("artifactType", equalTo(ArtifactType.OPENAPI)); // List the artifact versions - given() - .when() - .pathParam("groupId", groupId) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("count", equalTo(2)) - .body("versions[0].version", equalTo("1.0.0")) + given().when().pathParam("groupId", groupId).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) + .body("count", equalTo(2)).body("versions[0].version", equalTo("1.0.0")) .body("versions[1].version", equalTo("1.0.1")); // Add version 1.0.2 updatedContent = artifactContent.replace("Empty API", "Empty API (Version 1.0.2)"); createVersion = TestUtils.serverCreateVersion(updatedContent, ContentTypes.APPLICATION_JSON); createVersion.setVersion("1.0.2"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", groupId) - .pathParam("artifactId", artifactId) - .body(createVersion) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("artifactId", equalTo(artifactId)) - .body("version", equalTo("1.0.2")) - .body("artifactType", equalTo(ArtifactType.OPENAPI)); + given().when().contentType(CT_JSON).pathParam("groupId", groupId).pathParam("artifactId", artifactId) + .body(createVersion).post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") + .then().statusCode(200).body("artifactId", equalTo(artifactId)) + .body("version", equalTo("1.0.2")).body("artifactType", equalTo(ArtifactType.OPENAPI)); // List the artifact versions - given() - .when() - .pathParam("groupId", groupId) - .pathParam("artifactId", artifactId) - 
.get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("count", equalTo(3)) - .body("versions[0].version", equalTo("1.0.0")) - .body("versions[1].version", equalTo("1.0.1")) - .body("versions[2].version", equalTo("1.0.2")); + given().when().pathParam("groupId", groupId).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) + .body("count", equalTo(3)).body("versions[0].version", equalTo("1.0.0")) + .body("versions[1].version", equalTo("1.0.1")).body("versions[2].version", equalTo("1.0.2")); } @@ -2300,19 +1537,17 @@ public void testCreateArtifactAfterDelete() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create OpenAPI artifact - indicate the type via a header param - createArtifact(GROUP, "testCreateArtifactAfterDelete/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testCreateArtifactAfterDelete/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Delete the artifact - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testCreateArtifactAfterDelete/EmptyAPI") - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(204); + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(204); // Create the same artifact - createArtifact(GROUP, "testCreateArtifactAfterDelete/EmptyAPI", ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testCreateArtifactAfterDelete/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); } @Test @@ -2327,11 +1562,8 @@ void testArtifactWithReferences() throws Exception { // Create #2 referencing the #1, using different content List references = List.of(ArtifactReference.builder() - 
.groupId(metadata.getGroupId()) - .artifactId(metadata.getArtifactId()) - .version(metadata.getVersion()) - .name("foo") - .build()); + .groupId(metadata.getGroupId()).artifactId(metadata.getArtifactId()) + .version(metadata.getVersion()).name("foo").build()); artifactContent = getRandomValidJsonSchemaContent(); metadata = createArtifactExtendedRaw(GroupId.DEFAULT.getRawGroupIdWithDefaultString(), null, @@ -2339,21 +1571,21 @@ void testArtifactWithReferences() throws Exception { // Save the referencing artifact metadata for later use var referencingMD = metadata; - - var actualReferences = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(metadata.getArtifactId()).versions().byVersionExpression(metadata.getVersion()).references().get(); + + var actualReferences = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()) + .artifacts().byArtifactId(metadata.getArtifactId()).versions() + .byVersionExpression(metadata.getVersion()).references().get(); assertEquals(references.size(), actualReferences.size()); assertEquals(references.get(0).getName(), actualReferences.get(0).getName()); assertEquals(references.get(0).getVersion(), actualReferences.get(0).getVersion()); assertEquals(references.get(0).getArtifactId(), actualReferences.get(0).getArtifactId()); assertEquals(references.get(0).getGroupId(), actualReferences.get(0).getGroupId()); - // Trying to use different references with the same content is ok, but the contentId and contentHash is different. + // Trying to use different references with the same content is ok, but the contentId and contentHash + // is different. 
List references2 = List.of(ArtifactReference.builder() - .groupId(metadata.getGroupId()) - .artifactId(metadata.getArtifactId()) - .version(metadata.getVersion()) - .name("foo2") - .build()); + .groupId(metadata.getGroupId()).artifactId(metadata.getArtifactId()) + .version(metadata.getVersion()).name("foo2").build()); var secondMetadata = createArtifactExtendedRaw(GroupId.DEFAULT.getRawGroupIdWithDefaultString(), null, ArtifactType.JSON, artifactContent, ContentTypes.APPLICATION_JSON, references2).getVersion(); @@ -2361,74 +1593,56 @@ void testArtifactWithReferences() throws Exception { assertNotEquals(secondMetadata.getContentId(), metadata.getContentId()); // Same references are not an issue - metadata = createArtifactExtendedRaw("default2", null, ArtifactType.JSON, artifactContent, ContentTypes.APPLICATION_JSON, references).getVersion(); + metadata = createArtifactExtendedRaw("default2", null, ArtifactType.JSON, artifactContent, + ContentTypes.APPLICATION_JSON, references).getVersion(); // Get references via globalId - var referenceResponse = given() - .when() - .pathParam("globalId", metadata.getGlobalId()) - .get("/registry/v3/ids/globalIds/{globalId}/references") - .then() - .statusCode(HTTP_OK) - .extract().as(new TypeRef>() { + var referenceResponse = given().when().pathParam("globalId", metadata.getGlobalId()) + .get("/registry/v3/ids/globalIds/{globalId}/references").then().statusCode(HTTP_OK).extract() + .as(new TypeRef>() { }); assertEquals(references, referenceResponse); // Get references via contentId - referenceResponse = given() - .when() - .pathParam("contentId", metadata.getContentId()) - .get("/registry/v3/ids/contentIds/{contentId}/references") - .then() - .statusCode(HTTP_OK) + referenceResponse = given().when().pathParam("contentId", metadata.getContentId()) + .get("/registry/v3/ids/contentIds/{contentId}/references").then().statusCode(HTTP_OK) .extract().as(new TypeRef>() { }); final String referencesSerialized = 
SqlUtil.serializeReferences(toReferenceDtos(references)); - //We calculate the hash using the content itself and the references - String contentHash = DigestUtils.sha256Hex(concatContentAndReferences(artifactContent.getBytes(StandardCharsets.UTF_8), referencesSerialized.getBytes(StandardCharsets.UTF_8))); + // We calculate the hash using the content itself and the references + String contentHash = DigestUtils + .sha256Hex(concatContentAndReferences(artifactContent.getBytes(StandardCharsets.UTF_8), + referencesSerialized.getBytes(StandardCharsets.UTF_8))); assertEquals(references, referenceResponse); // Get references via contentHash - referenceResponse = given() - .when() - .pathParam("contentHash", contentHash) - .get("/registry/v3/ids/contentHashes/{contentHash}/references") - .then() - .statusCode(HTTP_OK) + referenceResponse = given().when().pathParam("contentHash", contentHash) + .get("/registry/v3/ids/contentHashes/{contentHash}/references").then().statusCode(HTTP_OK) .extract().as(new TypeRef>() { }); assertEquals(references, referenceResponse); // Get references via GAV - referenceResponse = given() - .when() - .pathParam("groupId", metadata.getGroupId()) - .pathParam("artifactId", metadata.getArtifactId()) - .pathParam("version", metadata.getVersion()) + referenceResponse = given().when().pathParam("groupId", metadata.getGroupId()) + .pathParam("artifactId", metadata.getArtifactId()).pathParam("version", metadata.getVersion()) .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/references") - .then() - .statusCode(HTTP_OK) - .extract().as(new TypeRef>() { + .then().statusCode(HTTP_OK).extract().as(new TypeRef>() { }); assertEquals(references, referenceResponse); // Get INBOUND references via GAV - referenceResponse = given() - .when() + referenceResponse = given().when() .pathParam("groupId", new GroupId(referencedMD.getGroupId()).getRawGroupIdWithDefaultString()) .pathParam("artifactId", referencedMD.getArtifactId()) - 
.pathParam("version", referencedMD.getVersion()) - .queryParam("refType", ReferenceType.INBOUND) + .pathParam("version", referencedMD.getVersion()).queryParam("refType", ReferenceType.INBOUND) .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/references") - .then() - .statusCode(HTTP_OK) - .extract().as(new TypeRef>() { + .then().statusCode(HTTP_OK).extract().as(new TypeRef>() { }); assertFalse(referenceResponse.isEmpty()); assertEquals(2, referenceResponse.size()); @@ -2437,14 +1651,10 @@ void testArtifactWithReferences() throws Exception { assertEquals(referencingMD.getVersion(), referenceResponse.get(0).getVersion()); // Get INBOUND references via globalId - referenceResponse = given() - .when() - .pathParam("globalId", referencedMD.getGlobalId()) + referenceResponse = given().when().pathParam("globalId", referencedMD.getGlobalId()) .queryParam("refType", ReferenceType.INBOUND) - .get("/registry/v3/ids/globalIds/{globalId}/references") - .then() - .statusCode(HTTP_OK) - .extract().as(new TypeRef>() { + .get("/registry/v3/ids/globalIds/{globalId}/references").then().statusCode(HTTP_OK).extract() + .as(new TypeRef>() { }); assertFalse(referenceResponse.isEmpty()); assertEquals(2, referenceResponse.size()); @@ -2453,8 +1663,10 @@ void testArtifactWithReferences() throws Exception { assertEquals(referencingMD.getVersion(), referenceResponse.get(0).getVersion()); } - private byte[] concatContentAndReferences(byte[] contentBytes, byte[] referencesBytes) throws IOException { - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(contentBytes.length + referencesBytes.length); + private byte[] concatContentAndReferences(byte[] contentBytes, byte[] referencesBytes) + throws IOException { + ByteArrayOutputStream outputStream = new ByteArrayOutputStream( + contentBytes.length + referencesBytes.length); outputStream.write(contentBytes); outputStream.write(referencesBytes); return outputStream.toByteArray(); @@ -2466,32 +1678,23 @@ public 
void testArtifactComments() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create OpenAPI artifact - createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); // Get comments for the artifact (should be none) - List comments = given() - .when() - .pathParam("groupId", GROUP) + List comments = given().when().pathParam("groupId", GROUP) .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/latest/comments") - .then() - .statusCode(HTTP_OK) - .extract().as(new TypeRef>() { + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/latest/comments").then() + .statusCode(HTTP_OK).extract().as(new TypeRef>() { }); assertEquals(0, comments.size()); // Create a new comment NewComment nc = NewComment.builder().value("COMMENT_1").build(); - Comment comment1 = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(nc) + Comment comment1 = given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + .pathParam("artifactId", artifactId).body(nc) .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/comments") - .then() - .statusCode(HTTP_OK) - .extract().as(Comment.class); + .then().statusCode(HTTP_OK).extract().as(Comment.class); assertNotNull(comment1); assertNotNull(comment1.getCommentId()); assertNotNull(comment1.getValue()); @@ -2500,16 +1703,10 @@ public void testArtifactComments() throws Exception { // Create another new comment nc = NewComment.builder().value("COMMENT_2").build(); - Comment comment2 = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .body(nc) + Comment comment2 = given().when().contentType(CT_JSON).pathParam("groupId", GROUP) + 
.pathParam("artifactId", artifactId).body(nc) .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/comments") - .then() - .statusCode(HTTP_OK) - .extract().as(Comment.class); + .then().statusCode(HTTP_OK).extract().as(Comment.class); assertNotNull(comment2); assertNotNull(comment2.getCommentId()); assertNotNull(comment2.getValue()); @@ -2517,14 +1714,9 @@ public void testArtifactComments() throws Exception { assertEquals("COMMENT_2", comment2.getValue()); // Get the list of comments (should have 2) - comments = given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) + comments = given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/comments") - .then() - .statusCode(HTTP_OK) - .extract().as(new TypeRef>() { + .then().statusCode(HTTP_OK).extract().as(new TypeRef>() { }); assertEquals(2, comments.size()); assertEquals("COMMENT_2", comments.get(0).getValue()); @@ -2532,50 +1724,30 @@ public void testArtifactComments() throws Exception { // Update a comment nc = NewComment.builder().value("COMMENT_2_UPDATED").build(); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .pathParam("commentId", comment2.getCommentId()) - .body(nc) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .pathParam("commentId", comment2.getCommentId()).body(nc) .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/comments/{commentId}") - .then() - .statusCode(HTTP_NO_CONTENT); + .then().statusCode(HTTP_NO_CONTENT); // Get the list of comments (should have 2) - comments = given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) + comments = given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) 
.get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/comments") - .then() - .statusCode(HTTP_OK) - .extract().as(new TypeRef>() { + .then().statusCode(HTTP_OK).extract().as(new TypeRef>() { }); assertEquals(2, comments.size()); assertEquals("COMMENT_2_UPDATED", comments.get(0).getValue()); assertEquals("COMMENT_1", comments.get(1).getValue()); // Delete a comment - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) .pathParam("commentId", comment2.getCommentId()) .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/comments/{commentId}") - .then() - .statusCode(HTTP_NO_CONTENT); + .then().statusCode(HTTP_NO_CONTENT); // Get the list of comments (should have only 1) - comments = given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) + comments = given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/comments") - .then() - .statusCode(HTTP_OK) - .extract().as(new TypeRef>() { + .then().statusCode(HTTP_OK).extract().as(new TypeRef>() { }); assertEquals(1, comments.size()); assertEquals("COMMENT_1", comments.get(0).getValue()); @@ -2591,27 +1763,15 @@ public void testCreateArtifactIntegrityRuleViolation() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.INTEGRITY); createRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createRule) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createRule) .pathParam("artifactId", artifactId) - .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules") - .then() - .statusCode(204) + .post("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules").then().statusCode(204) .body(anything()); 
// Verify the rule was added - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/INTEGRITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("INTEGRITY")) + given().when().pathParam("groupId", GROUP).pathParam("artifactId", artifactId) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/rules/INTEGRITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("INTEGRITY")) .body("config", equalTo("FULL")); // Now try registering an artifact with a valid reference @@ -2621,16 +1781,12 @@ public void testCreateArtifactIntegrityRuleViolation() throws Exception { reference.setArtifactId(artifactId); reference.setName("other.json#/defs/Foo"); - CreateVersion createVersion = TestUtils.clientCreateVersion(artifactContent, ContentTypes.APPLICATION_JSON); + CreateVersion createVersion = TestUtils.clientCreateVersion(artifactContent, + ContentTypes.APPLICATION_JSON); createVersion.setVersion("2"); createVersion.getContent().setReferences(List.of(reference)); - clientV3 - .groups() - .byGroupId(GROUP) - .artifacts() - .byArtifactId(artifactId) - .versions() + clientV3.groups().byGroupId(GROUP).artifacts().byArtifactId(artifactId).versions() .post(createVersion); // Now try registering an artifact with an INVALID reference @@ -2646,12 +1802,7 @@ public void testCreateArtifactIntegrityRuleViolation() throws Exception { CreateVersion f_createVersion = createVersion; var exception_1 = assertThrows(io.apicurio.registry.rest.client.models.Error.class, () -> { - clientV3 - .groups() - .byGroupId(GROUP) - .artifacts() - .byArtifactId(artifactId) - .versions() + clientV3.groups().byGroupId(GROUP).artifacts().byArtifactId(artifactId).versions() .post(f_createVersion); }); Assertions.assertEquals(409, exception_1.getErrorCode()); @@ -2677,12 +1828,7 @@ public void testCreateArtifactIntegrityRuleViolation() 
throws Exception { CreateVersion f_createVersion2 = createVersion; var exception_2 = assertThrows(io.apicurio.registry.rest.client.models.Error.class, () -> { - clientV3 - .groups() - .byGroupId(GROUP) - .artifacts() - .byArtifactId(artifactId) - .versions() + clientV3.groups().byGroupId(GROUP).artifacts().byArtifactId(artifactId).versions() .post(f_createVersion2); }); Assertions.assertEquals(409, exception_2.getErrorCode()); @@ -2695,72 +1841,54 @@ public void testCreateArtifactIntegrityRuleViolation() throws Exception { CreateVersion f_createVersion3 = createVersion; var exception_3 = assertThrows(io.apicurio.registry.rest.client.models.Error.class, () -> { - clientV3 - .groups() - .byGroupId(GROUP) - .artifacts() - .byArtifactId(artifactId) - .versions() + clientV3.groups().byGroupId(GROUP).artifacts().byArtifactId(artifactId).versions() .post(f_createVersion3); }); Assertions.assertEquals(409, exception_3.getErrorCode()); Assertions.assertEquals("RuleViolationException", exception_3.getName()); } - @Test public void testGetArtifactVersionWithReferences() throws Exception { String referencedTypesContent = resourceToString("referenced-types.json"); String withExternalRefContent = resourceToString("openapi-with-external-ref.json"); // Create the artifact containing a type to be referenced - createArtifact(GROUP, "testGetArtifactVersionWithReferences/ReferencedTypes", ArtifactType.OPENAPI, referencedTypesContent, ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testGetArtifactVersionWithReferences/ReferencedTypes", ArtifactType.OPENAPI, + referencedTypesContent, ContentTypes.APPLICATION_JSON); // Create the artifact that references the type - List refs = Collections.singletonList( - ArtifactReference.builder() - .name("./referenced-types.json#/components/schemas/Widget") - .groupId(GROUP) - .artifactId("testGetArtifactVersionWithReferences/ReferencedTypes") - .version("1") - .build()); - createArtifactWithReferences(GROUP, 
"testGetArtifactVersionWithReferences/WithExternalRef", ArtifactType.OPENAPI, - withExternalRefContent, ContentTypes.APPLICATION_JSON, refs); + List refs = Collections.singletonList(ArtifactReference.builder() + .name("./referenced-types.json#/components/schemas/Widget").groupId(GROUP) + .artifactId("testGetArtifactVersionWithReferences/ReferencedTypes").version("1").build()); + createArtifactWithReferences(GROUP, "testGetArtifactVersionWithReferences/WithExternalRef", + ArtifactType.OPENAPI, withExternalRefContent, ContentTypes.APPLICATION_JSON, refs); // Get the content of the artifact preserving external references - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testGetArtifactVersionWithReferences/WithExternalRef") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) - .body("paths.widgets.get.responses.200.content.json.schema.items.$ref", equalTo("./referenced-types.json#/components/schemas/Widget")); + .then().statusCode(200).body("openapi", equalTo("3.0.2")) + .body("paths.widgets.get.responses.200.content.json.schema.items.$ref", + equalTo("./referenced-types.json#/components/schemas/Widget")); // Get the content of the artifact rewriting external references - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testGetArtifactVersionWithReferences/WithExternalRef") .queryParam("references", "REWRITE") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) - .body("paths.widgets.get.responses.200.content.json.schema.items.$ref", endsWith("/apis/registry/v3/groups/GroupsResourceTest/artifacts/testGetArtifactVersionWithReferences%2FReferencedTypes/versions/1?references=REWRITE#/components/schemas/Widget")); + 
.then().statusCode(200).body("openapi", equalTo("3.0.2")) + .body("paths.widgets.get.responses.200.content.json.schema.items.$ref", endsWith( + "/apis/registry/v3/groups/GroupsResourceTest/artifacts/testGetArtifactVersionWithReferences%2FReferencedTypes/versions/1?references=REWRITE#/components/schemas/Widget")); // Get the content of the artifact inlining/dereferencing external references - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testGetArtifactVersionWithReferences/WithExternalRef") .queryParam("references", "DEREFERENCE") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) - .body("paths.widgets.get.responses.200.content.json.schema.items.$ref", equalTo("#/components/schemas/Widget")); + .then().statusCode(200).body("openapi", equalTo("3.0.2")) + .body("paths.widgets.get.responses.200.content.json.schema.items.$ref", + equalTo("#/components/schemas/Widget")); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/IdsResourceTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/IdsResourceTest.java index 53abcf0be3..e29efc1330 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/IdsResourceTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/IdsResourceTest.java @@ -27,98 +27,67 @@ public void testIdsAfterCreate() throws Exception { // Create a throwaway artifact so that contentId for future artifacts with different // content will need to be greater than 0. 
- this.createArtifact(GROUP + "-foo", "Empty-0", ArtifactType.WSDL, resourceToString("sample.wsdl"), ContentTypes.APPLICATION_XML); + this.createArtifact(GROUP + "-foo", "Empty-0", ArtifactType.WSDL, resourceToString("sample.wsdl"), + ContentTypes.APPLICATION_XML); String artifactId1 = "testIdsAfterCreate/Empty-1"; String artifactId2 = "testIdsAfterCreate/Empty-2"; // Create artifact 1 - CreateArtifact createArtifact1 = TestUtils.serverCreateArtifact(artifactId1, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - CreateArtifactResponse createArtifactResponse1 = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact1) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .extract() - .as(CreateArtifactResponse.class); + CreateArtifact createArtifact1 = TestUtils.serverCreateArtifact(artifactId1, ArtifactType.OPENAPI, + artifactContent, ContentTypes.APPLICATION_JSON); + CreateArtifactResponse createArtifactResponse1 = given().when().contentType(CT_JSON) + .pathParam("groupId", GROUP).body(createArtifact1) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200).extract() + .as(CreateArtifactResponse.class); // Create artifact 2 - CreateArtifact createArtifact2 = TestUtils.serverCreateArtifact(artifactId2, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - CreateArtifactResponse createArtifactResponse2 = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact2) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .extract() - .as(CreateArtifactResponse.class); + CreateArtifact createArtifact2 = TestUtils.serverCreateArtifact(artifactId2, ArtifactType.OPENAPI, + artifactContent, ContentTypes.APPLICATION_JSON); + CreateArtifactResponse createArtifactResponse2 = given().when().contentType(CT_JSON) + .pathParam("groupId", GROUP).body(createArtifact2) + 
.post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200).extract() + .as(CreateArtifactResponse.class); Assertions.assertNotNull(createArtifactResponse1.getVersion().getGlobalId()); Assertions.assertNotNull(createArtifactResponse1.getVersion().getContentId()); Assertions.assertNotEquals(0, createArtifactResponse1.getVersion().getContentId()); - Assertions.assertNotEquals(createArtifactResponse1.getVersion().getGlobalId(), createArtifactResponse2.getVersion().getGlobalId()); - Assertions.assertEquals(createArtifactResponse1.getVersion().getContentId(), createArtifactResponse2.getVersion().getContentId()); + Assertions.assertNotEquals(createArtifactResponse1.getVersion().getGlobalId(), + createArtifactResponse2.getVersion().getGlobalId()); + Assertions.assertEquals(createArtifactResponse1.getVersion().getContentId(), + createArtifactResponse2.getVersion().getContentId()); // Get artifact1 meta data and check the contentId - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId1) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest") - .then() - .statusCode(200) - .body("artifactType", equalTo(ArtifactType.OPENAPI)) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId1) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest").then() + .statusCode(200).body("artifactType", equalTo(ArtifactType.OPENAPI)) .body("groupId", equalTo(GROUP)) .body("contentId", equalTo(createArtifactResponse1.getVersion().getContentId().intValue())); - // Get artifact2 meta data and check the contentId - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .pathParam("artifactId", artifactId2) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest") - .then() - .statusCode(200) - .body("artifactType", equalTo(ArtifactType.OPENAPI)) + 
given().when().contentType(CT_JSON).pathParam("groupId", GROUP).pathParam("artifactId", artifactId2) + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest").then() + .statusCode(200).body("artifactType", equalTo(ArtifactType.OPENAPI)) .body("groupId", equalTo(GROUP)) .body("contentId", equalTo(createArtifactResponse2.getVersion().getContentId().intValue())); // List versions in artifact, make sure contentId is returned. - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) .pathParam("artifactId", createArtifactResponse1.getVersion().getArtifactId()) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions") - .then() - .statusCode(200) - .body("count", equalTo(1)) - .body("versions[0].contentId", notNullValue()) - .body("versions[0].contentId", not(equalTo(0))) - .body("versions[0].contentId", equalTo(createArtifactResponse1.getVersion().getContentId().intValue())); + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions").then().statusCode(200) + .body("count", equalTo(1)).body("versions[0].contentId", notNullValue()) + .body("versions[0].contentId", not(equalTo(0))).body("versions[0].contentId", + equalTo(createArtifactResponse1.getVersion().getContentId().intValue())); // Get artifact version meta-data, make sure contentId is returned - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) + given().when().contentType(CT_JSON).pathParam("groupId", GROUP) .pathParam("artifactId", createArtifactResponse1.getVersion().getArtifactId()) .pathParam("version", createArtifactResponse1.getVersion().getVersion()) - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() .statusCode(200) .body("globalId", equalTo(createArtifactResponse1.getVersion().getGlobalId().intValue())) 
.body("contentId", equalTo(createArtifactResponse1.getVersion().getContentId().intValue())); - } @Test @@ -129,30 +98,19 @@ public void testGetByGlobalId() throws Exception { String artifactId = "testGetByGlobalId/Empty"; // Create the artifact. - CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - CreateArtifactResponse createArtifactResponse = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .extract() + CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, + artifactContent, ContentTypes.APPLICATION_JSON); + CreateArtifactResponse createArtifactResponse = given().when().contentType(CT_JSON) + .pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200).extract() .as(CreateArtifactResponse.class); long globalId = createArtifactResponse.getVersion().getGlobalId(); // Get by globalId - given() - .when() - .contentType(CT_JSON) - .pathParam("globalId", globalId) - .get("/registry/v3/ids/globalIds/{globalId}") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) - .body("info.title", equalTo(title)); + given().when().contentType(CT_JSON).pathParam("globalId", globalId) + .get("/registry/v3/ids/globalIds/{globalId}").then().statusCode(200) + .body("openapi", equalTo("3.0.2")).body("info.title", equalTo(title)); } @@ -167,11 +125,11 @@ public void testGetByGlobalIdIssue1501() throws Exception { // Create two artifacts with same artifactId but with different groupId - long globalId1 = createArtifact(group1, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON) - .getVersion().getGlobalId(); + long globalId1 = createArtifact(group1, artifactId, ArtifactType.OPENAPI, artifactContent, + 
ContentTypes.APPLICATION_JSON).getVersion().getGlobalId(); - long globalId2 = createArtifact(group2, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON) - .getVersion().getGlobalId(); + long globalId2 = createArtifact(group2, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON).getVersion().getGlobalId(); // Get by globalId should not fail clientV3.ids().globalIds().byGlobalId(globalId1).get(); @@ -179,7 +137,6 @@ public void testGetByGlobalIdIssue1501() throws Exception { } - @Test public void testGetByContentId() throws Exception { String title = "Test By Content ID API"; @@ -188,39 +145,23 @@ public void testGetByContentId() throws Exception { String artifactId = "testGetByContentId/Empty"; // Create the artifact. - CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - CreateArtifactResponse createArtifactResponse = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200) - .extract() - .as(CreateArtifactResponse.class); + CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, + artifactContent, ContentTypes.APPLICATION_JSON); + CreateArtifactResponse createArtifactResponse = given().when().contentType(CT_JSON) + .pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200).extract() + .as(CreateArtifactResponse.class); long contentId = createArtifactResponse.getVersion().getContentId(); // Get by contentId - given() - .when() - .contentType(CT_JSON) - .pathParam("contentId", contentId) - .get("/registry/v3/ids/contentIds/{contentId}") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) - .body("info.title", equalTo(title)); + 
given().when().contentType(CT_JSON).pathParam("contentId", contentId) + .get("/registry/v3/ids/contentIds/{contentId}").then().statusCode(200) + .body("openapi", equalTo("3.0.2")).body("info.title", equalTo(title)); // Get by contentId (not found) - given() - .when() - .contentType(CT_JSON) - .pathParam("contentId", Integer.MAX_VALUE) - .get("/registry/v3/ids/contentIds/{contentId}") - .then() - .statusCode(404); + given().when().contentType(CT_JSON).pathParam("contentId", Integer.MAX_VALUE) + .get("/registry/v3/ids/contentIds/{contentId}").then().statusCode(404); } @Test @@ -233,36 +174,19 @@ public void testGetByContentHash() throws Exception { String artifactId = "testGetByContentHash/Empty"; // Create the artifact. - CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GROUP) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then() - .statusCode(200); + CreateArtifact createArtifact = TestUtils.serverCreateArtifact(artifactId, ArtifactType.OPENAPI, + artifactContent, ContentTypes.APPLICATION_JSON); + given().when().contentType(CT_JSON).pathParam("groupId", GROUP).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then().statusCode(200); // Get by contentHash - given() - .when() - .contentType(CT_JSON) - .pathParam("contentHash", contentHash) - .get("/registry/v3/ids/contentHashes/{contentHash}") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) - .body("info.title", equalTo(title)); - + given().when().contentType(CT_JSON).pathParam("contentHash", contentHash) + .get("/registry/v3/ids/contentHashes/{contentHash}").then().statusCode(200) + .body("openapi", equalTo("3.0.2")).body("info.title", equalTo(title)); // Get by contentHash (not found) - given() - .when() - .contentType(CT_JSON) - .pathParam("contentHash", "CONTENT-HASH-NOT-VALID") - 
.get("/registry/v3/ids/contentHashes/{contentHash}") - .then() - .statusCode(404); + given().when().contentType(CT_JSON).pathParam("contentHash", "CONTENT-HASH-NOT-VALID") + .get("/registry/v3/ids/contentHashes/{contentHash}").then().statusCode(404); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/ImportExportTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/ImportExportTest.java index c208bbc7f8..55fd7c0a9a 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/ImportExportTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/ImportExportTest.java @@ -110,7 +110,8 @@ public void testExportImport() throws Exception { String artifactId = "TestArtifact-" + idx; int numExtraVersions = idx - 1; for (int jdx = 0; jdx < numExtraVersions; jdx++) { - createArtifactVersion(groupId, artifactId, "{\"title\": \"Version " + jdx + "\"}", ContentTypes.APPLICATION_JSON); + createArtifactVersion(groupId, artifactId, "{\"title\": \"Version " + jdx + "\"}", + ContentTypes.APPLICATION_JSON); } } @@ -122,8 +123,10 @@ public void testExportImport() throws Exception { metaData.setName("Version #" + jdx); metaData.setDescription("This is version number: " + jdx); metaData.setLabels(new Labels()); - metaData.getLabels().setAdditionalData(Map.of("artifact-number", "" + idx, "version-number", "" + jdx)); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(""+(jdx+1)).put(metaData); + metaData.getLabels() + .setAdditionalData(Map.of("artifact-number", "" + idx, "version-number", "" + jdx)); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("" + (jdx + 1)).put(metaData); } } @@ -134,12 +137,14 @@ public void testExportImport() throws Exception { createBranch.setBranchId("odds"); createBranch.setDescription("Odd numbered versions"); createBranch.setVersions(List.of("1", "3", "5")); - 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches).branches() + .post(createBranch); createBranch = new CreateBranch(); createBranch.setBranchId("evens"); createBranch.setDescription("Even numbered versions"); createBranch.setVersions(List.of("2", "4")); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches).branches().post(createBranch); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches).branches() + .post(createBranch); // Configure some global rules CreateRule createRule = new CreateRule(); @@ -153,11 +158,13 @@ public void testExportImport() throws Exception { createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig(ValidityLevel.SYNTAX_ONLY.name()); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules().post(createRule); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules() + .post(createRule); createRule = new CreateRule(); createRule.setRuleType(RuleType.INTEGRITY); createRule.setConfig(IntegrityLevel.FULL.name()); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules().post(createRule); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules() + .post(createRule); // Add some comments // TODO add comments @@ -202,14 +209,17 @@ public void testExportImport() throws Exception { }); Assertions.assertEquals(2, groups.getCount()); Assertions.assertEquals("PrimaryTestGroup", groups.getGroups().get(0).getGroupId()); - Assertions.assertEquals("The group for the export/import test.", groups.getGroups().get(0).getDescription()); + Assertions.assertEquals("The group for the export/import test.", + groups.getGroups().get(0).getDescription()); 
Assertions.assertEquals("SecondaryTestGroup", groups.getGroups().get(1).getGroupId()); - Assertions.assertEquals("Another test group that is empty.", groups.getGroups().get(1).getDescription()); + Assertions.assertEquals("Another test group that is empty.", + groups.getGroups().get(1).getDescription()); // TODO: check group labels (not returned by group search) // Assert empty artifact - ArtifactMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("EmptyArtifact").get(); + ArtifactMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId("EmptyArtifact") + .get(); Assertions.assertEquals(groupId, amd.getGroupId()); Assertions.assertEquals("EmptyArtifact", amd.getArtifactId()); Assertions.assertEquals("Empty artifact", amd.getName()); @@ -237,13 +247,15 @@ public void testExportImport() throws Exception { int expectedNumberOfVersions = idx; // Check the artifact has the correct # of versions - VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().get(); + VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().get(); Assertions.assertEquals(expectedNumberOfVersions, versions.getCount()); // Check each version for (int jdx = 0; jdx < idx; jdx++) { String version = "" + (jdx + 1); - VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(version).get(); + VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression(version).get(); Assertions.assertEquals(groupId, vmd.getGroupId()); Assertions.assertEquals(artifactId, vmd.getArtifactId()); Assertions.assertEquals(version, vmd.getVersion()); @@ -253,22 +265,27 @@ public void testExportImport() throws Exception { } // Assert branches - BranchSearchResults branches = 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches).branches().get(); + BranchSearchResults branches = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactIdWithBranches).branches().get(); Assertions.assertEquals(3, branches.getCount()); Assertions.assertEquals("evens", branches.getBranches().get(0).getBranchId()); Assertions.assertEquals("latest", branches.getBranches().get(1).getBranchId()); Assertions.assertEquals("odds", branches.getBranches().get(2).getBranchId()); - BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches).branches().byBranchId("evens").get(); + BranchMetaData branch = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactIdWithBranches).branches().byBranchId("evens").get(); Assertions.assertEquals("evens", branch.getBranchId()); Assertions.assertEquals("Even numbered versions", branch.getDescription()); Assertions.assertEquals(false, branch.getSystemDefined()); - branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches).branches().byBranchId("latest").get(); + branch = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches) + .branches().byBranchId("latest").get(); Assertions.assertEquals(true, branch.getSystemDefined()); - VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches).branches().byBranchId("evens").versions().get(); + VersionSearchResults versions = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactIdWithBranches).branches().byBranchId("evens").versions().get(); Assertions.assertEquals(2, versions.getCount()); - versions = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches).branches().byBranchId("odds").versions().get(); + versions = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithBranches) + 
.branches().byBranchId("odds").versions().get(); Assertions.assertEquals(3, versions.getCount()); // Assert global rules @@ -278,11 +295,14 @@ public void testExportImport() throws Exception { Assertions.assertEquals(ValidityLevel.FULL.name(), rule.getConfig()); // Assert artifact rules - rules = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules().get(); + rules = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules() + .get(); Assertions.assertEquals(2, rules.size()); - rule = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules().byRuleType(RuleType.VALIDITY.name()).get(); + rule = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules() + .byRuleType(RuleType.VALIDITY.name()).get(); Assertions.assertEquals(ValidityLevel.SYNTAX_ONLY.name(), rule.getConfig()); - rule = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules().byRuleType(RuleType.INTEGRITY.name()).get(); + rule = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIdWithRules).rules() + .byRuleType(RuleType.INTEGRITY.name()).get(); Assertions.assertEquals(IntegrityLevel.FULL.name(), rule.getConfig()); // Assert artifact comments diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchArtifactsTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchArtifactsTest.java index f06b920825..6ebc7499d7 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchArtifactsTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchArtifactsTest.java @@ -26,26 +26,20 @@ public void testSearchArtifactsByGroup() throws Exception { for (int idx = 0; idx < 5; idx++) { String title = "Empty API " + idx; String artifactId = "Empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", title), 
ContentTypes.APPLICATION_JSON, - (ca) -> { + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON, (ca) -> { ca.getFirstVersion().setName(title); }); } // Create 3 artifacts in some other group for (int idx = 0; idx < 5; idx++) { String artifactId = "Empty-" + idx; - this.createArtifact("SearchResourceTest", artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + this.createArtifact("SearchResourceTest", artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); } - given() - .when() - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(5)) - .body("artifacts[0].groupId", equalTo(group)) - ; + given().when().queryParam("groupId", group).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(5)).body("artifacts[0].groupId", equalTo(group)); } @Test @@ -57,53 +51,46 @@ public void testSearchArtifactsByName() throws Exception { // Two with the UUID name for (int idx = 0; idx < 2; idx++) { String artifactId = "Empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", name), ContentTypes.APPLICATION_JSON, - (ca) -> { + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", name), ContentTypes.APPLICATION_JSON, (ca) -> { ca.setName(name); }); } // Three with a different name for (int idx = 2; idx < 5; idx++) { String artifactId = "Empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); } - given() - .when() - .queryParam("name", name) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) + given().when().queryParam("name", 
name).get("/registry/v3/search/artifacts").then().statusCode(200) .body("count", equalTo(2)); } @Test public void testSearchArtifactsByDescription() throws Exception { String group = UUID.randomUUID().toString(); - String description = "The description is "+ UUID.randomUUID().toString(); + String description = "The description is " + UUID.randomUUID().toString(); String artifactContent = resourceToString("openapi-empty.json"); // Two with the UUID description for (int idx = 0; idx < 2; idx++) { String artifactId = "Empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("An example API design using OpenAPI.", description), ContentTypes.APPLICATION_JSON, - (ca) -> { + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("An example API design using OpenAPI.", description), + ContentTypes.APPLICATION_JSON, (ca) -> { ca.setDescription(description); }); } // Three with the default description for (int idx = 2; idx < 5; idx++) { String artifactId = "Empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); } - given() - .when() - .queryParam("description", description) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(2)); + given().when().queryParam("description", description).get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(2)); } @Test @@ -115,7 +102,8 @@ public void testSearchArtifactsByLabels() throws Exception { for (int idx = 0; idx < 5; idx++) { String title = "Empty API " + idx; String artifactId = "Empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + 
artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); Map labels = new HashMap<>(); labels.put("all-key", "all-value"); @@ -123,114 +111,46 @@ public void testSearchArtifactsByLabels() throws Exception { labels.put("another-key-" + idx, "another-value-" + idx); labels.put("a-key-" + idx, "lorem ipsum"); labels.put("extra-key-" + (idx % 2), "lorem ipsum"); - + // Update the artifact meta-data EditableArtifactMetaData metaData = new EditableArtifactMetaData(); metaData.setName(title); metaData.setDescription("Some description of an API"); metaData.setLabels(labels); - given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", group) - .pathParam("artifactId", artifactId) - .body(metaData) - .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}") - .then() - .statusCode(204); + given().when().contentType(CT_JSON).pathParam("groupId", group) + .pathParam("artifactId", artifactId).body(metaData) + .put("/registry/v3/groups/{groupId}/artifacts/{artifactId}").then().statusCode(204); } - given() - .when() - .queryParam("labels", "all-key:all-value") - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(5)); - - given() - .when() - .queryParam("labels", "key-1:value-1") - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(1)); - - given() - .when() - .queryParam("labels", "key-1:value-1") - .queryParam("labels", "another-key-1:another-value-1") - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(1)); - - given() - .when() - .queryParam("labels", "key-1:value-1") - .queryParam("labels", "key-2:value-2") - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(0)); - given() - .when() - .queryParam("labels", "key-1:value-1:") - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(0)); - given() - .when() - .queryParam("labels", "key-1:") - 
.get("/registry/v3/search/artifacts") - .then() + given().when().queryParam("labels", "all-key:all-value").get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(5)); + + given().when().queryParam("labels", "key-1:value-1").get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(1)); + + given().when().queryParam("labels", "key-1:value-1") + .queryParam("labels", "another-key-1:another-value-1").get("/registry/v3/search/artifacts") + .then().statusCode(200).body("count", equalTo(1)); + + given().when().queryParam("labels", "key-1:value-1").queryParam("labels", "key-2:value-2") + .get("/registry/v3/search/artifacts").then().statusCode(200).body("count", equalTo(0)); + given().when().queryParam("labels", "key-1:value-1:").get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(0)); + given().when().queryParam("labels", "key-1:").get("/registry/v3/search/artifacts").then() .statusCode(400); - given() - .when() - .queryParam("labels", ":value-1") - .get("/registry/v3/search/artifacts") - .then() + given().when().queryParam("labels", ":value-1").get("/registry/v3/search/artifacts").then() .statusCode(400); - given() - .when() - .queryParam("labels", "all-key") - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(5)); - given() - .when() - .queryParam("labels", "a-key-1") - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(1)); - given() - .when() - .queryParam("labels", "extra-key-0") - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(3)); - given() - .when() - .queryParam("labels", "extra-key-2") - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(0)); - given() - .when() - .queryParam("labels", ":all-key") - .get("/registry/v3/search/artifacts") - .then() + given().when().queryParam("labels", 
"all-key").get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(5)); + given().when().queryParam("labels", "a-key-1").get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(1)); + given().when().queryParam("labels", "extra-key-0").get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(3)); + given().when().queryParam("labels", "extra-key-2").get("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(0)); + given().when().queryParam("labels", ":all-key").get("/registry/v3/search/artifacts").then() .statusCode(400); - given() - .when() - .queryParam("labels", "all-key:") - .get("/registry/v3/search/artifacts") - .then() + given().when().queryParam("labels", "all-key:").get("/registry/v3/search/artifacts").then() .statusCode(400); } @@ -242,56 +162,28 @@ public void testSearchArtifactsOrderBy() throws Exception { for (int idx = 0; idx < 5; idx++) { String artifactId = "Empty-" + idx; String name = "empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", name), - ContentTypes.APPLICATION_JSON, (ca) -> { + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", name), ContentTypes.APPLICATION_JSON, (ca) -> { ca.setName(name); ca.getFirstVersion().setName(name); }); } - given() - .when() - .queryParam("orderby", "name") - .queryParam("order", "asc") - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(5)) + given().when().queryParam("orderby", "name").queryParam("order", "asc").queryParam("groupId", group) + .get("/registry/v3/search/artifacts").then().statusCode(200).body("count", equalTo(5)) .body("artifacts[0].name", equalTo("empty-0")); - given() - .when() - .queryParam("orderby", "name") - .queryParam("order", "desc") - .queryParam("groupId", group) - 
.get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(5)) + given().when().queryParam("orderby", "name").queryParam("order", "desc").queryParam("groupId", group) + .get("/registry/v3/search/artifacts").then().statusCode(200).body("count", equalTo(5)) .body("artifacts[0].name", equalTo("empty-4")); - given() - .when() - .queryParam("orderby", "createdOn") - .queryParam("order", "asc") - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(5)) - .body("artifacts[0].name", equalTo("empty-0")); + given().when().queryParam("orderby", "createdOn").queryParam("order", "asc") + .queryParam("groupId", group).get("/registry/v3/search/artifacts").then().statusCode(200) + .body("count", equalTo(5)).body("artifacts[0].name", equalTo("empty-0")); - given() - .when() - .queryParam("orderby", "createdOn") - .queryParam("order", "desc") - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(5)) - .body("artifacts[0].name", equalTo("empty-4")); + given().when().queryParam("orderby", "createdOn").queryParam("order", "desc") + .queryParam("groupId", group).get("/registry/v3/search/artifacts").then().statusCode(200) + .body("count", equalTo(5)).body("artifacts[0].name", equalTo("empty-4")); } @Test @@ -302,51 +194,26 @@ public void testSearchArtifactsLimitAndOffset() throws Exception { for (int idx = 0; idx < 20; idx++) { String artifactId = "Empty-" + idx; String name = "empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", name), ContentTypes.APPLICATION_JSON, - (ca) -> { + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", name), ContentTypes.APPLICATION_JSON, (ca) -> { ca.setName(name); ca.getFirstVersion().setName(name); }); } - given() - .when() - .queryParam("orderby", "createdOn") - 
.queryParam("order", "asc") - .queryParam("limit", 5) - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(20)) - .body("artifacts.size()", equalTo(5)) + given().when().queryParam("orderby", "createdOn").queryParam("order", "asc").queryParam("limit", 5) + .queryParam("groupId", group).get("/registry/v3/search/artifacts").then().statusCode(200) + .body("count", equalTo(20)).body("artifacts.size()", equalTo(5)) .body("artifacts[0].name", equalTo("empty-0")); - given() - .when() - .queryParam("orderby", "createdOn") - .queryParam("order", "asc") - .queryParam("limit", 15) - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(20)) - .body("artifacts.size()", equalTo(15)) + given().when().queryParam("orderby", "createdOn").queryParam("order", "asc").queryParam("limit", 15) + .queryParam("groupId", group).get("/registry/v3/search/artifacts").then().statusCode(200) + .body("count", equalTo(20)).body("artifacts.size()", equalTo(15)) .body("artifacts[0].name", equalTo("empty-0")); - given() - .when() - .queryParam("orderby", "createdOn") - .queryParam("order", "asc") - .queryParam("limit", 5) - .queryParam("offset", 10) - .queryParam("groupId", group) - .get("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(20)) - .body("artifacts.size()", equalTo(5)) + given().when().queryParam("orderby", "createdOn").queryParam("order", "asc").queryParam("limit", 5) + .queryParam("offset", 10).queryParam("groupId", group).get("/registry/v3/search/artifacts") + .then().statusCode(200).body("count", equalTo(20)).body("artifacts.size()", equalTo(5)) .body("artifacts[0].name", equalTo("empty-10")); } @@ -362,38 +229,29 @@ public void testSearchArtifactsByContent() throws Exception { for (int idx = 0; idx < 5; idx++) { String title = "testSearchByContent-empty-api-" + idx; String artifactId = "Empty-1-" + idx; - 
this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); artifactId = "Empty-2-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); } - given() - .when() - .body(searchByContent) - .post("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(2)) - ; + given().when().body(searchByContent).post("/registry/v3/search/artifacts").then().statusCode(200) + .body("count", equalTo(2)); // Searching by content that is not the same should yield 0 results. - given() - .when() - .body(searchByCanonicalContent) - .post("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(0)) - ; + given().when().body(searchByCanonicalContent).post("/registry/v3/search/artifacts").then() + .statusCode(200).body("count", equalTo(0)); } - @Test public void testSearchArtifactsByCanonicalContent() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); String group = "testSearchByCanonicalContent"; - String searchByContent = artifactContent.replaceAll("Empty API", "testSearchByCanonicalContent-empty-api-2").replaceAll("\\{", " {\n"); + String searchByContent = artifactContent + .replaceAll("Empty API", "testSearchByCanonicalContent-empty-api-2") + .replaceAll("\\{", " {\n"); System.out.println(searchByContent); @@ -401,23 +259,17 @@ public void testSearchArtifactsByCanonicalContent() throws Exception { for (int idx = 0; idx < 5; idx++) { String title = "testSearchByCanonicalContent-empty-api-" + idx; String artifactId = "Empty-1-" + idx; - 
this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); artifactId = "Empty-2-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); } - given() - .when() - .queryParam("canonical", "true") - .queryParam("artifactType", ArtifactType.OPENAPI) - .body(searchByContent) - .post("/registry/v3/search/artifacts") - .then() - .statusCode(200) - .body("count", equalTo(2)) - ; + given().when().queryParam("canonical", "true").queryParam("artifactType", ArtifactType.OPENAPI) + .body(searchByContent).post("/registry/v3/search/artifacts").then().statusCode(200) + .body("count", equalTo(2)); } - } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchGroupsTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchGroupsTest.java index 9d08724aeb..9b4731009c 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchGroupsTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchGroupsTest.java @@ -66,10 +66,8 @@ public void testSearchGroupsByLabels() throws Exception { // Create 5 groups for (int idx = 0; idx < 5; idx++) { Labels labels = new Labels(); - labels.setAdditionalData(Map.of( - "byLabels", "byLabels-value-" + idx, - "byLabels-" + idx, "byLabels-value-" + idx - )); + labels.setAdditionalData( + Map.of("byLabels", "byLabels-value-" + idx, "byLabels-" + idx, "byLabels-value-" + idx)); CreateGroup createGroup = new CreateGroup(); createGroup.setGroupId(groupId + idx); @@ -83,30 +81,30 @@ public void testSearchGroupsByLabels() throws 
Exception { Assertions.assertEquals(1, results.getGroups().size()); results = clientV3.search().groups().get(request -> { - request.queryParameters.labels = new String[]{ "byLabels" }; + request.queryParameters.labels = new String[] { "byLabels" }; }); Assertions.assertEquals(5, results.getGroups().size()); results = clientV3.search().groups().get(request -> { - request.queryParameters.labels = new String[]{ "byLabels-3" }; + request.queryParameters.labels = new String[] { "byLabels-3" }; }); Assertions.assertEquals(1, results.getGroups().size()); Assertions.assertEquals("testSearchGroupsByLabels3", results.getGroups().get(0).getGroupId()); results = clientV3.search().groups().get(request -> { - request.queryParameters.labels = new String[]{ "byLabels:byLabels-value-3" }; + request.queryParameters.labels = new String[] { "byLabels:byLabels-value-3" }; }); Assertions.assertEquals(1, results.getGroups().size()); Assertions.assertEquals("testSearchGroupsByLabels3", results.getGroups().get(0).getGroupId()); results = clientV3.search().groups().get(request -> { - request.queryParameters.labels = new String[]{ "byLabels-3" }; + request.queryParameters.labels = new String[] { "byLabels-3" }; }); Assertions.assertEquals(1, results.getGroups().size()); Assertions.assertEquals("testSearchGroupsByLabels3", results.getGroups().get(0).getGroupId()); results = clientV3.search().groups().get(request -> { - request.queryParameters.labels = new String[]{ "byLabels-3:byLabels-value-3" }; + request.queryParameters.labels = new String[] { "byLabels-3:byLabels-value-3" }; }); Assertions.assertEquals(1, results.getGroups().size()); Assertions.assertEquals("testSearchGroupsByLabels3", results.getGroups().get(0).getGroupId()); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchVersionsTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchVersionsTest.java index 63dd9ce364..bb2c010809 100644 --- 
a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchVersionsTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SearchVersionsTest.java @@ -22,12 +22,14 @@ public void testSearchVersionsByGroupId() throws Exception { // Create 5 artifacts in group 1 for (int idx = 0; idx < 5; idx++) { String artifactId = "testSearchVersionsByGroupId_Group1_Artifact_" + idx; - createArtifact(group1, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group1, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); } // Create 3 artifacts in group 2 for (int idx = 0; idx < 3; idx++) { String artifactId = "testSearchVersionsByGroupId_Group2_Artifact_" + idx; - this.createArtifact(group2, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + this.createArtifact(group2, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); } VersionSearchResults results = clientV3.search().versions().get(config -> { @@ -56,13 +58,15 @@ public void testSearchVersionsByArtifactId() throws Exception { // Create 5 artifacts in group 1 (two versions each) for (int idx = 0; idx < 5; idx++) { String artifactId = "testSearchVersionsByArtifactId_Group1_Artifact_" + idx; - createArtifact(group1, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group1, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); createArtifactVersion(group1, artifactId, artifactContent, ContentTypes.APPLICATION_JSON); } // Create 3 artifacts in group 2 for (int idx = 0; idx < 3; idx++) { String artifactId = "testSearchVersionsByArtifactId_Group2_Artifact_" + idx; - createArtifact(group2, artifactId, ArtifactType.OPENAPI, artifactContent, ContentTypes.APPLICATION_JSON); + createArtifact(group2, artifactId, ArtifactType.OPENAPI, artifactContent, + ContentTypes.APPLICATION_JSON); } 
VersionSearchResults results = clientV3.search().versions().get(config -> { @@ -80,8 +84,10 @@ public void testSearchVersionsByArtifactId() throws Exception { public void testSearchVersionsByContent() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); String group = TestUtils.generateGroupId(); - String searchByCommonContent = artifactContent.replaceAll("Empty API", "testSearchVersionsByContent-empty-api"); - String searchByUniqueContent = artifactContent.replaceAll("Empty API", "testSearchVersionsByContent-empty-api-2"); + String searchByCommonContent = artifactContent.replaceAll("Empty API", + "testSearchVersionsByContent-empty-api"); + String searchByUniqueContent = artifactContent.replaceAll("Empty API", + "testSearchVersionsByContent-empty-api-2"); String searchByUnknownContent = artifactContent.replaceAll("\\{", " {\n"); // Create 5 artifacts with two versions each in the test group @@ -91,18 +97,22 @@ public void testSearchVersionsByContent() throws Exception { String uniqueContent = artifactContent.replaceAll("Empty API", name); String commonContent = searchByCommonContent; // First version is common content (same for every artifact) - createArtifact(group, artifactId, ArtifactType.OPENAPI, commonContent, ContentTypes.APPLICATION_JSON); + createArtifact(group, artifactId, ArtifactType.OPENAPI, commonContent, + ContentTypes.APPLICATION_JSON); // Second version is unique to each artifact createArtifactVersion(group, artifactId, uniqueContent, ContentTypes.APPLICATION_JSON); } - VersionSearchResults results = clientV3.search().versions().post(asInputStream(searchByCommonContent), ContentTypes.APPLICATION_JSON); + VersionSearchResults results = clientV3.search().versions().post(asInputStream(searchByCommonContent), + ContentTypes.APPLICATION_JSON); Assertions.assertEquals(5, results.getCount()); - results = clientV3.search().versions().post(asInputStream(searchByUniqueContent), ContentTypes.APPLICATION_JSON); + results = 
clientV3.search().versions().post(asInputStream(searchByUniqueContent), + ContentTypes.APPLICATION_JSON); Assertions.assertEquals(1, results.getCount()); - results = clientV3.search().versions().post(asInputStream(searchByUnknownContent), ContentTypes.APPLICATION_JSON); + results = clientV3.search().versions().post(asInputStream(searchByUnknownContent), + ContentTypes.APPLICATION_JSON); Assertions.assertEquals(0, results.getCount()); } @@ -110,8 +120,10 @@ public void testSearchVersionsByContent() throws Exception { public void testSearchVersionsByCanonicalContent() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); String group = TestUtils.generateGroupId(); - String searchByCommonContent = artifactContent.replaceAll("Empty API", "testSearchVersionsByCanonicalContent-empty-api"); - String searchByUniqueContent = artifactContent.replaceAll("Empty API", "testSearchVersionsByCanonicalContent-empty-api-2"); + String searchByCommonContent = artifactContent.replaceAll("Empty API", + "testSearchVersionsByCanonicalContent-empty-api"); + String searchByUniqueContent = artifactContent.replaceAll("Empty API", + "testSearchVersionsByCanonicalContent-empty-api-2"); String searchByCanonicalContent = searchByUniqueContent.replaceAll("\\{", " {\n"); // Create 5 artifacts with two versions each in the test group @@ -121,24 +133,29 @@ public void testSearchVersionsByCanonicalContent() throws Exception { String uniqueContent = artifactContent.replaceAll("Empty API", name); String commonContent = searchByCommonContent; // First version is common content (same for every artifact) - createArtifact(group, artifactId, ArtifactType.OPENAPI, commonContent, ContentTypes.APPLICATION_JSON); + createArtifact(group, artifactId, ArtifactType.OPENAPI, commonContent, + ContentTypes.APPLICATION_JSON); // Second version is unique to each artifact createArtifactVersion(group, artifactId, uniqueContent, ContentTypes.APPLICATION_JSON); } - VersionSearchResults results = 
clientV3.search().versions().post(asInputStream(searchByCommonContent), ContentTypes.APPLICATION_JSON); + VersionSearchResults results = clientV3.search().versions().post(asInputStream(searchByCommonContent), + ContentTypes.APPLICATION_JSON); Assertions.assertEquals(5, results.getCount()); - results = clientV3.search().versions().post(asInputStream(searchByUniqueContent), ContentTypes.APPLICATION_JSON); + results = clientV3.search().versions().post(asInputStream(searchByUniqueContent), + ContentTypes.APPLICATION_JSON); Assertions.assertEquals(1, results.getCount()); - results = clientV3.search().versions().post(asInputStream(searchByCanonicalContent), ContentTypes.APPLICATION_JSON); + results = clientV3.search().versions().post(asInputStream(searchByCanonicalContent), + ContentTypes.APPLICATION_JSON); Assertions.assertEquals(0, results.getCount()); - results = clientV3.search().versions().post(asInputStream(searchByCanonicalContent), ContentTypes.APPLICATION_JSON, (config) -> { - config.queryParameters.canonical = true; - config.queryParameters.artifactType = ArtifactType.OPENAPI; - }); + results = clientV3.search().versions().post(asInputStream(searchByCanonicalContent), + ContentTypes.APPLICATION_JSON, (config) -> { + config.queryParameters.canonical = true; + config.queryParameters.artifactType = ArtifactType.OPENAPI; + }); Assertions.assertEquals(1, results.getCount()); } @@ -146,7 +163,8 @@ public void testSearchVersionsByCanonicalContent() throws Exception { public void testSearchVersionsByContentInGA() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); String group = TestUtils.generateGroupId(); - String searchByCommonContent = artifactContent.replaceAll("Empty API", "testSearchVersionsByContentInGA-empty-api"); + String searchByCommonContent = artifactContent.replaceAll("Empty API", + "testSearchVersionsByContentInGA-empty-api"); String searchByArtifactId = ""; // Create 5 artifacts with two versions each in the test group 
@@ -156,7 +174,8 @@ public void testSearchVersionsByContentInGA() throws Exception { String uniqueContent = artifactContent.replaceAll("Empty API", name); String commonContent = searchByCommonContent; // First version is common content (same for every artifact) - createArtifact(group, artifactId, ArtifactType.OPENAPI, commonContent, ContentTypes.APPLICATION_JSON); + createArtifact(group, artifactId, ArtifactType.OPENAPI, commonContent, + ContentTypes.APPLICATION_JSON); // Second version is unique to each artifact createArtifactVersion(group, artifactId, uniqueContent, ContentTypes.APPLICATION_JSON); @@ -164,15 +183,17 @@ public void testSearchVersionsByContentInGA() throws Exception { searchByArtifactId = artifactId; } - VersionSearchResults results = clientV3.search().versions().post(asInputStream(searchByCommonContent), ContentTypes.APPLICATION_JSON); + VersionSearchResults results = clientV3.search().versions().post(asInputStream(searchByCommonContent), + ContentTypes.APPLICATION_JSON); Assertions.assertEquals(5, results.getCount()); // Same search, but also filter by groupId and artifactId - should be just 1 final String aid = searchByArtifactId; - results = clientV3.search().versions().post(asInputStream(searchByCommonContent), ContentTypes.APPLICATION_JSON, config -> { - config.queryParameters.groupId = group; - config.queryParameters.artifactId = aid; - }); + results = clientV3.search().versions().post(asInputStream(searchByCommonContent), + ContentTypes.APPLICATION_JSON, config -> { + config.queryParameters.groupId = group; + config.queryParameters.artifactId = aid; + }); Assertions.assertEquals(1, results.getCount()); } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SystemResourceTest.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SystemResourceTest.java index ed5bc45d75..ad78a5f75c 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SystemResourceTest.java +++ 
b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/SystemResourceTest.java @@ -1,30 +1,23 @@ package io.apicurio.registry.noprofile.rest.v3; -import static io.restassured.RestAssured.given; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.equalTo; - -import org.junit.jupiter.api.Test; - import io.apicurio.registry.AbstractResourceTestBase; import io.quarkus.test.junit.QuarkusTest; +import org.junit.jupiter.api.Test; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; @QuarkusTest public class SystemResourceTest extends AbstractResourceTestBase { @Test public void testSystemInformation() { - given() - .when() - .contentType(CT_JSON) - .get("/registry/v3/system/info") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).get("/registry/v3/system/info").then().statusCode(200) .body("name", notNullValue()) - .body("description", equalTo("High performance, runtime registry for schemas and API designs.")) - .body("version", notNullValue()) - .body("builtOn", notNullValue()) - ; + .body("description", + equalTo("High performance, runtime registry for schemas and API designs.")) + .body("version", notNullValue()).body("builtOn", notNullValue()); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/impexp/ExportLoader.java b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/impexp/ExportLoader.java index 93c7e1d592..0695d5dbbb 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/impexp/ExportLoader.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/rest/v3/impexp/ExportLoader.java @@ -20,14 +20,9 @@ */ public class ExportLoader { - private static final String CONTENT = "{\r\n" + - " \"openapi\": \"3.0.2\",\r\n" + - " \"info\": {\r\n" + - " \"title\": \"Empty API\",\r\n" + - " \"version\": \"1.0.0\",\r\n" + - " \"description\": \"An example API design using 
OpenAPI.\"\r\n" + - " }\r\n" + - "}"; + private static final String CONTENT = "{\r\n" + " \"openapi\": \"3.0.2\",\r\n" + " \"info\": {\r\n" + + " \"title\": \"Empty API\",\r\n" + " \"version\": \"1.0.0\",\r\n" + + " \"description\": \"An example API design using OpenAPI.\"\r\n" + " }\r\n" + "}"; public static void main(String[] args) throws Exception { var adapter = new VertXRequestAdapter(VertXAuthFactory.defaultVertx); @@ -37,9 +32,12 @@ public static void main(String[] args) throws Exception { System.out.println("Iteration: " + idx); String data = CONTENT.replace("1.0.0", "1.0." + idx); String artifactId = UUID.randomUUID().toString(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, data, ContentTypes.APPLICATION_JSON); - client.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().post(createArtifact); - client.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).delete(); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + data, ContentTypes.APPLICATION_JSON); + client.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .post(createArtifact); + client.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).delete(); } String testContent = CONTENT.replace("Empty API", "Test Artifact"); @@ -53,7 +51,8 @@ public static void main(String[] args) throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("SYNTAX_ONLY"); - client.groups().byGroupId("ImportTest").artifacts().byArtifactId("Artifact-1").rules().post(createRule); + client.groups().byGroupId("ImportTest").artifacts().byArtifactId("Artifact-1").rules() + .post(createRule); createRule = new CreateRule(); createRule.setRuleType(RuleType.COMPATIBILITY); @@ -66,7 +65,8 @@ private static 
void createVersion(RegistryClient client, String artifactId, Stri String data = testContent.replace("1.0.0", version); CreateVersion createVersion = TestUtils.clientCreateVersion(data, ContentTypes.APPLICATION_JSON); createVersion.setVersion(version); - client.groups().byGroupId("ImportTest").artifacts().byArtifactId(artifactId).versions().post(createVersion); + client.groups().byGroupId("ImportTest").artifacts().byArtifactId(artifactId).versions() + .post(createVersion); } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/serde/AvroSerdeTest.java b/app/src/test/java/io/apicurio/registry/noprofile/serde/AvroSerdeTest.java index e38bd508a6..337402f42c 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/serde/AvroSerdeTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/serde/AvroSerdeTest.java @@ -81,13 +81,15 @@ public void createIsolatedClient() { @Test public void testConfiguration() throws Exception { String recordName = "myrecord3"; - Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"" + recordName + "\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); + Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"" + recordName + + "\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); String groupId = TestUtils.generateGroupId(); String topic = generateArtifactId(); - /*final Integer globalId = */ - createArtifact(groupId, topic + "-" + recordName, ArtifactType.AVRO, schema.toString(), ContentTypes.APPLICATION_JSON); + /* final Integer globalId = */ + createArtifact(groupId, topic + "-" + recordName, ArtifactType.AVRO, schema.toString(), + ContentTypes.APPLICATION_JSON); Map config = new HashMap<>(); config.put(SerdeConfig.REGISTRY_URL, TestUtils.getRegistryV3ApiUrl(testPort)); @@ -139,21 +141,29 @@ record = deserializer.deserialize(topic, bytes); @Test public void testAvro() throws Exception { testAvroAutoRegisterIdInBody(RecordIdStrategy.class, () -> { - return 
restClient.groups().byGroupId("test-group-avro").artifacts().byArtifactId("myrecord3").versions().byVersionExpression("branch=latest").get(); + return restClient.groups().byGroupId("test-group-avro").artifacts().byArtifactId("myrecord3") + .versions().byVersionExpression("branch=latest").get(); }); } @Test public void testAvroQualifiedRecordIdStrategy() throws Exception { testAvroAutoRegisterIdInBody(QualifiedRecordIdStrategy.class, () -> { - return restClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId("test-group-avro.myrecord3").versions().byVersionExpression("branch=latest").get(); + return restClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId("test-group-avro.myrecord3").versions().byVersionExpression("branch=latest") + .get(); }); } - private void testAvroAutoRegisterIdInBody(Class> strategy, Supplier artifactFinder) throws Exception { - Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord3\",\"namespace\":\"test-group-avro\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); - try (AvroKafkaSerializer serializer = new AvroKafkaSerializer(restClient); - Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { + private void testAvroAutoRegisterIdInBody( + Class> strategy, + Supplier artifactFinder) throws Exception { + Schema schema = new Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"myrecord3\",\"namespace\":\"test-group-avro\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); + try ( + AvroKafkaSerializer serializer = new AvroKafkaSerializer( + restClient); + Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { Map config = new HashMap<>(); config.put(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, strategy); @@ -194,9 +204,12 @@ private void testAvroAutoRegisterIdInBody(Class serializer = new AvroKafkaSerializer(restClient); - Deserializer deserializer = new 
AvroKafkaDeserializer<>(restClient)) { + Schema schema = new Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); + try ( + AvroKafkaSerializer serializer = new AvroKafkaSerializer( + restClient); + Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { Map config = new HashMap<>(); config.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); @@ -238,7 +251,7 @@ public void testAvroJSON() throws Exception { @Test public void avroJsonWithReferences() throws Exception { try (AvroKafkaSerializer serializer = new AvroKafkaSerializer(restClient); - Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { + Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { Map config = new HashMap<>(); config.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); @@ -248,7 +261,8 @@ public void avroJsonWithReferences() throws Exception { config = new HashMap<>(); config.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); - config.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); + config.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, + ReflectAvroDatumProvider.class.getName()); deserializer.configure(config, false); AvroSchemaB avroSchemaB = new AvroSchemaB(); @@ -312,7 +326,7 @@ public void avroJsonWithReferences() throws Exception { @Test public void avroJsonWithReferencesDereferenced() throws Exception { try (AvroKafkaSerializer serializer = new AvroKafkaSerializer(restClient); - Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { + Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { Map config = new HashMap<>(); config.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); @@ -323,7 +337,8 @@ public void avroJsonWithReferencesDereferenced() throws Exception { 
config = new HashMap<>(); config.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); - config.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); + config.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, + ReflectAvroDatumProvider.class.getName()); deserializer.configure(config, false); AvroSchemaB avroSchemaB = new AvroSchemaB(); @@ -388,7 +403,7 @@ public void avroJsonWithReferencesDereferenced() throws Exception { @Test public void avroJsonWithReferencesDeserializerDereferenced() throws Exception { try (AvroKafkaSerializer serializer = new AvroKafkaSerializer(restClient); - Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { + Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { Map config = new HashMap<>(); config.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); @@ -398,7 +413,8 @@ public void avroJsonWithReferencesDeserializerDereferenced() throws Exception { config = new HashMap<>(); config.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); - config.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); + config.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, + ReflectAvroDatumProvider.class.getName()); config.putIfAbsent(SchemaResolverConfig.DESERIALIZER_DEREFERENCE_SCHEMA, "true"); deserializer.configure(config, false); @@ -457,8 +473,10 @@ public void avroJsonWithReferencesDeserializerDereferenced() throws Exception { @Test public void issue4463Test() throws Exception { - try (AvroKafkaSerializer serializer = new AvroKafkaSerializer(restClient); - Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { + try ( + AvroKafkaSerializer serializer = new AvroKafkaSerializer( + restClient); + Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { Map config = new HashMap<>(); 
config.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); @@ -468,18 +486,15 @@ public void issue4463Test() throws Exception { config = new HashMap<>(); config.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); - config.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); + config.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, + ReflectAvroDatumProvider.class.getName()); deserializer.configure(config, false); LeadFallErstellen leadFallErstellen = LeadFallErstellen.newBuilder() - .setFall(Fall.newBuilder() - .setVerantwortlichkeitForFall(Verantwortlichkeit.newBuilder() - .setBenoetigteStellen(List.of(FdtCodeArt.newBuilder() - .setArt(20) - .setCode(24) - .build())) - .build()) - .build()) + .setFall(Fall.newBuilder().setVerantwortlichkeitForFall(Verantwortlichkeit.newBuilder() + .setBenoetigteStellen( + List.of(FdtCodeArt.newBuilder().setArt(20).setCode(24).build())) + .build()).build()) .build(); String artifactId = generateArtifactId(); @@ -502,9 +517,12 @@ public void issue4463Test() throws Exception { @Test public void testAvroUsingHeaders() throws Exception { - Schema schema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); - try (AvroKafkaSerializer serializer = new AvroKafkaSerializer(restClient); - Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { + Schema schema = new Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); + try ( + AvroKafkaSerializer serializer = new AvroKafkaSerializer( + restClient); + Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { Map config = new HashMap<>(); config.put(SerdeConfig.ENABLE_HEADERS, "true"); @@ -535,15 +553,14 @@ public void testAvroUsingHeaders() throws Exception { @Test public void testReferenceRaw() throws Exception 
{ Schema.Parser parser = new Schema.Parser(); - Schema eventTypeSchema = parser.parse("{\n" + - " \"type\": \"enum\",\n" + - " \"namespace\": \"test\",\n" + - " \"name\": \"EventType\",\n" + - " \"symbols\": [\"CREATED\", \"DELETED\", \"UNDEFINED\", \"UPDATED\"]\n" + - " }\n"); + Schema eventTypeSchema = parser.parse("{\n" + " \"type\": \"enum\",\n" + + " \"namespace\": \"test\",\n" + " \"name\": \"EventType\",\n" + + " \"symbols\": [\"CREATED\", \"DELETED\", \"UNDEFINED\", \"UPDATED\"]\n" + " }\n"); - try (AvroKafkaSerializer serializer = new AvroKafkaSerializer(restClient); - Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { + try ( + AvroKafkaSerializer serializer = new AvroKafkaSerializer( + restClient); + Deserializer deserializer = new AvroKafkaDeserializer<>(restClient)) { Map config = new HashMap<>(); config.put(SerdeConfig.ENABLE_HEADERS, "true"); @@ -568,8 +585,7 @@ public void testReferenceRaw() throws Exception { waitForSchema(id -> { try { return restClient.ids().globalIds().byGlobalId(id).get().readAllBytes().length > 0; - } - catch (IOException e) { + } catch (IOException e) { throw new RuntimeException(e); } }, bytes, byteBuffer -> globalIdkey); @@ -581,52 +597,36 @@ public void testReferenceRaw() throws Exception { } @ParameterizedTest - @ValueSource( - classes = { - io.apicurio.registry.serde.strategy.TopicIdStrategy.class, - io.apicurio.registry.serde.avro.strategy.QualifiedRecordIdStrategy.class, - io.apicurio.registry.serde.avro.strategy.RecordIdStrategy.class, - io.apicurio.registry.serde.avro.strategy.TopicRecordIdStrategy.class - } - ) + @ValueSource(classes = { io.apicurio.registry.serde.strategy.TopicIdStrategy.class, + io.apicurio.registry.serde.avro.strategy.QualifiedRecordIdStrategy.class, + io.apicurio.registry.serde.avro.strategy.RecordIdStrategy.class, + io.apicurio.registry.serde.avro.strategy.TopicRecordIdStrategy.class }) public void testAvroReflect(Class artifactResolverStrategyClass) throws Exception { - 
testAvroReflect( - artifactResolverStrategyClass, - ReflectAvroDatumProvider.class, - () -> new Tester("Apicurio", Tester.TesterState.ONLINE) - ); + testAvroReflect(artifactResolverStrategyClass, ReflectAvroDatumProvider.class, + () -> new Tester("Apicurio", Tester.TesterState.ONLINE)); } @ParameterizedTest - @ValueSource( - classes = { - io.apicurio.registry.serde.strategy.TopicIdStrategy.class, + @ValueSource(classes = { io.apicurio.registry.serde.strategy.TopicIdStrategy.class, io.apicurio.registry.serde.avro.strategy.QualifiedRecordIdStrategy.class, io.apicurio.registry.serde.avro.strategy.RecordIdStrategy.class, - io.apicurio.registry.serde.avro.strategy.TopicRecordIdStrategy.class - } - ) + io.apicurio.registry.serde.avro.strategy.TopicRecordIdStrategy.class }) public void testAvroReflectAllowNull(Class artifactResolverStrategyClass) throws Exception { - testAvroReflect( - artifactResolverStrategyClass, - ReflectAllowNullAvroDatumProvider.class, - () -> new Tester("Apicurio", null) - ); + testAvroReflect(artifactResolverStrategyClass, ReflectAllowNullAvroDatumProvider.class, + () -> new Tester("Apicurio", null)); } - private void testAvroReflect( - Class artifactResolverStrategyClass, - Class datumProvider, - Supplier testerFactory - ) throws Exception { + private void testAvroReflect(Class artifactResolverStrategyClass, Class datumProvider, + Supplier testerFactory) throws Exception { try (AvroKafkaSerializer serializer = new AvroKafkaSerializer(restClient); - AvroKafkaDeserializer deserializer = new AvroKafkaDeserializer(restClient)) { + AvroKafkaDeserializer deserializer = new AvroKafkaDeserializer(restClient)) { Map config = new HashMap<>(); config.put(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true"); config.put(SerdeConfig.ENABLE_HEADERS, "false"); config.put(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, datumProvider.getName()); - config.put(SchemaResolverConfig.ARTIFACT_RESOLVER_STRATEGY, artifactResolverStrategyClass.getName()); + 
config.put(SchemaResolverConfig.ARTIFACT_RESOLVER_STRATEGY, + artifactResolverStrategyClass.getName()); serializer.configure(config, false); config = new HashMap<>(); @@ -671,12 +671,14 @@ public void testSerdeMix() throws Exception { record.put("bar", "somebar"); try (KafkaAvroSerializer serializer1 = new KafkaAvroSerializer(schemaClient); - AvroKafkaDeserializer deserializer1 = new AvroKafkaDeserializer(restClient)) { + AvroKafkaDeserializer deserializer1 = new AvroKafkaDeserializer( + restClient)) { byte[] bytes = serializer1.serialize(subject, record); TestUtils.retry(() -> TestUtils.waitForSchema(contentId -> { try { - return restClient.ids().contentIds().byContentId(contentId).get().readAllBytes().length > 0; + return restClient.ids().contentIds().byContentId(contentId).get() + .readAllBytes().length > 0; } catch (IOException e) { throw new RuntimeException(e); } @@ -691,7 +693,8 @@ public void testSerdeMix() throws Exception { } try (KafkaAvroDeserializer deserializer2 = new KafkaAvroDeserializer(schemaClient); - AvroKafkaSerializer serializer2 = new AvroKafkaSerializer(restClient)) { + AvroKafkaSerializer serializer2 = new AvroKafkaSerializer( + restClient)) { Map config = new HashMap<>(); config.put(SerdeConfig.USE_ID, IdOption.contentId.name()); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/serde/JsonSchemaSerdeTest.java b/app/src/test/java/io/apicurio/registry/noprofile/serde/JsonSchemaSerdeTest.java index d9c94a43b9..b01d82ad7e 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/serde/JsonSchemaSerdeTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/serde/JsonSchemaSerdeTest.java @@ -69,18 +69,20 @@ public void createIsolatedClient() { @Test public void testJsonSchemaSerde() throws Exception { - InputStream jsonSchema = getClass().getResourceAsStream("/io/apicurio/registry/util/json-schema.json"); + InputStream jsonSchema = getClass() + .getResourceAsStream("/io/apicurio/registry/util/json-schema.json"); 
Assertions.assertNotNull(jsonSchema); String groupId = TestUtils.generateGroupId(); String artifactId = generateArtifactId(); - createArtifact(groupId, artifactId, ArtifactType.JSON, IoUtil.toString(jsonSchema), ContentTypes.APPLICATION_JSON); + createArtifact(groupId, artifactId, ArtifactType.JSON, IoUtil.toString(jsonSchema), + ContentTypes.APPLICATION_JSON); Person person = new Person("Ales", "Justin", 23); try (JsonSchemaKafkaSerializer serializer = new JsonSchemaKafkaSerializer<>(restClient, true); - Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { + Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { Map config = new HashMap<>(); config.put(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, groupId); @@ -126,7 +128,7 @@ public void testJsonSchemaSerdeAutoRegister() throws Exception { Person person = new Person("Carles", "Arnal", 30); try (JsonSchemaKafkaSerializer serializer = new JsonSchemaKafkaSerializer<>(restClient, true); - Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { + Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { Map config = new HashMap<>(); config.put(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, groupId); @@ -168,19 +170,20 @@ public void testJsonSchemaSerdeAutoRegister() throws Exception { @Test public void testJsonSchemaSerdeHeaders() throws Exception { - InputStream jsonSchema = getClass().getResourceAsStream("/io/apicurio/registry/util/json-schema.json"); + InputStream jsonSchema = getClass() + .getResourceAsStream("/io/apicurio/registry/util/json-schema.json"); Assertions.assertNotNull(jsonSchema); String groupId = TestUtils.generateGroupId(); String artifactId = generateArtifactId(); - Long globalId = createArtifact(groupId, artifactId, ArtifactType.JSON, IoUtil.toString(jsonSchema), ContentTypes.APPLICATION_JSON) - .getVersion().getGlobalId(); + Long globalId = createArtifact(groupId, artifactId, ArtifactType.JSON, 
IoUtil.toString(jsonSchema), + ContentTypes.APPLICATION_JSON).getVersion().getGlobalId(); Person person = new Person("Ales", "Justin", 23); try (JsonSchemaKafkaSerializer serializer = new JsonSchemaKafkaSerializer<>(restClient, true); - Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { + Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { Map config = new HashMap<>(); config.put(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, groupId); @@ -213,19 +216,20 @@ public void testJsonSchemaSerdeHeaders() throws Exception { @Test public void testJsonSchemaSerdeMagicByte() throws Exception { - InputStream jsonSchema = getClass().getResourceAsStream("/io/apicurio/registry/util/json-schema-with-java-type.json"); + InputStream jsonSchema = getClass() + .getResourceAsStream("/io/apicurio/registry/util/json-schema-with-java-type.json"); Assertions.assertNotNull(jsonSchema); String groupId = TestUtils.generateGroupId(); String artifactId = generateArtifactId(); - Long globalId = createArtifact(groupId, artifactId, ArtifactType.JSON, IoUtil.toString(jsonSchema), ContentTypes.APPLICATION_JSON) - .getVersion().getGlobalId(); + Long globalId = createArtifact(groupId, artifactId, ArtifactType.JSON, IoUtil.toString(jsonSchema), + ContentTypes.APPLICATION_JSON).getVersion().getGlobalId(); Person person = new Person("Ales", "Justin", 23); try (JsonSchemaKafkaSerializer serializer = new JsonSchemaKafkaSerializer<>(restClient, true); - Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { + Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { Map config = new HashMap<>(); config.put(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, groupId); @@ -253,10 +257,13 @@ public void testJsonSchemaSerdeMagicByte() throws Exception { public void testJsonSchemaSerdeWithReferences() throws Exception { InputStream citySchema = getClass().getResourceAsStream("/io/apicurio/registry/util/city.json"); 
InputStream citizenSchema = getClass().getResourceAsStream("/io/apicurio/registry/util/citizen.json"); - InputStream citizenIdentifier = getClass().getResourceAsStream("/io/apicurio/registry/util/citizenIdentifier.json"); - InputStream qualificationSchema = getClass().getResourceAsStream("/io/apicurio/registry/util/qualification.json"); + InputStream citizenIdentifier = getClass() + .getResourceAsStream("/io/apicurio/registry/util/citizenIdentifier.json"); + InputStream qualificationSchema = getClass() + .getResourceAsStream("/io/apicurio/registry/util/qualification.json"); - InputStream addressSchema = getClass().getResourceAsStream("/io/apicurio/registry/util/sample.address.json"); + InputStream addressSchema = getClass() + .getResourceAsStream("/io/apicurio/registry/util/sample.address.json"); Assertions.assertNotNull(citizenSchema); Assertions.assertNotNull(citySchema); @@ -264,41 +271,43 @@ public void testJsonSchemaSerdeWithReferences() throws Exception { Assertions.assertNotNull(qualificationSchema); Assertions.assertNotNull(addressSchema); - String groupId = TestUtils.generateGroupId(); String cityArtifactId = generateArtifactId(); String qualificationsId = generateArtifactId(); String identifierArtifactId = generateArtifactId(); String addressId = generateArtifactId(); + createArtifact(groupId, cityArtifactId, ArtifactType.JSON, IoUtil.toString(citySchema), + ContentTypes.APPLICATION_JSON); - createArtifact(groupId, cityArtifactId, ArtifactType.JSON, IoUtil.toString(citySchema), ContentTypes.APPLICATION_JSON); - - createArtifact(groupId, qualificationsId, ArtifactType.JSON, IoUtil.toString(qualificationSchema), ContentTypes.APPLICATION_JSON); + createArtifact(groupId, qualificationsId, ArtifactType.JSON, IoUtil.toString(qualificationSchema), + ContentTypes.APPLICATION_JSON); - final io.apicurio.registry.rest.v3.beans.ArtifactReference qualificationsReference = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); + final 
io.apicurio.registry.rest.v3.beans.ArtifactReference qualificationsReference = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); qualificationsReference.setVersion("1"); qualificationsReference.setGroupId(groupId); qualificationsReference.setArtifactId(qualificationsId); qualificationsReference.setName("qualification.json"); - createArtifact(groupId, addressId, ArtifactType.JSON, IoUtil.toString(addressSchema), ContentTypes.APPLICATION_JSON); + createArtifact(groupId, addressId, ArtifactType.JSON, IoUtil.toString(addressSchema), + ContentTypes.APPLICATION_JSON); - final io.apicurio.registry.rest.v3.beans.ArtifactReference addressReference = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); + final io.apicurio.registry.rest.v3.beans.ArtifactReference addressReference = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); addressReference.setVersion("1"); addressReference.setGroupId(groupId); addressReference.setArtifactId(addressId); addressReference.setName("sample.address.json"); - final io.apicurio.registry.rest.v3.beans.ArtifactReference cityReference = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); + final io.apicurio.registry.rest.v3.beans.ArtifactReference cityReference = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); cityReference.setVersion("1"); cityReference.setGroupId(groupId); cityReference.setArtifactId(cityArtifactId); cityReference.setName("city.json"); - createArtifact(groupId, identifierArtifactId, ArtifactType.JSON, IoUtil.toString(citizenIdentifier), ContentTypes.APPLICATION_JSON); + createArtifact(groupId, identifierArtifactId, ArtifactType.JSON, IoUtil.toString(citizenIdentifier), + ContentTypes.APPLICATION_JSON); - final io.apicurio.registry.rest.v3.beans.ArtifactReference identifierReference = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); + final io.apicurio.registry.rest.v3.beans.ArtifactReference identifierReference = new 
io.apicurio.registry.rest.v3.beans.ArtifactReference(); identifierReference.setVersion("1"); identifierReference.setGroupId(groupId); identifierReference.setArtifactId(identifierArtifactId); @@ -307,14 +316,16 @@ public void testJsonSchemaSerdeWithReferences() throws Exception { String artifactId = generateArtifactId(); createArtifactWithReferences(groupId, artifactId, ArtifactType.JSON, IoUtil.toString(citizenSchema), - ContentTypes.APPLICATION_JSON, List.of(qualificationsReference, cityReference, identifierReference, addressReference)); + ContentTypes.APPLICATION_JSON, + List.of(qualificationsReference, cityReference, identifierReference, addressReference)); City city = new City("New York", 10001); CitizenIdentifier identifier = new CitizenIdentifier(123456789); Citizen citizen = new Citizen("Carles", "Arnal", 23, city, identifier, Collections.emptyList()); - try (JsonSchemaKafkaSerializer serializer = new JsonSchemaKafkaSerializer<>(restClient, true); - Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { + try ( + JsonSchemaKafkaSerializer serializer = new JsonSchemaKafkaSerializer<>(restClient, true); + Deserializer deserializer = new JsonSchemaKafkaDeserializer<>(restClient, true)) { Map config = new HashMap<>(); config.put(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, groupId); @@ -351,8 +362,7 @@ public void testJsonSchemaSerdeWithReferences() throws Exception { } catch (Exception ignored) { } - - //invalid identifier present, should fail + // invalid identifier present, should fail identifier = new CitizenIdentifier(-1234356); citizen.setIdentifier(identifier); @@ -365,16 +375,20 @@ public void testJsonSchemaSerdeWithReferences() throws Exception { } catch (Exception ignored) { } - //no identifier present, should pass + // no identifier present, should pass citizen.setIdentifier(null); serializer.serialize(artifactId, new RecordHeaders(), citizen); - //valid qualification, should pass - citizen.setQualifications(List.of(new 
Qualification(UUID.randomUUID().toString(), 6), new Qualification(UUID.randomUUID().toString(), 7), new Qualification(UUID.randomUUID().toString(), 8))); + // valid qualification, should pass + citizen.setQualifications(List.of(new Qualification(UUID.randomUUID().toString(), 6), + new Qualification(UUID.randomUUID().toString(), 7), + new Qualification(UUID.randomUUID().toString(), 8))); serializer.serialize(artifactId, new RecordHeaders(), citizen); - //invalid qualification, should fail - citizen.setQualifications(List.of(new Qualification(UUID.randomUUID().toString(), 6), new Qualification(UUID.randomUUID().toString(), -7), new Qualification(UUID.randomUUID().toString(), 8))); + // invalid qualification, should fail + citizen.setQualifications(List.of(new Qualification(UUID.randomUUID().toString(), 6), + new Qualification(UUID.randomUUID().toString(), -7), + new Qualification(UUID.randomUUID().toString(), 8))); try { serializer.serialize(artifactId, new RecordHeaders(), citizen); Assertions.fail(); @@ -404,45 +418,32 @@ public void complexObjectValidation() throws Exception { Assertions.assertNotNull(phone); String schemaContent = new String(address.readAllBytes(), StandardCharsets.UTF_8); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact("sample.address.json", ArtifactType.JSON, schemaContent, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact("sample.address.json", + ArtifactType.JSON, schemaContent, ContentTypes.APPLICATION_JSON); createArtifact.getFirstVersion().setVersion(version); - final VersionMetaData amdAddress = - client - .groups() - .byGroupId("GLOBAL") - .artifacts() - .post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.CREATE_VERSION; - config.queryParameters.canonical = false; - }) - .getVersion(); - - createArtifact.getFirstVersion().getContent().setContent(new String(email.readAllBytes(), StandardCharsets.UTF_8)); + final VersionMetaData amdAddress = 
client.groups().byGroupId("GLOBAL").artifacts() + .post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.CREATE_VERSION; + config.queryParameters.canonical = false; + }).getVersion(); + + createArtifact.getFirstVersion().getContent() + .setContent(new String(email.readAllBytes(), StandardCharsets.UTF_8)); createArtifact.setArtifactId("sample.email.json"); - final VersionMetaData amdEmail = - client - .groups() - .byGroupId("GLOBAL") - .artifacts() - .post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.CREATE_VERSION; - config.queryParameters.canonical = false; - }) - .getVersion(); - - createArtifact.getFirstVersion().getContent().setContent(new String(phone.readAllBytes(), StandardCharsets.UTF_8)); - createArtifact.setArtifactId("sample.phone.json"); - final VersionMetaData amdPhone = - client - .groups() - .byGroupId("GLOBAL") - .artifacts() - .post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.CREATE_VERSION; - config.queryParameters.canonical = false; - }) - .getVersion(); + final VersionMetaData amdEmail = client.groups().byGroupId("GLOBAL").artifacts() + .post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.CREATE_VERSION; + config.queryParameters.canonical = false; + }).getVersion(); + createArtifact.getFirstVersion().getContent() + .setContent(new String(phone.readAllBytes(), StandardCharsets.UTF_8)); + createArtifact.setArtifactId("sample.phone.json"); + final VersionMetaData amdPhone = client.groups().byGroupId("GLOBAL").artifacts() + .post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.CREATE_VERSION; + config.queryParameters.canonical = false; + }).getVersion(); final ArtifactReference addressReference = new ArtifactReference(); addressReference.setVersion(amdAddress.getVersion()); @@ -468,34 +469,32 @@ public void complexObjectValidation() throws Exception { 
artifactReferences.add(emailReference); artifactReferences.add(phoneReference); - createArtifact.getFirstVersion().getContent().setContent(new String(account.readAllBytes(), StandardCharsets.UTF_8)); + createArtifact.getFirstVersion().getContent() + .setContent(new String(account.readAllBytes(), StandardCharsets.UTF_8)); createArtifact.getFirstVersion().getContent().setReferences(artifactReferences); createArtifact.setArtifactId("sample.account.json"); - client - .groups() - .byGroupId("GLOBAL") - .artifacts() - .post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.CREATE_VERSION; - config.queryParameters.canonical = false; - }); + client.groups().byGroupId("GLOBAL").artifacts().post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.CREATE_VERSION; + config.queryParameters.canonical = false; + }); String data = "{\n" + " \"id\": \"abc\",\n" + " \n" + " \"accountPhones\": [{\n" + " \"phoneRelationTypeCd\": \"ABCDEFGHIJ\",\n" + " \"effectiveDate\": \"201-09-29T18:46:19Z\"\n" + " \n" + " \n" + " }]\n" + "}"; - ObjectMapper objectMapper = new ObjectMapper().configure( - DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + ObjectMapper objectMapper = new ObjectMapper() + .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); JsonNode validationFor = objectMapper.readTree(data); - VersionMetaData global = - client.groups().byGroupId("GLOBAL").artifacts().byArtifactId("sample.account.json").versions().byVersionExpression("branch=latest").get(); - // client.getArtifactMetaData("GLOBAL", "sample.account.json"); - io.apicurio.registry.resolver.strategy.ArtifactReference artifactReference = io.apicurio.registry.resolver.strategy.ArtifactReference.builder().globalId(global.getGlobalId()) - .groupId("GLOBAL")//.version("4") + VersionMetaData global = client.groups().byGroupId("GLOBAL").artifacts() + .byArtifactId("sample.account.json").versions().byVersionExpression("branch=latest").get(); + 
// client.getArtifactMetaData("GLOBAL", "sample.account.json"); + io.apicurio.registry.resolver.strategy.ArtifactReference artifactReference = io.apicurio.registry.resolver.strategy.ArtifactReference + .builder().globalId(global.getGlobalId()).groupId("GLOBAL")// .version("4") .artifactId("sample.account.json").build(); - SchemaResolverConfigurer src = new SchemaResolverConfigurer(client); + SchemaResolverConfigurer src = new SchemaResolverConfigurer( + client); SchemaResolver sr = src.getSchemaResolver(); Map configs = new HashMap<>(); @@ -503,13 +502,14 @@ public void complexObjectValidation() throws Exception { DefaultSchemaResolver.class.getName()); configs.put(SchemaResolverConfig.CHECK_PERIOD_MS, "600000"); sr.configure(configs, new JsonSchemaParser()); - ParsedSchema ps = sr.resolveSchemaByArtifactReference((artifactReference)).getParsedSchema(); + ParsedSchema ps = sr.resolveSchemaByArtifactReference((artifactReference)) + .getParsedSchema(); validateDataWithSchema(ps, objectMapper.writeValueAsBytes(validationFor), objectMapper); } protected static void validateDataWithSchema(ParsedSchema schema, byte[] data, - ObjectMapper mapper) throws IOException { + ObjectMapper mapper) throws IOException { try { schema.getParsedSchema().validate(mapper.readTree(data)); } catch (ValidationException e) { diff --git a/app/src/test/java/io/apicurio/registry/noprofile/serde/ProtobufSerdeTest.java b/app/src/test/java/io/apicurio/registry/noprofile/serde/ProtobufSerdeTest.java index 1d5e50cf16..f4749b7b5e 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/serde/ProtobufSerdeTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/serde/ProtobufSerdeTest.java @@ -46,7 +46,7 @@ public void createIsolatedClient() { @Test public void testProto() throws Exception { try (Serializer serializer = new ProtobufKafkaSerializer<>(restClient); - Deserializer deserializer = new ProtobufKafkaDeserializer(restClient)) { + Deserializer deserializer = new 
ProtobufKafkaDeserializer(restClient)) { Map config = new HashMap<>(); config.put(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, SimpleTopicIdStrategy.class); @@ -65,13 +65,12 @@ public void testProto() throws Exception { waitForSchema(globalId -> { try { if (restClient.ids().globalIds().byGlobalId(globalId).get().readAllBytes().length > 0) { - VersionMetaData artifactMetadata = restClient.groups().byGroupId(groupId).artifacts().byArtifactId(topic).versions() - .byVersionExpression("branch=latest").get(); + VersionMetaData artifactMetadata = restClient.groups().byGroupId(groupId).artifacts() + .byArtifactId(topic).versions().byVersionExpression("branch=latest").get(); assertEquals(globalId, artifactMetadata.getGlobalId()); return true; } - } - catch (IOException e) { + } catch (IOException e) { throw new RuntimeException(e); } return false; @@ -86,7 +85,7 @@ public void testProto() throws Exception { @Test public void testProtobufSchemaWithReferences() { try (Serializer serializer = new ProtobufKafkaSerializer<>(restClient); - Deserializer deserializer = new ProtobufKafkaDeserializer(restClient)) { + Deserializer deserializer = new ProtobufKafkaDeserializer(restClient)) { Map config = new HashMap<>(); config.put(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, SimpleTopicIdStrategy.class); @@ -106,7 +105,7 @@ public void testProtobufSchemaWithReferences() { @Test public void testProtobufSchemaWithReferencesDereferenced() { try (Serializer serializer = new ProtobufKafkaSerializer<>(restClient); - Deserializer deserializer = new ProtobufKafkaDeserializer(restClient)) { + Deserializer deserializer = new ProtobufKafkaDeserializer(restClient)) { Map config = new HashMap<>(); config.put(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, SimpleTopicIdStrategy.class); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/storage/AbstractRegistryStorageTest.java b/app/src/test/java/io/apicurio/registry/noprofile/storage/AbstractRegistryStorageTest.java index a43b199ad8..7451b29ef7 100644 --- 
a/app/src/test/java/io/apicurio/registry/noprofile/storage/AbstractRegistryStorageTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/storage/AbstractRegistryStorageTest.java @@ -9,1065 +9,1117 @@ public abstract class AbstractRegistryStorageTest extends AbstractResourceTestBa private static final String GROUP_ID = AbstractRegistryStorageTest.class.getSimpleName(); - protected static final String OPENAPI_CONTENT = "{" + - " \"openapi\": \"3.0.2\"," + - " \"info\": {" + - " \"title\": \"Empty API\"," + - " \"version\": \"1.0.0\"," + - " \"description\": \"An example API design using OpenAPI.\"" + - " }" + - "}"; - protected static final String OPENAPI_CONTENT_V2 = "{" + - " \"openapi\": \"3.0.2\"," + - " \"info\": {" + - " \"title\": \"Empty API 2\"," + - " \"version\": \"1.0.1\"," + - " \"description\": \"An example API design using OpenAPI.\"" + - " }" + - "}"; - protected static final String OPENAPI_CONTENT_TEMPLATE = "{" + - " \"openapi\": \"3.0.2\"," + - " \"info\": {" + - " \"title\": \"Empty API 2\"," + - " \"version\": \"VERSION\"," + - " \"description\": \"An example API design using OpenAPI.\"" + - " }" + - "}"; + protected static final String OPENAPI_CONTENT = "{" + " \"openapi\": \"3.0.2\"," + " \"info\": {" + + " \"title\": \"Empty API\"," + " \"version\": \"1.0.0\"," + + " \"description\": \"An example API design using OpenAPI.\"" + " }" + "}"; + protected static final String OPENAPI_CONTENT_V2 = "{" + " \"openapi\": \"3.0.2\"," + " \"info\": {" + + " \"title\": \"Empty API 2\"," + " \"version\": \"1.0.1\"," + + " \"description\": \"An example API design using OpenAPI.\"" + " }" + "}"; + protected static final String OPENAPI_CONTENT_TEMPLATE = "{" + " \"openapi\": \"3.0.2\"," + + " \"info\": {" + " \"title\": \"Empty API 2\"," + " \"version\": \"VERSION\"," + + " \"description\": \"An example API design using OpenAPI.\"" + " }" + "}"; @Inject Logger log; /** - * Gets the artifactStore to use. Subclasses must provide this. 
+ * Gets the artifactStore to use. Subclasses must provide this. */ protected abstract RegistryStorage storage(); -// -// @Test -// public void testGetArtifactIds() throws Exception { -// -// int size = storage().getArtifactIds(null).size(); -// -// String artifactIdPrefix = "testGetArtifactIds-"; -// for (int idx = 1; idx <= 10; idx++) { -// String artifactId = artifactIdPrefix + idx; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// //Verify group metadata is also created -// GroupMetaDataDto groupMetaDataDto = storage().getGroupMetaData(GROUP_ID); -// Assertions.assertNotNull(groupMetaDataDto); -// Assertions.assertEquals(GROUP_ID, groupMetaDataDto.getGroupId()); -// } -// -// int newsize = storage().getArtifactIds(null).size(); -// int newids = newsize - size; -// Assertions.assertEquals(10, newids); -// } -// -// @Test -// public void testCreateArtifact() throws Exception { -// String artifactId = "testCreateArtifact-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// Assertions.assertEquals("Empty API", dto.getName()); -// Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); -// Assertions.assertNull(dto.getLabels()); -// Assertions.assertEquals("1", dto.getVersion()); -// -// StoredArtifactVersionDto storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, dto.getVersion()); -// 
Assertions.assertNotNull(storedArtifact); -// Assertions.assertEquals(OPENAPI_CONTENT, storedArtifact.getContent().content()); -// Assertions.assertEquals(dto.getGlobalId(), storedArtifact.getGlobalId()); -// Assertions.assertEquals(dto.getVersion(), storedArtifact.getVersion()); -// -// ArtifactMetaDataDto amdDto = storage().getArtifactMetaData(GROUP_ID, artifactId); -// Assertions.assertNotNull(amdDto); -// Assertions.assertEquals("Empty API", amdDto.getName()); -// Assertions.assertEquals("An example API design using OpenAPI.", amdDto.getDescription()); -// Assertions.assertNull(amdDto.getLabels()); -// -// ArtifactVersionMetaDataDto versionMetaDataDto = storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "1"); -// Assertions.assertNotNull(versionMetaDataDto); -// Assertions.assertEquals(dto.getGlobalId(), versionMetaDataDto.getGlobalId()); -// Assertions.assertEquals("Empty API", versionMetaDataDto.getName()); -// Assertions.assertEquals("An example API design using OpenAPI.", versionMetaDataDto.getDescription()); -// Assertions.assertEquals(VersionState.ENABLED, versionMetaDataDto.getState()); -// Assertions.assertEquals("1", versionMetaDataDto.getVersion()); -// -// StoredArtifactVersionDto storedVersion = storage().getArtifactVersionContent(dto.getGlobalId()); -// Assertions.assertNotNull(storedVersion); -// Assertions.assertEquals(OPENAPI_CONTENT, storedVersion.getContent().content()); -// Assertions.assertEquals(dto.getGlobalId(), storedVersion.getGlobalId()); -// Assertions.assertEquals(dto.getVersion(), storedVersion.getVersion()); -// -// storedVersion = storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// Assertions.assertNotNull(storedVersion); -// Assertions.assertEquals(OPENAPI_CONTENT, storedVersion.getContent().content()); -// Assertions.assertEquals(dto.getGlobalId(), storedVersion.getGlobalId()); -// Assertions.assertEquals(dto.getVersion(), storedVersion.getVersion()); -// -// List versions = 
storage().getArtifactVersions(GROUP_ID, artifactId); -// Assertions.assertNotNull(versions); -// Assertions.assertEquals("1", versions.iterator().next()); -// } -// -// @Test -// public void testCreateArtifactWithMetaData() throws Exception { -// String artifactId = "testCreateArtifactWithMetaData-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// EditableArtifactMetaDataDto metaData = new EditableArtifactMetaDataDto( -// "NAME", "DESCRIPTION", null, Collections.singletonMap("KEY", "VALUE") -// ); -// ArtifactVersionMetaDataDto dto = storage().createArtifactWithMetadata(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, metaData, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// Assertions.assertEquals("NAME", dto.getName()); -// Assertions.assertEquals("DESCRIPTION", dto.getDescription()); -// Assertions.assertNotNull(dto.getLabels()); -// Assertions.assertEquals(metaData.getLabels(), dto.getLabels()); -// Assertions.assertEquals("1", dto.getVersion()); -// -// StoredArtifactVersionDto storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// Assertions.assertNotNull(storedArtifact); -// Assertions.assertEquals(OPENAPI_CONTENT, storedArtifact.getContent().content()); -// Assertions.assertEquals(dto.getGlobalId(), storedArtifact.getGlobalId()); -// Assertions.assertEquals(dto.getVersion(), storedArtifact.getVersion()); -// -// ArtifactMetaDataDto amdDto = storage().getArtifactMetaData(GROUP_ID, artifactId); -// Assertions.assertNotNull(amdDto); -// Assertions.assertEquals("NAME", amdDto.getName()); -// Assertions.assertEquals("DESCRIPTION", amdDto.getDescription()); -// Assertions.assertEquals(metaData.getLabels(), amdDto.getLabels()); -// } -// -// @Test -// public void testCreateArtifactWithLargeMetaData() throws Exception { -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); 
-// -// // Test creating an artifact with meta-data that is too large for the DB -// String artifactId = "testCreateArtifactWithLargeMetaData"; -// EditableArtifactMetaDataDto metaData = new EditableArtifactMetaDataDto(); -// metaData.setName(generateString(600)); -// metaData.setDescription(generateString(2000)); -// metaData.setLabels(new HashMap<>()); -// metaData.getLabels().put("key-" + generateString(300), "value-" + generateString(2000)); -// ArtifactVersionMetaDataDto dto = storage().createArtifactWithMetadata(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, metaData, null); -// -// dto = storage().getArtifactVersionMetaData(dto.getGlobalId()); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// Assertions.assertEquals(512, dto.getName().length()); -// Assertions.assertEquals(1024, dto.getDescription().length()); -// Assertions.assertTrue(dto.getDescription().endsWith("...")); -// Assertions.assertNotNull(dto.getLabels()); -// Assertions.assertEquals(1, dto.getLabels().size()); -// } -// -// @Test -// public void testCreateDuplicateArtifact() throws Exception { -// String artifactId = "testCreateDuplicateArtifact-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// -// // Should throw error for duplicate artifact. 
-// Assertions.assertThrows(ArtifactAlreadyExistsException.class, () -> { -// storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// }); -// } -// -// @Test -// public void testArtifactNotFound() throws Exception { -// String artifactId = "testArtifactNotFound-1"; -// -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// }); -// -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactMetaData(GROUP_ID, artifactId); -// }); -// -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// }); -// -// Assertions.assertThrows(VersionNotFoundException.class, () -> { -// storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "1"); -// }); -// } -// -// @Test -// public void testCreateArtifactVersion() throws Exception { -// String artifactId = "testCreateArtifactVersion-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// List versions = storage().getArtifactVersions(GROUP_ID, artifactId); -// Assertions.assertNotNull(versions); -// Assertions.assertFalse(versions.isEmpty()); -// Assertions.assertEquals(1, versions.size()); -// -// ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); -// ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, contentv2, null); -// Assertions.assertNotNull(dtov2); -// Assertions.assertEquals(GROUP_ID, dtov2.getGroupId()); -// Assertions.assertEquals(artifactId, dtov2.getArtifactId()); -// Assertions.assertEquals("2", 
dtov2.getVersion()); -// -// versions = storage().getArtifactVersions(GROUP_ID, artifactId); -// Assertions.assertNotNull(versions); -// Assertions.assertFalse(versions.isEmpty()); -// Assertions.assertEquals(2, versions.size()); -// } -// -// @Test -// public void testGetArtifactVersions() throws Exception { -// String artifactId = "testGetArtifactVersions"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// StoredArtifactVersionDto storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); -// -// storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); -// -// storedArtifact = storage().getArtifactVersionContent(dto.getGlobalId()); -// verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); -// -// ArtifactVersionMetaDataDto dtov1 = storage().getArtifactVersionMetaData(dto.getGlobalId()); -// verifyArtifactMetadata(dtov1, dto); -// -// List versions = storage().getArtifactVersions(GROUP_ID, artifactId); -// Assertions.assertNotNull(versions); -// Assertions.assertFalse(versions.isEmpty()); -// Assertions.assertEquals(1, versions.size()); -// -// ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); -// ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, contentv2, null); -// Assertions.assertNotNull(dtov2); -// Assertions.assertEquals(GROUP_ID, dtov2.getGroupId()); -// Assertions.assertEquals(artifactId, dtov2.getArtifactId()); -// Assertions.assertEquals("2", dtov2.getVersion()); -// -// versions = 
storage().getArtifactVersions(GROUP_ID, artifactId); -// Assertions.assertNotNull(versions); -// Assertions.assertFalse(versions.isEmpty()); -// Assertions.assertEquals(2, versions.size()); -// -// //verify version 2 -// -// storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, "2"); -// verifyArtifact(storedArtifact, OPENAPI_CONTENT_V2, dtov2); -// -// storedArtifact = storage().getArtifactVersionContent(dtov2.getGlobalId()); -// verifyArtifact(storedArtifact, OPENAPI_CONTENT_V2, dtov2); -// -// ArtifactVersionMetaDataDto dtov2Stored = storage().getArtifactVersionMetaData(dtov2.getGlobalId()); -// verifyArtifactMetadata(dtov2Stored, dtov2); -// -// // verify version 1 again -// -// storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); -// -// storedArtifact = storage().getArtifactVersionContent(dto.getGlobalId()); -// verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); -// -// dtov1 = storage().getArtifactVersionMetaData(dto.getGlobalId()); -// verifyArtifactMetadata(dtov1, dto); -// -// } -// -// private void verifyArtifact(StoredArtifactVersionDto storedArtifact, String content, ArtifactVersionMetaDataDto expectedMetadata) { -// Assertions.assertNotNull(storedArtifact); -// Assertions.assertEquals(content, storedArtifact.getContent().content()); -// Assertions.assertEquals(expectedMetadata.getGlobalId(), storedArtifact.getGlobalId()); -// Assertions.assertEquals(expectedMetadata.getVersion(), storedArtifact.getVersion()); -// } -// -// private void verifyArtifactMetadata(ArtifactVersionMetaDataDto actualMetadata, ArtifactVersionMetaDataDto expectedMetadata) { -// Assertions.assertNotNull(actualMetadata); -// Assertions.assertNotNull(expectedMetadata); -// Assertions.assertEquals(expectedMetadata.getName(), actualMetadata.getName()); -// Assertions.assertEquals(expectedMetadata.getDescription(), actualMetadata.getDescription()); -// } -// -// @Test -// 
public void testCreateArtifactVersionWithMetaData() throws Exception { -// String artifactId = "testCreateArtifactVersionWithMetaData-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// List versions = storage().getArtifactVersions(GROUP_ID, artifactId); -// Assertions.assertNotNull(versions); -// Assertions.assertFalse(versions.isEmpty()); -// Assertions.assertEquals(1, versions.size()); -// -// ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); -// EditableVersionMetaDataDto metaData = new EditableVersionMetaDataDto("NAME", "DESC", null, Collections.singletonMap("K", "V")); -// ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersionWithMetadata(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, -// contentv2, metaData, null); -// Assertions.assertNotNull(dtov2); -// Assertions.assertEquals(GROUP_ID, dtov2.getGroupId()); -// Assertions.assertEquals(artifactId, dtov2.getArtifactId()); -// Assertions.assertEquals("2", dtov2.getVersion()); -// Assertions.assertEquals("NAME", dtov2.getName()); -// Assertions.assertEquals("DESC", dtov2.getDescription()); -// Assertions.assertEquals(metaData.getLabels(), dtov2.getLabels()); -// -// versions = storage().getArtifactVersions(GROUP_ID, artifactId); -// Assertions.assertNotNull(versions); -// Assertions.assertFalse(versions.isEmpty()); -// Assertions.assertEquals(2, versions.size()); -// -// ArtifactVersionMetaDataDto vmd = storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "2"); -// Assertions.assertNotNull(vmd); -// Assertions.assertEquals("NAME", vmd.getName()); -// Assertions.assertEquals("DESC", vmd.getDescription()); -// } -// -// @Test -// public void 
testGetArtifactMetaDataByGlobalId() throws Exception { -// String artifactId = "testGetArtifactMetaDataByGlobalId-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// Assertions.assertEquals("Empty API", dto.getName()); -// Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); -// Assertions.assertNull(dto.getLabels()); -// Assertions.assertEquals("1", dto.getVersion()); -// -// long globalId = dto.getGlobalId(); -// -// dto = storage().getArtifactVersionMetaData(globalId); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// Assertions.assertEquals("Empty API", dto.getName()); -// Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); -// Assertions.assertNull(dto.getLabels()); -// Assertions.assertEquals("1", dto.getVersion()); -// } -// -// @Test -// public void testUpdateArtifactMetaData() throws Exception { -// String artifactId = "testUpdateArtifactMetaData-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// Assertions.assertEquals("Empty API", dto.getName()); -// Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); -// Assertions.assertNull(dto.getLabels()); -// Assertions.assertEquals("1", dto.getVersion()); -// -// String newName = "Updated Name"; -// String 
newDescription = "Updated description."; -// Map newLabels = new HashMap<>(); -// newLabels.put("foo", "bar"); -// newLabels.put("ting", "bin"); -// EditableArtifactMetaDataDto emd = new EditableArtifactMetaDataDto(newName, newDescription, null, newLabels); -// storage().updateArtifactMetaData(GROUP_ID, artifactId, emd); -// -// ArtifactMetaDataDto metaData = storage().getArtifactMetaData(GROUP_ID, artifactId); -// Assertions.assertNotNull(metaData); -// Assertions.assertEquals(newName, metaData.getName()); -// Assertions.assertEquals(newDescription, metaData.getDescription()); -// } -// -// @Test -// public void testUpdateArtifactVersionState() throws Exception { -// String artifactId = "testUpdateArtifactVersionState-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// -// ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); -// ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, contentv2, null); -// Assertions.assertNotNull(dtov2); -// Assertions.assertEquals(GROUP_ID, dtov2.getGroupId()); -// Assertions.assertEquals(artifactId, dtov2.getArtifactId()); -// Assertions.assertEquals("2", dtov2.getVersion()); -// -// updateVersionState(GROUP_ID, artifactId, "1", VersionState.DISABLED); -// updateVersionState(GROUP_ID, artifactId, "2", VersionState.DEPRECATED); -// -// ArtifactVersionMetaDataDto v1 = storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "1"); -// ArtifactVersionMetaDataDto v2 = storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "2"); -// Assertions.assertNotNull(v1); -// Assertions.assertNotNull(v2); -// Assertions.assertEquals(VersionState.DISABLED, v1.getState()); -// Assertions.assertEquals(VersionState.DEPRECATED, v2.getState()); -// } -// -// @Test -// public void 
testUpdateArtifactVersionMetaData() throws Exception { -// String artifactId = "testUpdateArtifactVersionMetaData-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// Assertions.assertEquals("Empty API", dto.getName()); -// Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); -// Assertions.assertNull(dto.getLabels()); -// Assertions.assertEquals("1", dto.getVersion()); -// -// String newName = "Updated Name"; -// String newDescription = "Updated description."; -// Map newLabels = new HashMap<>(); -// newLabels.put("foo", "bar"); -// newLabels.put("ting", "bin"); -// EditableVersionMetaDataDto emd = new EditableVersionMetaDataDto(newName, newDescription, null, newLabels); -// storage().updateArtifactVersionMetaData(GROUP_ID, artifactId, "1", emd); -// -// ArtifactVersionMetaDataDto metaData = storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "1"); -// Assertions.assertNotNull(metaData); -// Assertions.assertEquals(newName, metaData.getName()); -// Assertions.assertEquals(newDescription, metaData.getDescription()); -// } -// -// @Test -// public void testDeleteArtifact() throws Exception { -// String artifactId = "testDeleteArtifact-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// Assertions.assertEquals("Empty API", dto.getName()); -// Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); 
-// Assertions.assertNull(dto.getLabels()); -// Assertions.assertEquals("1", dto.getVersion()); -// -// storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// -// storage().deleteArtifact(GROUP_ID, artifactId); -// -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// }); -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactMetaData(GROUP_ID, artifactId); -// }); -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); -// }); -// Assertions.assertThrows(VersionNotFoundException.class, () -> { -// storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "1"); -// }); -// } -// -// @Test -// public void testDeleteArtifactVersion() throws Exception { -// // Delete the only version -// //////////////////////////// -// String artifactId = "testDeleteArtifactVersion-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals("1", dto.getVersion()); -// -// storage().deleteArtifactVersion(GROUP_ID, artifactId, "1"); -// -// final String aid1 = artifactId; -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactVersionContent(GROUP_ID, aid1, "1"); -// }); -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactMetaData(GROUP_ID, aid1); -// }); -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactVersionContent(GROUP_ID, aid1, "1"); -// }); -// Assertions.assertThrows(VersionNotFoundException.class, () -> { -// storage().getArtifactVersionMetaData(GROUP_ID, aid1, "1"); -// }); -// -// // Delete one of multiple versions -// artifactId = 
"testDeleteArtifactVersion-2"; -// content = ContentHandle.create(OPENAPI_CONTENT); -// dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals("1", dto.getVersion()); -// -// ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); -// ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, contentv2, null); -// Assertions.assertNotNull(dtov2); -// Assertions.assertEquals("2", dtov2.getVersion()); -// -// storage().deleteArtifactVersion(GROUP_ID, artifactId, "1"); -// -// final String aid2 = artifactId; -// -// storage().getArtifactMetaData(GROUP_ID, aid2); -// storage().getArtifactVersionContent(GROUP_ID, aid2, "2"); -// storage().getArtifactVersionMetaData(GROUP_ID, aid2, "2"); -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactVersionContent(GROUP_ID, aid2, "1"); -// }); -// Assertions.assertThrows(VersionNotFoundException.class, () -> { -// storage().getArtifactVersionMetaData(GROUP_ID, aid2, "1"); -// }); -// -// ArtifactVersionMetaDataDto dtov3 = storage().createArtifactVersion(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dtov3); -// Assertions.assertEquals("3", dtov3.getVersion()); -// -// // Update version 2 to DISABLED state and delete latest version -// updateVersionState(GROUP_ID, artifactId, "2", VersionState.DISABLED); -// storage().deleteArtifactVersion(GROUP_ID, artifactId, "3"); -// -// GAV latestGAV = storage().getBranchTip(new GA(GROUP_ID, artifactId), BranchId.LATEST, DEFAULT); -// ArtifactVersionMetaDataDto artifactMetaData = storage().getArtifactVersionMetaData(GROUP_ID, aid2, latestGAV.getRawVersionId()); -// Assertions.assertNotNull(artifactMetaData); -// Assertions.assertEquals("2", artifactMetaData.getVersion()); -// Assertions.assertEquals(aid2, 
artifactMetaData.getArtifactId()); -// -// // Delete the latest version -// artifactId = "testDeleteArtifactVersion-3"; -// content = ContentHandle.create(OPENAPI_CONTENT); -// dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals("1", dto.getVersion()); -// -// contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); -// dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, contentv2, null); -// Assertions.assertNotNull(dtov2); -// -// final String aid3 = artifactId; -// storage().deleteArtifactVersion(GROUP_ID, aid3, "2"); -// List versions = storage().getArtifactVersions(GROUP_ID, aid3); -// Assertions.assertNotNull(versions); -// Assertions.assertFalse(versions.isEmpty()); -// Assertions.assertEquals(1, versions.size()); -// Assertions.assertEquals("1", versions.iterator().next()); -// -// VersionSearchResultsDto result = storage().searchVersions(GROUP_ID, aid3, 0, 10); -// Assertions.assertNotNull(result); -// Assertions.assertEquals(1, result.getCount()); -// Assertions.assertEquals("1", result.getVersions().iterator().next().getVersion()); -// -// artifactMetaData = storage().getArtifactVersionMetaData(GROUP_ID, aid3, "1"); -// Assertions.assertNotNull(artifactMetaData); -// Assertions.assertEquals("1", artifactMetaData.getVersion()); -// Assertions.assertEquals(aid3, artifactMetaData.getArtifactId()); -// -// storage().getArtifactVersionContent(GROUP_ID, aid3, "1"); -// ArtifactVersionMetaDataDto metaData = storage().getArtifactVersionMetaData(GROUP_ID, aid3, "1"); -// Assertions.assertNotNull(metaData); -// Assertions.assertEquals("1", metaData.getVersion()); -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactVersionContent(GROUP_ID, aid3, "2"); -// }); -// Assertions.assertThrows(VersionNotFoundException.class, () -> { -// storage().getArtifactVersionMetaData(GROUP_ID, aid3, 
"2"); -// }); -// -// // Delete the only artifact version left - same as deleting the whole artifact -// storage().deleteArtifactVersion(GROUP_ID, aid3, "1"); -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> { -// storage().getArtifactMetaData(GROUP_ID, aid3); -// }); -// } -// -// @Test -// public void testCreateArtifactRule() throws Exception { -// String artifactId = "testCreateArtifactRule-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// List artifactRules = storage().getArtifactRules(GROUP_ID, artifactId); -// Assertions.assertNotNull(artifactRules); -// Assertions.assertTrue(artifactRules.isEmpty()); -// -// RuleConfigurationDto configDto = new RuleConfigurationDto("FULL"); -// storage().createArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY, configDto); -// -// artifactRules = storage().getArtifactRules(GROUP_ID, artifactId); -// Assertions.assertNotNull(artifactRules); -// Assertions.assertFalse(artifactRules.isEmpty()); -// Assertions.assertEquals(1, artifactRules.size()); -// Assertions.assertEquals(RuleType.VALIDITY, artifactRules.get(0)); -// } -// -// @Test -// public void testUpdateArtifactRule() throws Exception { -// String artifactId = "testUpdateArtifactRule-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// RuleConfigurationDto configDto = new RuleConfigurationDto("FULL"); -// storage().createArtifactRule(GROUP_ID, 
artifactId, RuleType.VALIDITY, configDto); -// -// RuleConfigurationDto rule = storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); -// Assertions.assertNotNull(rule); -// Assertions.assertEquals("FULL", rule.getConfiguration()); -// -// RuleConfigurationDto updatedConfig = new RuleConfigurationDto("NONE"); -// storage().updateArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY, updatedConfig); -// -// rule = storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); -// Assertions.assertNotNull(rule); -// Assertions.assertEquals("NONE", rule.getConfiguration()); -// } -// -// @Test -// public void testDeleteArtifactRule() throws Exception { -// String artifactId = "testDeleteArtifactRule-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// RuleConfigurationDto configDto = new RuleConfigurationDto("FULL"); -// storage().createArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY, configDto); -// -// RuleConfigurationDto rule = storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); -// Assertions.assertNotNull(rule); -// Assertions.assertEquals("FULL", rule.getConfiguration()); -// -// storage().deleteArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); -// -// Assertions.assertThrows(RuleNotFoundException.class, () -> { -// storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); -// }); -// } -// -// @Test -// public void testDeleteAllArtifactRules() throws Exception { -// String artifactId = "testDeleteAllArtifactRulse-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, 
null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// RuleConfigurationDto configDto = new RuleConfigurationDto("FULL"); -// storage().createArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY, configDto); -// storage().createArtifactRule(GROUP_ID, artifactId, RuleType.COMPATIBILITY, configDto); -// -// List rules = storage().getArtifactRules(GROUP_ID, artifactId); -// Assertions.assertEquals(2, rules.size()); -// -// storage().deleteArtifactRules(GROUP_ID, artifactId); -// -// Assertions.assertThrows(RuleNotFoundException.class, () -> { -// storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); -// }); -// Assertions.assertThrows(RuleNotFoundException.class, () -> { -// storage().getArtifactRule(GROUP_ID, artifactId, RuleType.COMPATIBILITY); -// }); -// } -// -// @Test -// public void testGlobalRules() { -// List globalRules = storage().getGlobalRules(); -// Assertions.assertNotNull(globalRules); -// Assertions.assertTrue(globalRules.isEmpty()); -// -// RuleConfigurationDto config = new RuleConfigurationDto(); -// config.setConfiguration("FULL"); -// storage().createGlobalRule(RuleType.COMPATIBILITY, config); -// -// RuleConfigurationDto rule = storage().getGlobalRule(RuleType.COMPATIBILITY); -// Assertions.assertEquals(rule.getConfiguration(), config.getConfiguration()); -// -// globalRules = storage().getGlobalRules(); -// Assertions.assertNotNull(globalRules); -// Assertions.assertFalse(globalRules.isEmpty()); -// Assertions.assertEquals(globalRules.size(), 1); -// Assertions.assertEquals(globalRules.get(0), RuleType.COMPATIBILITY); -// -// Assertions.assertThrows(RuleAlreadyExistsException.class, () -> { -// storage().createGlobalRule(RuleType.COMPATIBILITY, config); -// }); -// -// RuleConfigurationDto updatedConfig = new RuleConfigurationDto("FORWARD"); -// storage().updateGlobalRule(RuleType.COMPATIBILITY, updatedConfig); -// -// 
rule = storage().getGlobalRule(RuleType.COMPATIBILITY); -// Assertions.assertEquals(rule.getConfiguration(), updatedConfig.getConfiguration()); -// -// Assertions.assertThrows(RuleNotFoundException.class, () -> { -// storage().updateGlobalRule(RuleType.VALIDITY, config); -// }); -// -// storage().deleteGlobalRules(); -// globalRules = storage().getGlobalRules(); -// Assertions.assertNotNull(globalRules); -// Assertions.assertTrue(globalRules.isEmpty()); -// -// storage().createGlobalRule(RuleType.COMPATIBILITY, config); -// storage().deleteGlobalRule(RuleType.COMPATIBILITY); -// globalRules = storage().getGlobalRules(); -// Assertions.assertNotNull(globalRules); -// Assertions.assertTrue(globalRules.isEmpty()); -// } -// -// @Test -// public void testSearchArtifacts() throws Exception { -// String artifactIdPrefix = "testSearchArtifacts-"; -// for (int idx = 1; idx <= 50; idx++) { -// String idxs = (idx < 10 ? "0" : "") + idx; -// String artifactId = artifactIdPrefix + idxs; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// Map labels = Collections.singletonMap("key", "value-" + idx); -// EditableArtifactMetaDataDto metaData = new EditableArtifactMetaDataDto( -// artifactId + "-name", -// artifactId + "-description", -// null, -// labels); -// storage().createArtifactWithMetadata(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, metaData, null); -// } -// -// long start = System.currentTimeMillis(); -// -// Set filters = Collections.singleton(SearchFilter.ofName("testSearchArtifacts")); -// ArtifactSearchResultsDto results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); -// Assertions.assertNotNull(results); -// Assertions.assertEquals(50, results.getCount()); -// Assertions.assertNotNull(results.getArtifacts()); -// Assertions.assertEquals(10, results.getArtifacts().size()); -// -// -// filters = Collections.singleton(SearchFilter.ofName("testSearchArtifacts-19-name")); -// results = 
storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); -// Assertions.assertNotNull(results); -// Assertions.assertEquals(1, results.getCount()); -// Assertions.assertNotNull(results.getArtifacts()); -// Assertions.assertEquals(1, results.getArtifacts().size()); -// Assertions.assertEquals("testSearchArtifacts-19-name", results.getArtifacts().get(0).getName()); -// -// -// filters = Collections.singleton(SearchFilter.ofDescription("testSearchArtifacts-33-description")); -// results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); -// Assertions.assertNotNull(results); -// Assertions.assertEquals(1, results.getCount()); -// Assertions.assertNotNull(results.getArtifacts()); -// Assertions.assertEquals(1, results.getArtifacts().size()); -// Assertions.assertEquals("testSearchArtifacts-33-name", results.getArtifacts().get(0).getName()); -// -// -// filters = Collections.emptySet(); -// results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); -// Assertions.assertNotNull(results); -// Assertions.assertNotNull(results.getArtifacts()); -// Assertions.assertEquals(10, results.getArtifacts().size()); -// -// -// filters = Collections.singleton(SearchFilter.ofEverything("testSearchArtifacts")); -// results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 1000); -// Assertions.assertNotNull(results); -// Assertions.assertEquals(50, results.getCount()); -// Assertions.assertNotNull(results.getArtifacts()); -// Assertions.assertEquals(50, results.getArtifacts().size()); -// Assertions.assertEquals("testSearchArtifacts-01-name", results.getArtifacts().get(0).getName()); -// Assertions.assertEquals("testSearchArtifacts-02-name", results.getArtifacts().get(1).getName()); -// -// -// filters = Collections.singleton(SearchFilter.ofLabel("key", "value-17")); -// results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); -// 
Assertions.assertNotNull(results); -// Assertions.assertEquals(1, results.getCount()); -// Assertions.assertNotNull(results.getArtifacts()); -// Assertions.assertEquals(1, results.getArtifacts().size()); -// Assertions.assertEquals("testSearchArtifacts-17-name", results.getArtifacts().get(0).getName()); -// -// -// long end = System.currentTimeMillis(); -// System.out.println("Search time: " + (end - start) + "ms"); -// } -// -// @Test -// public void testSearchVersions() throws Exception { -// String artifactId = "testSearchVersions-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// // Add more versions -// for (int idx = 2; idx <= 50; idx++) { -// content = ContentHandle.create(OPENAPI_CONTENT_TEMPLATE.replaceAll("VERSION", "1.0." 
+ idx)); -// EditableVersionMetaDataDto metaData = new EditableVersionMetaDataDto( -// artifactId + "-name-" + idx, -// artifactId + "-description-" + idx, -// null, null); -// storage().createArtifactVersionWithMetadata(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, metaData, null); -// } -// -// TestUtils.retry(() -> { -// VersionSearchResultsDto results = storage().searchVersions(GROUP_ID, artifactId, 0, 10); -// Assertions.assertNotNull(results); -// Assertions.assertEquals(50, results.getCount()); -// Assertions.assertEquals(10, results.getVersions().size()); -// -// results = storage().searchVersions(GROUP_ID, artifactId, 0, 1000); -// Assertions.assertNotNull(results); -// Assertions.assertEquals(50, results.getCount()); -// Assertions.assertEquals(50, results.getVersions().size()); -// }); -// } -// -// private void createSomeUserData() { -// final String group1 = "testGroup-1"; -// final String group2 = "testGroup-2"; -// final String artifactId1 = "testArtifact-1"; -// final String artifactId2 = "testArtifact-2"; -// final String principal = "testPrincipal"; -// final String role = "testRole"; -// -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// // storage().createGroup(GroupMetaDataDto.builder().groupId(group1).build()); -// // ^ TODO Uncomment after https://github.com/Apicurio/apicurio-registry/issues/1721 -// ArtifactVersionMetaDataDto artifactDto1 = storage().createArtifact(group1, artifactId1, null, ArtifactType.OPENAPI, content, null); -// storage().createArtifactRule(group1, artifactId1, RuleType.VALIDITY, RuleConfigurationDto.builder().configuration("FULL").build()); -// ArtifactVersionMetaDataDto artifactDto2 = storage().createArtifactWithMetadata( -// group2, artifactId2, null, ArtifactType.OPENAPI, content, EditableArtifactMetaDataDto.builder().name("test").build(), null); -// storage().createGlobalRule(RuleType.VALIDITY, RuleConfigurationDto.builder().configuration("FULL").build()); -// 
storage().createRoleMapping(principal, role, null); -// -// // Verify data exists -// -// Assertions.assertNotNull(storage().getArtifactVersionContent(group1, artifactId1, artifactDto1.getVersion())); -// Assertions.assertEquals(1, storage().getArtifactRules(group1, artifactId1).size()); -// Assertions.assertNotNull(storage().getArtifactVersionContent(group2, artifactId2, artifactDto2.getVersion())); -// Assertions.assertEquals(1, storage().getGlobalRules().size()); -// Assertions.assertEquals(role, storage().getRoleForPrincipal(principal)); -// } -// -// private int countStorageEntities() { -// // We don't need thread safety, but it's simpler to use this when effectively final counter is needed -// final AtomicInteger count = new AtomicInteger(0); -// storage().exportData(e -> { -// if (e.getEntityType() != EntityType.Manifest) { -// log.debug("Counting from export: {}", e); -// count.incrementAndGet(); -// } -// return null; -// }); -// int res = count.get(); -// // Count data that is not exported -// res += storage().getRoleMappings().size(); -// return res; -// } -// -// @Test -// public void testDeleteAllUserData() { -// // Delete first to cleanup after other tests -// storage().deleteAllUserData(); -// createSomeUserData(); -// Assertions.assertEquals(10, countStorageEntities()); -// // ^ TODO Change to 9 after https://github.com/Apicurio/apicurio-registry/issues/1721 -// // Delete all -// storage().deleteAllUserData(); -// Assertions.assertEquals(0, countStorageEntities()); -// } -// -// @Test -// public void testConfigProperties() throws Exception { -// List properties = storage().getConfigProperties(); -// Assertions.assertNotNull(properties); -// Assertions.assertTrue(properties.isEmpty()); -// -// storage().setConfigProperty(new DynamicConfigPropertyDto("apicurio.test.property-string", "test-value")); -// storage().setConfigProperty(new DynamicConfigPropertyDto("apicurio.test.property-boolean", "true")); -// storage().setConfigProperty(new 
DynamicConfigPropertyDto("apicurio.test.property-long", "12345")); -// -// properties = storage().getConfigProperties(); -// Assertions.assertNotNull(properties); -// Assertions.assertFalse(properties.isEmpty()); -// Assertions.assertEquals(3, properties.size()); -// -// DynamicConfigPropertyDto stringProp = getProperty(properties, "apicurio.test.property-string"); -// DynamicConfigPropertyDto boolProp = getProperty(properties, "apicurio.test.property-boolean"); -// DynamicConfigPropertyDto longProp = getProperty(properties, "apicurio.test.property-long"); -// -// Assertions.assertNotNull(stringProp); -// Assertions.assertNotNull(boolProp); -// Assertions.assertNotNull(longProp); -// -// Assertions.assertEquals("test-value", stringProp.getValue()); -// Assertions.assertEquals("true", boolProp.getValue()); -// Assertions.assertEquals("12345", longProp.getValue()); -// } -// -// private DynamicConfigPropertyDto getProperty(List properties, String propertyName) { -// for (DynamicConfigPropertyDto prop : properties) { -// if (prop.getName().equals(propertyName)) { -// return prop; -// } -// } -// return null; -// } -// -// -// @Test -// public void testComments() { -// String artifactId = "testComments-1"; -// ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dto); -// Assertions.assertEquals(GROUP_ID, dto.getGroupId()); -// Assertions.assertEquals(artifactId, dto.getArtifactId()); -// -// List comments = storage().getArtifactVersionComments(GROUP_ID, artifactId, dto.getVersion()); -// Assertions.assertTrue(comments.isEmpty()); -// -// storage().createArtifactVersionComment(GROUP_ID, artifactId, dto.getVersion(), "TEST_COMMENT_1"); -// storage().createArtifactVersionComment(GROUP_ID, artifactId, dto.getVersion(), "TEST_COMMENT_2"); -// storage().createArtifactVersionComment(GROUP_ID, artifactId, 
dto.getVersion(), "TEST_COMMENT_3"); -// -// comments = storage().getArtifactVersionComments(GROUP_ID, artifactId, dto.getVersion()); -// Assertions.assertEquals(3, comments.size()); -// -// storage().deleteArtifactVersionComment(GROUP_ID, artifactId, dto.getVersion(), comments.get(1).getCommentId()); -// -// comments = storage().getArtifactVersionComments(GROUP_ID, artifactId, dto.getVersion()); -// Assertions.assertEquals(2, comments.size()); -// -// storage().updateArtifactVersionComment(GROUP_ID, artifactId, dto.getVersion(), comments.get(0).getCommentId(), "TEST_COMMENT_4"); -// -// comments = storage().getArtifactVersionComments(GROUP_ID, artifactId, dto.getVersion()); -// Assertions.assertEquals(2, comments.size()); -// Assertions.assertEquals("TEST_COMMENT_4", comments.get(0).getValue()); -// } -// -// -// @Test -// public void testBranches() { -// -// var ga = new GA(GROUP_ID, "foo"); -// -// Assertions.assertThrows(ArtifactNotFoundException.class, () -> storage().getBranches(ga)); -// -// var content = ContentHandle.create(OPENAPI_CONTENT); -// ArtifactVersionMetaDataDto dtoV1 = storage().createArtifact(GROUP_ID, ga.getRawArtifactId(), null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dtoV1); -// Assertions.assertEquals(ga.getRawGroupIdWithDefaultString(), dtoV1.getGroupId()); -// Assertions.assertEquals(ga.getRawArtifactId(), dtoV1.getArtifactId()); -// -// var branches = storage().getBranches(ga); -// Assertions.assertEquals(Map.of(BranchId.LATEST, List.of(new GAV(ga, dtoV1.getVersion()))), branches); -// -// var latestBranch = storage().getBranch(ga, BranchId.LATEST, DEFAULT); -// Assertions.assertEquals(List.of(new GAV(ga, dtoV1.getVersion())), latestBranch); -// -// var gavV1 = storage().getBranchTip(ga, BranchId.LATEST, DEFAULT); -// Assertions.assertNotNull(gavV1); -// Assertions.assertEquals(gavV1.getRawGroupIdWithDefaultString(), dtoV1.getGroupId()); -// Assertions.assertEquals(gavV1.getRawArtifactId(), 
dtoV1.getArtifactId()); -// Assertions.assertEquals(gavV1.getRawVersionId(), dtoV1.getVersion()); -// -// var otherBranchId = new BranchId("other"); -// storage().createOrUpdateBranch(gavV1, otherBranchId); -// -// content = ContentHandle.create(OPENAPI_CONTENT_V2); -// var dtoV2 = storage().createArtifactVersion(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), null, ArtifactType.OPENAPI, content, null); -// Assertions.assertNotNull(dtoV2); -// Assertions.assertEquals(ga.getRawGroupIdWithDefaultString(), dtoV2.getGroupId()); -// Assertions.assertEquals(ga.getRawArtifactId(), dtoV2.getArtifactId()); -// -// branches = storage().getBranches(ga); -// Assertions.assertEquals(Map.of( -// BranchId.LATEST, List.of(new GAV(ga, dtoV2.getVersion()), new GAV(ga, dtoV1.getVersion())), -// otherBranchId, List.of(new GAV(ga, dtoV1.getVersion())) -// ), branches); -// -// latestBranch = storage().getBranch(ga, BranchId.LATEST, DEFAULT); -// Assertions.assertEquals(List.of(new GAV(ga, dtoV2.getVersion()), new GAV(ga, dtoV1.getVersion())), latestBranch); -// -// var otherBranch = storage().getBranch(ga, otherBranchId, DEFAULT); -// Assertions.assertEquals(List.of(new GAV(ga, dtoV1.getVersion())), otherBranch); -// -// var gavV2 = storage().getBranchTip(ga, BranchId.LATEST, DEFAULT); -// Assertions.assertNotNull(gavV2); -// Assertions.assertEquals(gavV2.getRawGroupIdWithDefaultString(), dtoV2.getGroupId()); -// Assertions.assertEquals(gavV2.getRawArtifactId(), dtoV2.getArtifactId()); -// Assertions.assertEquals(gavV2.getRawVersionId(), dtoV2.getVersion()); -// -// gavV1 = storage().getBranchTip(ga, otherBranchId, DEFAULT); -// Assertions.assertNotNull(gavV1); -// Assertions.assertEquals(gavV1.getRawGroupIdWithDefaultString(), dtoV1.getGroupId()); -// Assertions.assertEquals(gavV1.getRawArtifactId(), dtoV1.getArtifactId()); -// Assertions.assertEquals(gavV1.getRawVersionId(), dtoV1.getVersion()); -// -// storage().createOrUpdateBranch(gavV2, otherBranchId); -// -// 
branches = storage().getBranches(ga); -// Assertions.assertEquals(Map.of( -// BranchId.LATEST, List.of(new GAV(ga, dtoV2.getVersion()), new GAV(ga, dtoV1.getVersion())), -// otherBranchId, List.of(new GAV(ga, dtoV2.getVersion()), new GAV(ga, dtoV1.getVersion())) -// ), branches); -// -// Assertions.assertEquals(storage().getBranch(ga, BranchId.LATEST, DEFAULT), storage().getBranch(ga, otherBranchId, DEFAULT)); -// Assertions.assertEquals(storage().getBranchTip(ga, BranchId.LATEST, DEFAULT), storage().getBranchTip(ga, otherBranchId, DEFAULT)); -// -// updateVersionState(gavV2.getRawGroupIdWithDefaultString(), gavV2.getRawArtifactId(), gavV2.getRawVersionId(), VersionState.DISABLED); -// Assertions.assertEquals(List.of(gavV1), storage().getBranch(ga, BranchId.LATEST, SKIP_DISABLED_LATEST)); -// Assertions.assertEquals(gavV1, storage().getBranchTip(ga, BranchId.LATEST, ArtifactRetrievalBehavior.SKIP_DISABLED_LATEST)); -// -// updateVersionState(gavV2.getRawGroupIdWithDefaultString(), gavV2.getRawArtifactId(), gavV2.getRawVersionId(), VersionState.ENABLED); -// Assertions.assertEquals(List.of(gavV2, gavV1), storage().getBranch(ga, BranchId.LATEST, SKIP_DISABLED_LATEST)); -// Assertions.assertEquals(gavV2, storage().getBranchTip(ga, BranchId.LATEST, ArtifactRetrievalBehavior.SKIP_DISABLED_LATEST)); -// -// storage().deleteArtifactVersion(gavV1.getRawGroupIdWithDefaultString(), gavV1.getRawArtifactId(), gavV1.getRawVersionId()); -// -// Assertions.assertEquals(List.of(gavV2), storage().getBranch(ga, BranchId.LATEST, DEFAULT)); -// Assertions.assertEquals(List.of(gavV2), storage().getBranch(ga, otherBranchId, DEFAULT)); -// -// storage().deleteBranch(ga, otherBranchId); -// -// Assertions.assertThrows(BranchNotFoundException.class, () -> storage().getBranch(ga, otherBranchId, DEFAULT)); -// Assertions.assertThrows(VersionNotFoundException.class, () -> storage().getBranchTip(ga, otherBranchId, DEFAULT)); -// -// Assertions.assertThrows(NotAllowedException.class, () -> 
storage().deleteBranch(ga, BranchId.LATEST)); -// } -// -// private void updateVersionState(String groupId, String artifactId, String version, VersionState newState) { -// storage().updateArtifactVersionMetaData(groupId, artifactId, version, EditableVersionMetaDataDto.builder() -// .state(newState) -// .build()); -// } -// -// private static String generateString(int size) { -// StringBuilder builder = new StringBuilder(); -// for (int i = 0; i < size; i++) { -// builder.append("a"); -// } -// Assertions.assertEquals(size, builder.toString().length()); -// return builder.toString(); -// } + // + // @Test + // public void testGetArtifactIds() throws Exception { + // + // int size = storage().getArtifactIds(null).size(); + // + // String artifactIdPrefix = "testGetArtifactIds-"; + // for (int idx = 1; idx <= 10; idx++) { + // String artifactId = artifactIdPrefix + idx; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // + // //Verify group metadata is also created + // GroupMetaDataDto groupMetaDataDto = storage().getGroupMetaData(GROUP_ID); + // Assertions.assertNotNull(groupMetaDataDto); + // Assertions.assertEquals(GROUP_ID, groupMetaDataDto.getGroupId()); + // } + // + // int newsize = storage().getArtifactIds(null).size(); + // int newids = newsize - size; + // Assertions.assertEquals(10, newids); + // } + // + // @Test + // public void testCreateArtifact() throws Exception { + // String artifactId = "testCreateArtifact-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // 
Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // Assertions.assertEquals("Empty API", dto.getName()); + // Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); + // Assertions.assertNull(dto.getLabels()); + // Assertions.assertEquals("1", dto.getVersion()); + // + // StoredArtifactVersionDto storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, + // dto.getVersion()); + // Assertions.assertNotNull(storedArtifact); + // Assertions.assertEquals(OPENAPI_CONTENT, storedArtifact.getContent().content()); + // Assertions.assertEquals(dto.getGlobalId(), storedArtifact.getGlobalId()); + // Assertions.assertEquals(dto.getVersion(), storedArtifact.getVersion()); + // + // ArtifactMetaDataDto amdDto = storage().getArtifactMetaData(GROUP_ID, artifactId); + // Assertions.assertNotNull(amdDto); + // Assertions.assertEquals("Empty API", amdDto.getName()); + // Assertions.assertEquals("An example API design using OpenAPI.", amdDto.getDescription()); + // Assertions.assertNull(amdDto.getLabels()); + // + // ArtifactVersionMetaDataDto versionMetaDataDto = storage().getArtifactVersionMetaData(GROUP_ID, + // artifactId, "1"); + // Assertions.assertNotNull(versionMetaDataDto); + // Assertions.assertEquals(dto.getGlobalId(), versionMetaDataDto.getGlobalId()); + // Assertions.assertEquals("Empty API", versionMetaDataDto.getName()); + // Assertions.assertEquals("An example API design using OpenAPI.", versionMetaDataDto.getDescription()); + // Assertions.assertEquals(VersionState.ENABLED, versionMetaDataDto.getState()); + // Assertions.assertEquals("1", versionMetaDataDto.getVersion()); + // + // StoredArtifactVersionDto storedVersion = storage().getArtifactVersionContent(dto.getGlobalId()); + // Assertions.assertNotNull(storedVersion); + // Assertions.assertEquals(OPENAPI_CONTENT, storedVersion.getContent().content()); + // 
Assertions.assertEquals(dto.getGlobalId(), storedVersion.getGlobalId()); + // Assertions.assertEquals(dto.getVersion(), storedVersion.getVersion()); + // + // storedVersion = storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); + // Assertions.assertNotNull(storedVersion); + // Assertions.assertEquals(OPENAPI_CONTENT, storedVersion.getContent().content()); + // Assertions.assertEquals(dto.getGlobalId(), storedVersion.getGlobalId()); + // Assertions.assertEquals(dto.getVersion(), storedVersion.getVersion()); + // + // List versions = storage().getArtifactVersions(GROUP_ID, artifactId); + // Assertions.assertNotNull(versions); + // Assertions.assertEquals("1", versions.iterator().next()); + // } + // + // @Test + // public void testCreateArtifactWithMetaData() throws Exception { + // String artifactId = "testCreateArtifactWithMetaData-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // EditableArtifactMetaDataDto metaData = new EditableArtifactMetaDataDto( + // "NAME", "DESCRIPTION", null, Collections.singletonMap("KEY", "VALUE") + // ); + // ArtifactVersionMetaDataDto dto = storage().createArtifactWithMetadata(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, metaData, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // Assertions.assertEquals("NAME", dto.getName()); + // Assertions.assertEquals("DESCRIPTION", dto.getDescription()); + // Assertions.assertNotNull(dto.getLabels()); + // Assertions.assertEquals(metaData.getLabels(), dto.getLabels()); + // Assertions.assertEquals("1", dto.getVersion()); + // + // StoredArtifactVersionDto storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, + // "1"); + // Assertions.assertNotNull(storedArtifact); + // Assertions.assertEquals(OPENAPI_CONTENT, storedArtifact.getContent().content()); + // Assertions.assertEquals(dto.getGlobalId(), 
storedArtifact.getGlobalId()); + // Assertions.assertEquals(dto.getVersion(), storedArtifact.getVersion()); + // + // ArtifactMetaDataDto amdDto = storage().getArtifactMetaData(GROUP_ID, artifactId); + // Assertions.assertNotNull(amdDto); + // Assertions.assertEquals("NAME", amdDto.getName()); + // Assertions.assertEquals("DESCRIPTION", amdDto.getDescription()); + // Assertions.assertEquals(metaData.getLabels(), amdDto.getLabels()); + // } + // + // @Test + // public void testCreateArtifactWithLargeMetaData() throws Exception { + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // + // // Test creating an artifact with meta-data that is too large for the DB + // String artifactId = "testCreateArtifactWithLargeMetaData"; + // EditableArtifactMetaDataDto metaData = new EditableArtifactMetaDataDto(); + // metaData.setName(generateString(600)); + // metaData.setDescription(generateString(2000)); + // metaData.setLabels(new HashMap<>()); + // metaData.getLabels().put("key-" + generateString(300), "value-" + generateString(2000)); + // ArtifactVersionMetaDataDto dto = storage().createArtifactWithMetadata(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, metaData, null); + // + // dto = storage().getArtifactVersionMetaData(dto.getGlobalId()); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // Assertions.assertEquals(512, dto.getName().length()); + // Assertions.assertEquals(1024, dto.getDescription().length()); + // Assertions.assertTrue(dto.getDescription().endsWith("...")); + // Assertions.assertNotNull(dto.getLabels()); + // Assertions.assertEquals(1, dto.getLabels().size()); + // } + // + // @Test + // public void testCreateDuplicateArtifact() throws Exception { + // String artifactId = "testCreateDuplicateArtifact-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = 
storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // + // // Should throw error for duplicate artifact. + // Assertions.assertThrows(ArtifactAlreadyExistsException.class, () -> { + // storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); + // }); + // } + // + // @Test + // public void testArtifactNotFound() throws Exception { + // String artifactId = "testArtifactNotFound-1"; + // + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); + // }); + // + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactMetaData(GROUP_ID, artifactId); + // }); + // + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); + // }); + // + // Assertions.assertThrows(VersionNotFoundException.class, () -> { + // storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "1"); + // }); + // } + // + // @Test + // public void testCreateArtifactVersion() throws Exception { + // String artifactId = "testCreateArtifactVersion-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // + // List versions = storage().getArtifactVersions(GROUP_ID, artifactId); + // Assertions.assertNotNull(versions); + // Assertions.assertFalse(versions.isEmpty()); + // Assertions.assertEquals(1, versions.size()); + // + // ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); + // ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, 
null, + // ArtifactType.OPENAPI, contentv2, null); + // Assertions.assertNotNull(dtov2); + // Assertions.assertEquals(GROUP_ID, dtov2.getGroupId()); + // Assertions.assertEquals(artifactId, dtov2.getArtifactId()); + // Assertions.assertEquals("2", dtov2.getVersion()); + // + // versions = storage().getArtifactVersions(GROUP_ID, artifactId); + // Assertions.assertNotNull(versions); + // Assertions.assertFalse(versions.isEmpty()); + // Assertions.assertEquals(2, versions.size()); + // } + // + // @Test + // public void testGetArtifactVersions() throws Exception { + // String artifactId = "testGetArtifactVersions"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // + // StoredArtifactVersionDto storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, + // "1"); + // verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); + // + // storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); + // verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); + // + // storedArtifact = storage().getArtifactVersionContent(dto.getGlobalId()); + // verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); + // + // ArtifactVersionMetaDataDto dtov1 = storage().getArtifactVersionMetaData(dto.getGlobalId()); + // verifyArtifactMetadata(dtov1, dto); + // + // List versions = storage().getArtifactVersions(GROUP_ID, artifactId); + // Assertions.assertNotNull(versions); + // Assertions.assertFalse(versions.isEmpty()); + // Assertions.assertEquals(1, versions.size()); + // + // ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); + // ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, null, + // 
ArtifactType.OPENAPI, contentv2, null); + // Assertions.assertNotNull(dtov2); + // Assertions.assertEquals(GROUP_ID, dtov2.getGroupId()); + // Assertions.assertEquals(artifactId, dtov2.getArtifactId()); + // Assertions.assertEquals("2", dtov2.getVersion()); + // + // versions = storage().getArtifactVersions(GROUP_ID, artifactId); + // Assertions.assertNotNull(versions); + // Assertions.assertFalse(versions.isEmpty()); + // Assertions.assertEquals(2, versions.size()); + // + // //verify version 2 + // + // storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, "2"); + // verifyArtifact(storedArtifact, OPENAPI_CONTENT_V2, dtov2); + // + // storedArtifact = storage().getArtifactVersionContent(dtov2.getGlobalId()); + // verifyArtifact(storedArtifact, OPENAPI_CONTENT_V2, dtov2); + // + // ArtifactVersionMetaDataDto dtov2Stored = storage().getArtifactVersionMetaData(dtov2.getGlobalId()); + // verifyArtifactMetadata(dtov2Stored, dtov2); + // + // // verify version 1 again + // + // storedArtifact = storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); + // verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); + // + // storedArtifact = storage().getArtifactVersionContent(dto.getGlobalId()); + // verifyArtifact(storedArtifact, OPENAPI_CONTENT, dto); + // + // dtov1 = storage().getArtifactVersionMetaData(dto.getGlobalId()); + // verifyArtifactMetadata(dtov1, dto); + // + // } + // + // private void verifyArtifact(StoredArtifactVersionDto storedArtifact, String content, + // ArtifactVersionMetaDataDto expectedMetadata) { + // Assertions.assertNotNull(storedArtifact); + // Assertions.assertEquals(content, storedArtifact.getContent().content()); + // Assertions.assertEquals(expectedMetadata.getGlobalId(), storedArtifact.getGlobalId()); + // Assertions.assertEquals(expectedMetadata.getVersion(), storedArtifact.getVersion()); + // } + // + // private void verifyArtifactMetadata(ArtifactVersionMetaDataDto actualMetadata, + // 
ArtifactVersionMetaDataDto expectedMetadata) { + // Assertions.assertNotNull(actualMetadata); + // Assertions.assertNotNull(expectedMetadata); + // Assertions.assertEquals(expectedMetadata.getName(), actualMetadata.getName()); + // Assertions.assertEquals(expectedMetadata.getDescription(), actualMetadata.getDescription()); + // } + // + // @Test + // public void testCreateArtifactVersionWithMetaData() throws Exception { + // String artifactId = "testCreateArtifactVersionWithMetaData-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // + // List versions = storage().getArtifactVersions(GROUP_ID, artifactId); + // Assertions.assertNotNull(versions); + // Assertions.assertFalse(versions.isEmpty()); + // Assertions.assertEquals(1, versions.size()); + // + // ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); + // EditableVersionMetaDataDto metaData = new EditableVersionMetaDataDto("NAME", "DESC", null, + // Collections.singletonMap("K", "V")); + // ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersionWithMetadata(GROUP_ID, artifactId, + // null, ArtifactType.OPENAPI, + // contentv2, metaData, null); + // Assertions.assertNotNull(dtov2); + // Assertions.assertEquals(GROUP_ID, dtov2.getGroupId()); + // Assertions.assertEquals(artifactId, dtov2.getArtifactId()); + // Assertions.assertEquals("2", dtov2.getVersion()); + // Assertions.assertEquals("NAME", dtov2.getName()); + // Assertions.assertEquals("DESC", dtov2.getDescription()); + // Assertions.assertEquals(metaData.getLabels(), dtov2.getLabels()); + // + // versions = storage().getArtifactVersions(GROUP_ID, artifactId); + // Assertions.assertNotNull(versions); + // 
Assertions.assertFalse(versions.isEmpty()); + // Assertions.assertEquals(2, versions.size()); + // + // ArtifactVersionMetaDataDto vmd = storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "2"); + // Assertions.assertNotNull(vmd); + // Assertions.assertEquals("NAME", vmd.getName()); + // Assertions.assertEquals("DESC", vmd.getDescription()); + // } + // + // @Test + // public void testGetArtifactMetaDataByGlobalId() throws Exception { + // String artifactId = "testGetArtifactMetaDataByGlobalId-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // Assertions.assertEquals("Empty API", dto.getName()); + // Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); + // Assertions.assertNull(dto.getLabels()); + // Assertions.assertEquals("1", dto.getVersion()); + // + // long globalId = dto.getGlobalId(); + // + // dto = storage().getArtifactVersionMetaData(globalId); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // Assertions.assertEquals("Empty API", dto.getName()); + // Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); + // Assertions.assertNull(dto.getLabels()); + // Assertions.assertEquals("1", dto.getVersion()); + // } + // + // @Test + // public void testUpdateArtifactMetaData() throws Exception { + // String artifactId = "testUpdateArtifactMetaData-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // 
Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // Assertions.assertEquals("Empty API", dto.getName()); + // Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); + // Assertions.assertNull(dto.getLabels()); + // Assertions.assertEquals("1", dto.getVersion()); + // + // String newName = "Updated Name"; + // String newDescription = "Updated description."; + // Map newLabels = new HashMap<>(); + // newLabels.put("foo", "bar"); + // newLabels.put("ting", "bin"); + // EditableArtifactMetaDataDto emd = new EditableArtifactMetaDataDto(newName, newDescription, null, + // newLabels); + // storage().updateArtifactMetaData(GROUP_ID, artifactId, emd); + // + // ArtifactMetaDataDto metaData = storage().getArtifactMetaData(GROUP_ID, artifactId); + // Assertions.assertNotNull(metaData); + // Assertions.assertEquals(newName, metaData.getName()); + // Assertions.assertEquals(newDescription, metaData.getDescription()); + // } + // + // @Test + // public void testUpdateArtifactVersionState() throws Exception { + // String artifactId = "testUpdateArtifactVersionState-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // + // ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); + // ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, contentv2, null); + // Assertions.assertNotNull(dtov2); + // Assertions.assertEquals(GROUP_ID, dtov2.getGroupId()); + // Assertions.assertEquals(artifactId, dtov2.getArtifactId()); + // Assertions.assertEquals("2", dtov2.getVersion()); + // + // updateVersionState(GROUP_ID, artifactId, "1", VersionState.DISABLED); + // 
updateVersionState(GROUP_ID, artifactId, "2", VersionState.DEPRECATED); + // + // ArtifactVersionMetaDataDto v1 = storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "1"); + // ArtifactVersionMetaDataDto v2 = storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "2"); + // Assertions.assertNotNull(v1); + // Assertions.assertNotNull(v2); + // Assertions.assertEquals(VersionState.DISABLED, v1.getState()); + // Assertions.assertEquals(VersionState.DEPRECATED, v2.getState()); + // } + // + // @Test + // public void testUpdateArtifactVersionMetaData() throws Exception { + // String artifactId = "testUpdateArtifactVersionMetaData-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // Assertions.assertEquals("Empty API", dto.getName()); + // Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); + // Assertions.assertNull(dto.getLabels()); + // Assertions.assertEquals("1", dto.getVersion()); + // + // String newName = "Updated Name"; + // String newDescription = "Updated description."; + // Map newLabels = new HashMap<>(); + // newLabels.put("foo", "bar"); + // newLabels.put("ting", "bin"); + // EditableVersionMetaDataDto emd = new EditableVersionMetaDataDto(newName, newDescription, null, + // newLabels); + // storage().updateArtifactVersionMetaData(GROUP_ID, artifactId, "1", emd); + // + // ArtifactVersionMetaDataDto metaData = storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "1"); + // Assertions.assertNotNull(metaData); + // Assertions.assertEquals(newName, metaData.getName()); + // Assertions.assertEquals(newDescription, metaData.getDescription()); + // } + // + // @Test + // public void testDeleteArtifact() 
throws Exception { + // String artifactId = "testDeleteArtifact-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // Assertions.assertEquals("Empty API", dto.getName()); + // Assertions.assertEquals("An example API design using OpenAPI.", dto.getDescription()); + // Assertions.assertNull(dto.getLabels()); + // Assertions.assertEquals("1", dto.getVersion()); + // + // storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); + // + // storage().deleteArtifact(GROUP_ID, artifactId); + // + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); + // }); + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactMetaData(GROUP_ID, artifactId); + // }); + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactVersionContent(GROUP_ID, artifactId, "1"); + // }); + // Assertions.assertThrows(VersionNotFoundException.class, () -> { + // storage().getArtifactVersionMetaData(GROUP_ID, artifactId, "1"); + // }); + // } + // + // @Test + // public void testDeleteArtifactVersion() throws Exception { + // // Delete the only version + // //////////////////////////// + // String artifactId = "testDeleteArtifactVersion-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals("1", dto.getVersion()); + // + // storage().deleteArtifactVersion(GROUP_ID, artifactId, "1"); + // + // final String aid1 = 
artifactId; + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactVersionContent(GROUP_ID, aid1, "1"); + // }); + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactMetaData(GROUP_ID, aid1); + // }); + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactVersionContent(GROUP_ID, aid1, "1"); + // }); + // Assertions.assertThrows(VersionNotFoundException.class, () -> { + // storage().getArtifactVersionMetaData(GROUP_ID, aid1, "1"); + // }); + // + // // Delete one of multiple versions + // artifactId = "testDeleteArtifactVersion-2"; + // content = ContentHandle.create(OPENAPI_CONTENT); + // dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals("1", dto.getVersion()); + // + // ContentHandle contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); + // ArtifactVersionMetaDataDto dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, contentv2, null); + // Assertions.assertNotNull(dtov2); + // Assertions.assertEquals("2", dtov2.getVersion()); + // + // storage().deleteArtifactVersion(GROUP_ID, artifactId, "1"); + // + // final String aid2 = artifactId; + // + // storage().getArtifactMetaData(GROUP_ID, aid2); + // storage().getArtifactVersionContent(GROUP_ID, aid2, "2"); + // storage().getArtifactVersionMetaData(GROUP_ID, aid2, "2"); + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactVersionContent(GROUP_ID, aid2, "1"); + // }); + // Assertions.assertThrows(VersionNotFoundException.class, () -> { + // storage().getArtifactVersionMetaData(GROUP_ID, aid2, "1"); + // }); + // + // ArtifactVersionMetaDataDto dtov3 = storage().createArtifactVersion(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dtov3); + // 
Assertions.assertEquals("3", dtov3.getVersion()); + // + // // Update version 2 to DISABLED state and delete latest version + // updateVersionState(GROUP_ID, artifactId, "2", VersionState.DISABLED); + // storage().deleteArtifactVersion(GROUP_ID, artifactId, "3"); + // + // GAV latestGAV = storage().getBranchTip(new GA(GROUP_ID, artifactId), BranchId.LATEST, DEFAULT); + // ArtifactVersionMetaDataDto artifactMetaData = storage().getArtifactVersionMetaData(GROUP_ID, aid2, + // latestGAV.getRawVersionId()); + // Assertions.assertNotNull(artifactMetaData); + // Assertions.assertEquals("2", artifactMetaData.getVersion()); + // Assertions.assertEquals(aid2, artifactMetaData.getArtifactId()); + // + // // Delete the latest version + // artifactId = "testDeleteArtifactVersion-3"; + // content = ContentHandle.create(OPENAPI_CONTENT); + // dto = storage().createArtifact(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals("1", dto.getVersion()); + // + // contentv2 = ContentHandle.create(OPENAPI_CONTENT_V2); + // dtov2 = storage().createArtifactVersion(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, contentv2, + // null); + // Assertions.assertNotNull(dtov2); + // + // final String aid3 = artifactId; + // storage().deleteArtifactVersion(GROUP_ID, aid3, "2"); + // List versions = storage().getArtifactVersions(GROUP_ID, aid3); + // Assertions.assertNotNull(versions); + // Assertions.assertFalse(versions.isEmpty()); + // Assertions.assertEquals(1, versions.size()); + // Assertions.assertEquals("1", versions.iterator().next()); + // + // VersionSearchResultsDto result = storage().searchVersions(GROUP_ID, aid3, 0, 10); + // Assertions.assertNotNull(result); + // Assertions.assertEquals(1, result.getCount()); + // Assertions.assertEquals("1", result.getVersions().iterator().next().getVersion()); + // + // artifactMetaData = storage().getArtifactVersionMetaData(GROUP_ID, aid3, "1"); + // 
Assertions.assertNotNull(artifactMetaData); + // Assertions.assertEquals("1", artifactMetaData.getVersion()); + // Assertions.assertEquals(aid3, artifactMetaData.getArtifactId()); + // + // storage().getArtifactVersionContent(GROUP_ID, aid3, "1"); + // ArtifactVersionMetaDataDto metaData = storage().getArtifactVersionMetaData(GROUP_ID, aid3, "1"); + // Assertions.assertNotNull(metaData); + // Assertions.assertEquals("1", metaData.getVersion()); + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactVersionContent(GROUP_ID, aid3, "2"); + // }); + // Assertions.assertThrows(VersionNotFoundException.class, () -> { + // storage().getArtifactVersionMetaData(GROUP_ID, aid3, "2"); + // }); + // + // // Delete the only artifact version left - same as deleting the whole artifact + // storage().deleteArtifactVersion(GROUP_ID, aid3, "1"); + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> { + // storage().getArtifactMetaData(GROUP_ID, aid3); + // }); + // } + // + // @Test + // public void testCreateArtifactRule() throws Exception { + // String artifactId = "testCreateArtifactRule-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // + // List artifactRules = storage().getArtifactRules(GROUP_ID, artifactId); + // Assertions.assertNotNull(artifactRules); + // Assertions.assertTrue(artifactRules.isEmpty()); + // + // RuleConfigurationDto configDto = new RuleConfigurationDto("FULL"); + // storage().createArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY, configDto); + // + // artifactRules = storage().getArtifactRules(GROUP_ID, artifactId); + // Assertions.assertNotNull(artifactRules); + // 
Assertions.assertFalse(artifactRules.isEmpty()); + // Assertions.assertEquals(1, artifactRules.size()); + // Assertions.assertEquals(RuleType.VALIDITY, artifactRules.get(0)); + // } + // + // @Test + // public void testUpdateArtifactRule() throws Exception { + // String artifactId = "testUpdateArtifactRule-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // + // RuleConfigurationDto configDto = new RuleConfigurationDto("FULL"); + // storage().createArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY, configDto); + // + // RuleConfigurationDto rule = storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); + // Assertions.assertNotNull(rule); + // Assertions.assertEquals("FULL", rule.getConfiguration()); + // + // RuleConfigurationDto updatedConfig = new RuleConfigurationDto("NONE"); + // storage().updateArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY, updatedConfig); + // + // rule = storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); + // Assertions.assertNotNull(rule); + // Assertions.assertEquals("NONE", rule.getConfiguration()); + // } + // + // @Test + // public void testDeleteArtifactRule() throws Exception { + // String artifactId = "testDeleteArtifactRule-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // + // RuleConfigurationDto configDto = new RuleConfigurationDto("FULL"); + // 
storage().createArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY, configDto); + // + // RuleConfigurationDto rule = storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); + // Assertions.assertNotNull(rule); + // Assertions.assertEquals("FULL", rule.getConfiguration()); + // + // storage().deleteArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); + // + // Assertions.assertThrows(RuleNotFoundException.class, () -> { + // storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); + // }); + // } + // + // @Test + // public void testDeleteAllArtifactRules() throws Exception { + // String artifactId = "testDeleteAllArtifactRulse-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // + // RuleConfigurationDto configDto = new RuleConfigurationDto("FULL"); + // storage().createArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY, configDto); + // storage().createArtifactRule(GROUP_ID, artifactId, RuleType.COMPATIBILITY, configDto); + // + // List rules = storage().getArtifactRules(GROUP_ID, artifactId); + // Assertions.assertEquals(2, rules.size()); + // + // storage().deleteArtifactRules(GROUP_ID, artifactId); + // + // Assertions.assertThrows(RuleNotFoundException.class, () -> { + // storage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY); + // }); + // Assertions.assertThrows(RuleNotFoundException.class, () -> { + // storage().getArtifactRule(GROUP_ID, artifactId, RuleType.COMPATIBILITY); + // }); + // } + // + // @Test + // public void testGlobalRules() { + // List globalRules = storage().getGlobalRules(); + // Assertions.assertNotNull(globalRules); + // Assertions.assertTrue(globalRules.isEmpty()); + // + // 
RuleConfigurationDto config = new RuleConfigurationDto(); + // config.setConfiguration("FULL"); + // storage().createGlobalRule(RuleType.COMPATIBILITY, config); + // + // RuleConfigurationDto rule = storage().getGlobalRule(RuleType.COMPATIBILITY); + // Assertions.assertEquals(rule.getConfiguration(), config.getConfiguration()); + // + // globalRules = storage().getGlobalRules(); + // Assertions.assertNotNull(globalRules); + // Assertions.assertFalse(globalRules.isEmpty()); + // Assertions.assertEquals(globalRules.size(), 1); + // Assertions.assertEquals(globalRules.get(0), RuleType.COMPATIBILITY); + // + // Assertions.assertThrows(RuleAlreadyExistsException.class, () -> { + // storage().createGlobalRule(RuleType.COMPATIBILITY, config); + // }); + // + // RuleConfigurationDto updatedConfig = new RuleConfigurationDto("FORWARD"); + // storage().updateGlobalRule(RuleType.COMPATIBILITY, updatedConfig); + // + // rule = storage().getGlobalRule(RuleType.COMPATIBILITY); + // Assertions.assertEquals(rule.getConfiguration(), updatedConfig.getConfiguration()); + // + // Assertions.assertThrows(RuleNotFoundException.class, () -> { + // storage().updateGlobalRule(RuleType.VALIDITY, config); + // }); + // + // storage().deleteGlobalRules(); + // globalRules = storage().getGlobalRules(); + // Assertions.assertNotNull(globalRules); + // Assertions.assertTrue(globalRules.isEmpty()); + // + // storage().createGlobalRule(RuleType.COMPATIBILITY, config); + // storage().deleteGlobalRule(RuleType.COMPATIBILITY); + // globalRules = storage().getGlobalRules(); + // Assertions.assertNotNull(globalRules); + // Assertions.assertTrue(globalRules.isEmpty()); + // } + // + // @Test + // public void testSearchArtifacts() throws Exception { + // String artifactIdPrefix = "testSearchArtifacts-"; + // for (int idx = 1; idx <= 50; idx++) { + // String idxs = (idx < 10 ? 
"0" : "") + idx; + // String artifactId = artifactIdPrefix + idxs; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // Map labels = Collections.singletonMap("key", "value-" + idx); + // EditableArtifactMetaDataDto metaData = new EditableArtifactMetaDataDto( + // artifactId + "-name", + // artifactId + "-description", + // null, + // labels); + // storage().createArtifactWithMetadata(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, + // metaData, null); + // } + // + // long start = System.currentTimeMillis(); + // + // Set filters = Collections.singleton(SearchFilter.ofName("testSearchArtifacts")); + // ArtifactSearchResultsDto results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, + // 0, 10); + // Assertions.assertNotNull(results); + // Assertions.assertEquals(50, results.getCount()); + // Assertions.assertNotNull(results.getArtifacts()); + // Assertions.assertEquals(10, results.getArtifacts().size()); + // + // + // filters = Collections.singleton(SearchFilter.ofName("testSearchArtifacts-19-name")); + // results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); + // Assertions.assertNotNull(results); + // Assertions.assertEquals(1, results.getCount()); + // Assertions.assertNotNull(results.getArtifacts()); + // Assertions.assertEquals(1, results.getArtifacts().size()); + // Assertions.assertEquals("testSearchArtifacts-19-name", results.getArtifacts().get(0).getName()); + // + // + // filters = Collections.singleton(SearchFilter.ofDescription("testSearchArtifacts-33-description")); + // results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); + // Assertions.assertNotNull(results); + // Assertions.assertEquals(1, results.getCount()); + // Assertions.assertNotNull(results.getArtifacts()); + // Assertions.assertEquals(1, results.getArtifacts().size()); + // Assertions.assertEquals("testSearchArtifacts-33-name", results.getArtifacts().get(0).getName()); 
+ // + // + // filters = Collections.emptySet(); + // results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); + // Assertions.assertNotNull(results); + // Assertions.assertNotNull(results.getArtifacts()); + // Assertions.assertEquals(10, results.getArtifacts().size()); + // + // + // filters = Collections.singleton(SearchFilter.ofEverything("testSearchArtifacts")); + // results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 1000); + // Assertions.assertNotNull(results); + // Assertions.assertEquals(50, results.getCount()); + // Assertions.assertNotNull(results.getArtifacts()); + // Assertions.assertEquals(50, results.getArtifacts().size()); + // Assertions.assertEquals("testSearchArtifacts-01-name", results.getArtifacts().get(0).getName()); + // Assertions.assertEquals("testSearchArtifacts-02-name", results.getArtifacts().get(1).getName()); + // + // + // filters = Collections.singleton(SearchFilter.ofLabel("key", "value-17")); + // results = storage().searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); + // Assertions.assertNotNull(results); + // Assertions.assertEquals(1, results.getCount()); + // Assertions.assertNotNull(results.getArtifacts()); + // Assertions.assertEquals(1, results.getArtifacts().size()); + // Assertions.assertEquals("testSearchArtifacts-17-name", results.getArtifacts().get(0).getName()); + // + // + // long end = System.currentTimeMillis(); + // System.out.println("Search time: " + (end - start) + "ms"); + // } + // + // @Test + // public void testSearchVersions() throws Exception { + // String artifactId = "testSearchVersions-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, 
dto.getArtifactId()); + // + // // Add more versions + // for (int idx = 2; idx <= 50; idx++) { + // content = ContentHandle.create(OPENAPI_CONTENT_TEMPLATE.replaceAll("VERSION", "1.0." + idx)); + // EditableVersionMetaDataDto metaData = new EditableVersionMetaDataDto( + // artifactId + "-name-" + idx, + // artifactId + "-description-" + idx, + // null, null); + // storage().createArtifactVersionWithMetadata(GROUP_ID, artifactId, null, ArtifactType.OPENAPI, content, + // metaData, null); + // } + // + // TestUtils.retry(() -> { + // VersionSearchResultsDto results = storage().searchVersions(GROUP_ID, artifactId, 0, 10); + // Assertions.assertNotNull(results); + // Assertions.assertEquals(50, results.getCount()); + // Assertions.assertEquals(10, results.getVersions().size()); + // + // results = storage().searchVersions(GROUP_ID, artifactId, 0, 1000); + // Assertions.assertNotNull(results); + // Assertions.assertEquals(50, results.getCount()); + // Assertions.assertEquals(50, results.getVersions().size()); + // }); + // } + // + // private void createSomeUserData() { + // final String group1 = "testGroup-1"; + // final String group2 = "testGroup-2"; + // final String artifactId1 = "testArtifact-1"; + // final String artifactId2 = "testArtifact-2"; + // final String principal = "testPrincipal"; + // final String role = "testRole"; + // + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // // storage().createGroup(GroupMetaDataDto.builder().groupId(group1).build()); + // // ^ TODO Uncomment after https://github.com/Apicurio/apicurio-registry/issues/1721 + // ArtifactVersionMetaDataDto artifactDto1 = storage().createArtifact(group1, artifactId1, null, + // ArtifactType.OPENAPI, content, null); + // storage().createArtifactRule(group1, artifactId1, RuleType.VALIDITY, + // RuleConfigurationDto.builder().configuration("FULL").build()); + // ArtifactVersionMetaDataDto artifactDto2 = storage().createArtifactWithMetadata( + // group2, artifactId2, null, 
ArtifactType.OPENAPI, content, + // EditableArtifactMetaDataDto.builder().name("test").build(), null); + // storage().createGlobalRule(RuleType.VALIDITY, + // RuleConfigurationDto.builder().configuration("FULL").build()); + // storage().createRoleMapping(principal, role, null); + // + // // Verify data exists + // + // Assertions.assertNotNull(storage().getArtifactVersionContent(group1, artifactId1, + // artifactDto1.getVersion())); + // Assertions.assertEquals(1, storage().getArtifactRules(group1, artifactId1).size()); + // Assertions.assertNotNull(storage().getArtifactVersionContent(group2, artifactId2, + // artifactDto2.getVersion())); + // Assertions.assertEquals(1, storage().getGlobalRules().size()); + // Assertions.assertEquals(role, storage().getRoleForPrincipal(principal)); + // } + // + // private int countStorageEntities() { + // // We don't need thread safety, but it's simpler to use this when effectively final counter is needed + // final AtomicInteger count = new AtomicInteger(0); + // storage().exportData(e -> { + // if (e.getEntityType() != EntityType.Manifest) { + // log.debug("Counting from export: {}", e); + // count.incrementAndGet(); + // } + // return null; + // }); + // int res = count.get(); + // // Count data that is not exported + // res += storage().getRoleMappings().size(); + // return res; + // } + // + // @Test + // public void testDeleteAllUserData() { + // // Delete first to cleanup after other tests + // storage().deleteAllUserData(); + // createSomeUserData(); + // Assertions.assertEquals(10, countStorageEntities()); + // // ^ TODO Change to 9 after https://github.com/Apicurio/apicurio-registry/issues/1721 + // // Delete all + // storage().deleteAllUserData(); + // Assertions.assertEquals(0, countStorageEntities()); + // } + // + // @Test + // public void testConfigProperties() throws Exception { + // List properties = storage().getConfigProperties(); + // Assertions.assertNotNull(properties); + // 
Assertions.assertTrue(properties.isEmpty()); + // + // storage().setConfigProperty(new DynamicConfigPropertyDto("apicurio.test.property-string", + // "test-value")); + // storage().setConfigProperty(new DynamicConfigPropertyDto("apicurio.test.property-boolean", "true")); + // storage().setConfigProperty(new DynamicConfigPropertyDto("apicurio.test.property-long", "12345")); + // + // properties = storage().getConfigProperties(); + // Assertions.assertNotNull(properties); + // Assertions.assertFalse(properties.isEmpty()); + // Assertions.assertEquals(3, properties.size()); + // + // DynamicConfigPropertyDto stringProp = getProperty(properties, "apicurio.test.property-string"); + // DynamicConfigPropertyDto boolProp = getProperty(properties, "apicurio.test.property-boolean"); + // DynamicConfigPropertyDto longProp = getProperty(properties, "apicurio.test.property-long"); + // + // Assertions.assertNotNull(stringProp); + // Assertions.assertNotNull(boolProp); + // Assertions.assertNotNull(longProp); + // + // Assertions.assertEquals("test-value", stringProp.getValue()); + // Assertions.assertEquals("true", boolProp.getValue()); + // Assertions.assertEquals("12345", longProp.getValue()); + // } + // + // private DynamicConfigPropertyDto getProperty(List properties, String + // propertyName) { + // for (DynamicConfigPropertyDto prop : properties) { + // if (prop.getName().equals(propertyName)) { + // return prop; + // } + // } + // return null; + // } + // + // + // @Test + // public void testComments() { + // String artifactId = "testComments-1"; + // ContentHandle content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dto = storage().createArtifact(GROUP_ID, artifactId, null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dto); + // Assertions.assertEquals(GROUP_ID, dto.getGroupId()); + // Assertions.assertEquals(artifactId, dto.getArtifactId()); + // + // List comments = storage().getArtifactVersionComments(GROUP_ID, 
artifactId, + // dto.getVersion()); + // Assertions.assertTrue(comments.isEmpty()); + // + // storage().createArtifactVersionComment(GROUP_ID, artifactId, dto.getVersion(), "TEST_COMMENT_1"); + // storage().createArtifactVersionComment(GROUP_ID, artifactId, dto.getVersion(), "TEST_COMMENT_2"); + // storage().createArtifactVersionComment(GROUP_ID, artifactId, dto.getVersion(), "TEST_COMMENT_3"); + // + // comments = storage().getArtifactVersionComments(GROUP_ID, artifactId, dto.getVersion()); + // Assertions.assertEquals(3, comments.size()); + // + // storage().deleteArtifactVersionComment(GROUP_ID, artifactId, dto.getVersion(), + // comments.get(1).getCommentId()); + // + // comments = storage().getArtifactVersionComments(GROUP_ID, artifactId, dto.getVersion()); + // Assertions.assertEquals(2, comments.size()); + // + // storage().updateArtifactVersionComment(GROUP_ID, artifactId, dto.getVersion(), + // comments.get(0).getCommentId(), "TEST_COMMENT_4"); + // + // comments = storage().getArtifactVersionComments(GROUP_ID, artifactId, dto.getVersion()); + // Assertions.assertEquals(2, comments.size()); + // Assertions.assertEquals("TEST_COMMENT_4", comments.get(0).getValue()); + // } + // + // + // @Test + // public void testBranches() { + // + // var ga = new GA(GROUP_ID, "foo"); + // + // Assertions.assertThrows(ArtifactNotFoundException.class, () -> storage().getBranches(ga)); + // + // var content = ContentHandle.create(OPENAPI_CONTENT); + // ArtifactVersionMetaDataDto dtoV1 = storage().createArtifact(GROUP_ID, ga.getRawArtifactId(), null, + // ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dtoV1); + // Assertions.assertEquals(ga.getRawGroupIdWithDefaultString(), dtoV1.getGroupId()); + // Assertions.assertEquals(ga.getRawArtifactId(), dtoV1.getArtifactId()); + // + // var branches = storage().getBranches(ga); + // Assertions.assertEquals(Map.of(BranchId.LATEST, List.of(new GAV(ga, dtoV1.getVersion()))), branches); + // + // var latestBranch = 
storage().getBranch(ga, BranchId.LATEST, DEFAULT); + // Assertions.assertEquals(List.of(new GAV(ga, dtoV1.getVersion())), latestBranch); + // + // var gavV1 = storage().getBranchTip(ga, BranchId.LATEST, DEFAULT); + // Assertions.assertNotNull(gavV1); + // Assertions.assertEquals(gavV1.getRawGroupIdWithDefaultString(), dtoV1.getGroupId()); + // Assertions.assertEquals(gavV1.getRawArtifactId(), dtoV1.getArtifactId()); + // Assertions.assertEquals(gavV1.getRawVersionId(), dtoV1.getVersion()); + // + // var otherBranchId = new BranchId("other"); + // storage().createOrUpdateBranch(gavV1, otherBranchId); + // + // content = ContentHandle.create(OPENAPI_CONTENT_V2); + // var dtoV2 = storage().createArtifactVersion(ga.getRawGroupIdWithDefaultString(), ga.getRawArtifactId(), + // null, ArtifactType.OPENAPI, content, null); + // Assertions.assertNotNull(dtoV2); + // Assertions.assertEquals(ga.getRawGroupIdWithDefaultString(), dtoV2.getGroupId()); + // Assertions.assertEquals(ga.getRawArtifactId(), dtoV2.getArtifactId()); + // + // branches = storage().getBranches(ga); + // Assertions.assertEquals(Map.of( + // BranchId.LATEST, List.of(new GAV(ga, dtoV2.getVersion()), new GAV(ga, dtoV1.getVersion())), + // otherBranchId, List.of(new GAV(ga, dtoV1.getVersion())) + // ), branches); + // + // latestBranch = storage().getBranch(ga, BranchId.LATEST, DEFAULT); + // Assertions.assertEquals(List.of(new GAV(ga, dtoV2.getVersion()), new GAV(ga, dtoV1.getVersion())), + // latestBranch); + // + // var otherBranch = storage().getBranch(ga, otherBranchId, DEFAULT); + // Assertions.assertEquals(List.of(new GAV(ga, dtoV1.getVersion())), otherBranch); + // + // var gavV2 = storage().getBranchTip(ga, BranchId.LATEST, DEFAULT); + // Assertions.assertNotNull(gavV2); + // Assertions.assertEquals(gavV2.getRawGroupIdWithDefaultString(), dtoV2.getGroupId()); + // Assertions.assertEquals(gavV2.getRawArtifactId(), dtoV2.getArtifactId()); + // Assertions.assertEquals(gavV2.getRawVersionId(), 
dtoV2.getVersion()); + // + // gavV1 = storage().getBranchTip(ga, otherBranchId, DEFAULT); + // Assertions.assertNotNull(gavV1); + // Assertions.assertEquals(gavV1.getRawGroupIdWithDefaultString(), dtoV1.getGroupId()); + // Assertions.assertEquals(gavV1.getRawArtifactId(), dtoV1.getArtifactId()); + // Assertions.assertEquals(gavV1.getRawVersionId(), dtoV1.getVersion()); + // + // storage().createOrUpdateBranch(gavV2, otherBranchId); + // + // branches = storage().getBranches(ga); + // Assertions.assertEquals(Map.of( + // BranchId.LATEST, List.of(new GAV(ga, dtoV2.getVersion()), new GAV(ga, dtoV1.getVersion())), + // otherBranchId, List.of(new GAV(ga, dtoV2.getVersion()), new GAV(ga, dtoV1.getVersion())) + // ), branches); + // + // Assertions.assertEquals(storage().getBranch(ga, BranchId.LATEST, DEFAULT), storage().getBranch(ga, + // otherBranchId, DEFAULT)); + // Assertions.assertEquals(storage().getBranchTip(ga, BranchId.LATEST, DEFAULT), + // storage().getBranchTip(ga, otherBranchId, DEFAULT)); + // + // updateVersionState(gavV2.getRawGroupIdWithDefaultString(), gavV2.getRawArtifactId(), + // gavV2.getRawVersionId(), VersionState.DISABLED); + // Assertions.assertEquals(List.of(gavV1), storage().getBranch(ga, BranchId.LATEST, + // SKIP_DISABLED_LATEST)); + // Assertions.assertEquals(gavV1, storage().getBranchTip(ga, BranchId.LATEST, + // ArtifactRetrievalBehavior.SKIP_DISABLED_LATEST)); + // + // updateVersionState(gavV2.getRawGroupIdWithDefaultString(), gavV2.getRawArtifactId(), + // gavV2.getRawVersionId(), VersionState.ENABLED); + // Assertions.assertEquals(List.of(gavV2, gavV1), storage().getBranch(ga, BranchId.LATEST, + // SKIP_DISABLED_LATEST)); + // Assertions.assertEquals(gavV2, storage().getBranchTip(ga, BranchId.LATEST, + // ArtifactRetrievalBehavior.SKIP_DISABLED_LATEST)); + // + // storage().deleteArtifactVersion(gavV1.getRawGroupIdWithDefaultString(), gavV1.getRawArtifactId(), + // gavV1.getRawVersionId()); + // + // 
Assertions.assertEquals(List.of(gavV2), storage().getBranch(ga, BranchId.LATEST, DEFAULT)); + // Assertions.assertEquals(List.of(gavV2), storage().getBranch(ga, otherBranchId, DEFAULT)); + // + // storage().deleteBranch(ga, otherBranchId); + // + // Assertions.assertThrows(BranchNotFoundException.class, () -> storage().getBranch(ga, otherBranchId, + // DEFAULT)); + // Assertions.assertThrows(VersionNotFoundException.class, () -> storage().getBranchTip(ga, otherBranchId, + // DEFAULT)); + // + // Assertions.assertThrows(NotAllowedException.class, () -> storage().deleteBranch(ga, BranchId.LATEST)); + // } + // + // private void updateVersionState(String groupId, String artifactId, String version, VersionState + // newState) { + // storage().updateArtifactVersionMetaData(groupId, artifactId, version, + // EditableVersionMetaDataDto.builder() + // .state(newState) + // .build()); + // } + // + // private static String generateString(int size) { + // StringBuilder builder = new StringBuilder(); + // for (int i = 0; i < size; i++) { + // builder.append("a"); + // } + // Assertions.assertEquals(size, builder.toString().length()); + // return builder.toString(); + // } } diff --git a/app/src/test/java/io/apicurio/registry/noprofile/storage/RegistryStoragePerformanceTest.java b/app/src/test/java/io/apicurio/registry/noprofile/storage/RegistryStoragePerformanceTest.java index 1af5ad2059..5d6ec76c4b 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/storage/RegistryStoragePerformanceTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/storage/RegistryStoragePerformanceTest.java @@ -31,16 +31,11 @@ public class RegistryStoragePerformanceTest { private static final String GROUP_ID = RegistryStoragePerformanceTest.class.getSimpleName(); private static final int NUM_ARTIFACTS = 50000; -// private static final int NUM_VERSIONS = 5; + // private static final int NUM_VERSIONS = 5; - private static final String OPENAPI_CONTENT_TEMPLATE = "{" + - " \"openapi\": 
\"3.0.2\"," + - " \"info\": {" + - " \"title\": \"TITLE\"," + - " \"version\": \"VERSION\"," + - " \"description\": \"DESCRIPTION\"" + - " }" + - "}"; + private static final String OPENAPI_CONTENT_TEMPLATE = "{" + " \"openapi\": \"3.0.2\"," + + " \"info\": {" + " \"title\": \"TITLE\"," + " \"version\": \"VERSION\"," + + " \"description\": \"DESCRIPTION\"" + " }" + "}"; @Inject @Current @@ -70,20 +65,14 @@ public void testStoragePerformance() throws Exception { Map labels = new HashMap<>(); labels.put("key", "value"); labels.put("key-" + idx, "value-" + idx); - ContentHandle content = ContentHandle.create( - OPENAPI_CONTENT_TEMPLATE - .replaceAll("TITLE", title) - .replaceAll("DESCRIPTION", description) - .replaceAll("VERSION", String.valueOf(idx))); - ContentWrapperDto versionContent = ContentWrapperDto.builder() - .content(content) - .contentType(ContentTypes.APPLICATION_JSON) - .build(); - EditableArtifactMetaDataDto metaData = new EditableArtifactMetaDataDto(title, description, null, labels); - EditableVersionMetaDataDto versionMetaData = EditableVersionMetaDataDto.builder() - .name(title) - .description(description) - .build(); + ContentHandle content = ContentHandle.create(OPENAPI_CONTENT_TEMPLATE.replaceAll("TITLE", title) + .replaceAll("DESCRIPTION", description).replaceAll("VERSION", String.valueOf(idx))); + ContentWrapperDto versionContent = ContentWrapperDto.builder().content(content) + .contentType(ContentTypes.APPLICATION_JSON).build(); + EditableArtifactMetaDataDto metaData = new EditableArtifactMetaDataDto(title, description, null, + labels); + EditableVersionMetaDataDto versionMetaData = EditableVersionMetaDataDto.builder().name(title) + .description(description).build(); storage.createArtifact(GROUP_ID, artifactId, ArtifactType.OPENAPI, metaData, null, versionContent, versionMetaData, List.of()); @@ -95,7 +84,8 @@ public void testStoragePerformance() throws Exception { long endCreate = System.currentTimeMillis(); long startGetArtifact = 
System.currentTimeMillis(); - StoredArtifactVersionDto storedArtifact = storage.getArtifactVersionContent(GROUP_ID, artifactIdPrefix + "77", "1"); + StoredArtifactVersionDto storedArtifact = storage.getArtifactVersionContent(GROUP_ID, + artifactIdPrefix + "77", "1"); long endGetArtifact = System.currentTimeMillis(); Assertions.assertNotNull(storedArtifact); @@ -106,7 +96,8 @@ public void testStoragePerformance() throws Exception { long startAllSearch = System.currentTimeMillis(); Set filters = Collections.emptySet(); - ArtifactSearchResultsDto results = storage.searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 20); + ArtifactSearchResultsDto results = storage.searchArtifacts(filters, OrderBy.name, OrderDirection.asc, + 0, 20); long endAllSearch = System.currentTimeMillis(); Assertions.assertNotNull(results); Assertions.assertEquals(NUM_ARTIFACTS, results.getCount()); @@ -127,7 +118,7 @@ public void testStoragePerformance() throws Exception { long startLabelSearch = System.currentTimeMillis(); - filters = Collections.singleton(SearchFilter.ofLabel("key-" + (NUM_ARTIFACTS-1))); + filters = Collections.singleton(SearchFilter.ofLabel("key-" + (NUM_ARTIFACTS - 1))); results = storage.searchArtifacts(filters, OrderBy.name, OrderDirection.asc, 0, 10); long endLabelSearch = System.currentTimeMillis(); Assertions.assertNotNull(results); @@ -143,10 +134,12 @@ public void testStoragePerformance() throws Exception { System.out.println("========================================================================"); System.out.println("= Storage Performance Results ="); System.out.println("=----------------------------------------------------------------------="); - System.out.println("| Time to create " + NUM_ARTIFACTS + " artifacts: " + (endCreate - startCreate) + "ms"); + System.out.println( + "| Time to create " + NUM_ARTIFACTS + " artifacts: " + (endCreate - startCreate) + "ms"); System.out.println("| "); System.out.println("| Get Artifact Content: " + 
(endGetArtifact - startGetArtifact) + "ms"); - System.out.println("| Get Artifact Meta-Data: " + (endGetArtifactMetaData - startGetArtifactMetaData) + "ms"); + System.out.println( + "| Get Artifact Meta-Data: " + (endGetArtifactMetaData - startGetArtifactMetaData) + "ms"); System.out.println("| "); System.out.println("| All Artifact Search: " + (endAllSearch - startAllSearch) + "ms"); System.out.println("| Single Name Search: " + (endNameSearch - startNameSearch) + "ms"); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/storage/RegistryStorageSmokeTest.java b/app/src/test/java/io/apicurio/registry/noprofile/storage/RegistryStorageSmokeTest.java index 3ce66747b7..fdac6f7fc3 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/storage/RegistryStorageSmokeTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/storage/RegistryStorageSmokeTest.java @@ -88,51 +88,54 @@ public void testArtifactsAndMeta() throws Exception { // Create artifact 1 EditableArtifactMetaDataDto artifactMetaData1 = EditableArtifactMetaDataDto.builder().build(); ContentWrapperDto versionContent1 = ContentWrapperDto.builder() - .content(ContentHandle.create("content1")) - .contentType(ContentTypes.APPLICATION_JSON) - .build(); + .content(ContentHandle.create("content1")).contentType(ContentTypes.APPLICATION_JSON).build(); EditableVersionMetaDataDto versionMetaData1 = EditableVersionMetaDataDto.builder().build(); - ArtifactVersionMetaDataDto vmdDto1_1 = getStorage().createArtifact(GROUP_ID, artifactId1, ArtifactType.JSON, - artifactMetaData1, null, versionContent1, versionMetaData1, List.of()).getRight(); + ArtifactVersionMetaDataDto vmdDto1_1 = getStorage().createArtifact(GROUP_ID, artifactId1, + ArtifactType.JSON, artifactMetaData1, null, versionContent1, versionMetaData1, List.of()) + .getRight(); // Create version 2 (for artifact 1) - ArtifactVersionMetaDataDto vmdDto1_2 = getStorage().createArtifactVersion(GROUP_ID, artifactId1, null, ArtifactType.JSON, - 
versionContent1, versionMetaData1, List.of()); + ArtifactVersionMetaDataDto vmdDto1_2 = getStorage().createArtifactVersion(GROUP_ID, artifactId1, null, + ArtifactType.JSON, versionContent1, versionMetaData1, List.of()); // Create artifact 2 EditableArtifactMetaDataDto artifactMetaData2 = EditableArtifactMetaDataDto.builder().build(); ContentWrapperDto versionContent2 = ContentWrapperDto.builder() - .content(ContentHandle.create("content2")) - .contentType(ContentTypes.APPLICATION_JSON) - .build(); + .content(ContentHandle.create("content2")).contentType(ContentTypes.APPLICATION_JSON).build(); EditableVersionMetaDataDto versionMetaData2 = EditableVersionMetaDataDto.builder().build(); - getStorage().createArtifact(GROUP_ID, artifactId2, ArtifactType.AVRO, - artifactMetaData2, null, versionContent2, versionMetaData2, List.of()).getRight(); + getStorage().createArtifact(GROUP_ID, artifactId2, ArtifactType.AVRO, artifactMetaData2, null, + versionContent2, versionMetaData2, List.of()).getRight(); assertEquals(size + 2, getStorage().getArtifactIds(null).size()); assertTrue(getStorage().getArtifactIds(null).contains(artifactId1)); - StoredArtifactVersionDto a1 = getStorage().getArtifactVersionContent(GROUP_ID, artifactId1, vmdDto1_2.getVersion()); + StoredArtifactVersionDto a1 = getStorage().getArtifactVersionContent(GROUP_ID, artifactId1, + vmdDto1_2.getVersion()); assertNotNull(a1); assertNotNull(a1.getGlobalId()); assertNotNull(a1.getVersion()); assertNotNull(a1.getContent()); - GAV latestGAV = getStorage().getBranchTip(new GA(GROUP_ID, artifactId1), BranchId.LATEST, RetrievalBehavior.DEFAULT); - ArtifactVersionMetaDataDto metaLatest = getStorage().getArtifactVersionMetaData(GROUP_ID, artifactId1, latestGAV.getRawVersionId()); + GAV latestGAV = getStorage().getBranchTip(new GA(GROUP_ID, artifactId1), BranchId.LATEST, + RetrievalBehavior.DEFAULT); + ArtifactVersionMetaDataDto metaLatest = getStorage().getArtifactVersionMetaData(GROUP_ID, artifactId1, + 
latestGAV.getRawVersionId()); assertEquals(vmdDto1_2, metaLatest); List versions = getStorage().getArtifactVersions(GROUP_ID, artifactId1); assertEquals(2, versions.size()); assertTrue(versions.contains(a1.getVersion())); - assertEquals(a1, getStorage().getArtifactVersionContent(GROUP_ID, artifactId1, vmdDto1_2.getVersion())); + assertEquals(a1, + getStorage().getArtifactVersionContent(GROUP_ID, artifactId1, vmdDto1_2.getVersion())); // define name in an older version metadata getStorage().updateArtifactVersionMetaData(GROUP_ID, artifactId1, vmdDto1_1.getVersion(), EditableVersionMetaDataDto.builder().name("foo").build()); - ArtifactVersionMetaDataDto vmeta1 = getStorage().getArtifactVersionMetaData(GROUP_ID, artifactId1, vmdDto1_1.getVersion()); - ArtifactVersionMetaDataDto vmeta2 = getStorage().getArtifactVersionMetaData(GROUP_ID, artifactId1, vmdDto1_2.getVersion()); + ArtifactVersionMetaDataDto vmeta1 = getStorage().getArtifactVersionMetaData(GROUP_ID, artifactId1, + vmdDto1_1.getVersion()); + ArtifactVersionMetaDataDto vmeta2 = getStorage().getArtifactVersionMetaData(GROUP_ID, artifactId1, + vmdDto1_2.getVersion()); assertNotEquals(vmeta1, vmeta2); assertEquals("foo", vmeta1.getName()); assertNull(vmeta2.getName()); @@ -159,9 +162,7 @@ public void testRules() throws Exception { // Create artifact EditableArtifactMetaDataDto artifactMetaData = EditableArtifactMetaDataDto.builder().build(); ContentWrapperDto versionContent1 = ContentWrapperDto.builder() - .content(ContentHandle.create("content1")) - .contentType(ContentTypes.APPLICATION_JSON) - .build(); + .content(ContentHandle.create("content1")).contentType(ContentTypes.APPLICATION_JSON).build(); EditableVersionMetaDataDto versionMetaData = EditableVersionMetaDataDto.builder().build(); getStorage().createArtifact(GROUP_ID, artifactId, ArtifactType.JSON, artifactMetaData, null, versionContent1, versionMetaData, List.of()).getRight(); @@ -178,7 +179,8 @@ public void testRules() throws Exception { 
assertEquals(1, getStorage().getArtifactRules(GROUP_ID, artifactId).size()); assertTrue(getStorage().getArtifactRules(GROUP_ID, artifactId).contains(RuleType.VALIDITY)); - assertEquals("config", getStorage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY).getConfiguration()); + assertEquals("config", + getStorage().getArtifactRule(GROUP_ID, artifactId, RuleType.VALIDITY).getConfiguration()); assertEquals(1, getStorage().getGlobalRules().size()); assertTrue(getStorage().getGlobalRules().contains(RuleType.VALIDITY)); @@ -193,18 +195,19 @@ public void testLimitGetArtifactIds() throws Exception { final String testId2 = TestUtils.generateArtifactId(); try { - ContentWrapperDto content = ContentWrapperDto.builder() - .content(ContentHandle.create("{}")) - .contentType(ContentTypes.APPLICATION_JSON) - .build(); + ContentWrapperDto content = ContentWrapperDto.builder().content(ContentHandle.create("{}")) + .contentType(ContentTypes.APPLICATION_JSON).build(); - getStorage().createArtifact(GROUP_ID, testId0, ArtifactType.JSON, null, null, content, null, List.of()); + getStorage().createArtifact(GROUP_ID, testId0, ArtifactType.JSON, null, null, content, null, + List.of()); int size = getStorage().getArtifactIds(null).size(); // Create 2 artifacts - getStorage().createArtifact(GROUP_ID, testId1, ArtifactType.JSON, null, null, content, null, List.of()); - getStorage().createArtifact(GROUP_ID, testId2, ArtifactType.JSON, null, null, content, null, List.of()); + getStorage().createArtifact(GROUP_ID, testId1, ArtifactType.JSON, null, null, content, null, + List.of()); + getStorage().createArtifact(GROUP_ID, testId2, ArtifactType.JSON, null, null, content, null, + List.of()); int newSize = getStorage().getArtifactIds(null).size(); int limitedSize = getStorage().getArtifactIds(1).size(); diff --git a/app/src/test/java/io/apicurio/registry/noprofile/validity/ValidityRuleApplicationTest.java 
b/app/src/test/java/io/apicurio/registry/noprofile/validity/ValidityRuleApplicationTest.java index 442ce835bc..c33af1b8fc 100644 --- a/app/src/test/java/io/apicurio/registry/noprofile/validity/ValidityRuleApplicationTest.java +++ b/app/src/test/java/io/apicurio/registry/noprofile/validity/ValidityRuleApplicationTest.java @@ -68,7 +68,8 @@ public void testValidityRuleApplication() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig(ValidityLevel.FULL.name()); - clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).rules().post(createRule); + clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).rules().post(createRule); var exception = Assertions.assertThrows(io.apicurio.registry.rest.client.models.Error.class, () -> { createArtifactVersion(artifactId, INVALID_SCHEMA, ContentTypes.APPLICATION_JSON); @@ -84,7 +85,8 @@ public void testValidityRuleApplication_Map() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig(ValidityLevel.FULL.name()); - clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).rules().post(createRule); + clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).rules().post(createRule); var exception = Assertions.assertThrows(io.apicurio.registry.rest.client.models.Error.class, () -> { createArtifactVersion(artifactId, INVALID_SCHEMA_WITH_MAP, ContentTypes.APPLICATION_JSON); diff --git a/app/src/test/java/io/apicurio/registry/rbac/AdminClientTest.java b/app/src/test/java/io/apicurio/registry/rbac/AdminClientTest.java index d733b3fb59..b2cbd617f1 100644 --- a/app/src/test/java/io/apicurio/registry/rbac/AdminClientTest.java +++ 
b/app/src/test/java/io/apicurio/registry/rbac/AdminClientTest.java @@ -50,24 +50,26 @@ public void smokeGlobalRules() throws Exception { @Test public void getGlobalRuleConfig() throws Exception { - //Preparation + // Preparation createGlobalRule(RuleType.COMPATIBILITY, "BACKWARD"); { - //Execution - final Rule globalRuleConfig = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.getValue()).get(); - //Assertions + // Execution + final Rule globalRuleConfig = clientV3.admin().rules() + .byRuleType(RuleType.COMPATIBILITY.getValue()).get(); + // Assertions assertEquals(globalRuleConfig.getConfig(), "BACKWARD"); } } @Test public void updateGlobalRuleConfig() throws Exception { - //Preparation + // Preparation createGlobalRule(RuleType.COMPATIBILITY, "BACKWARD"); { - final Rule globalRuleConfig = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.getValue()).get(); + final Rule globalRuleConfig = clientV3.admin().rules() + .byRuleType(RuleType.COMPATIBILITY.getValue()).get(); assertEquals(globalRuleConfig.getConfig(), "BACKWARD"); } @@ -75,30 +77,32 @@ public void updateGlobalRuleConfig() throws Exception { toUpdate.setRuleType(RuleType.COMPATIBILITY); toUpdate.setConfig("FORWARD"); - //Execution - final Rule updated = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.getValue()).put(toUpdate); + // Execution + final Rule updated = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.getValue()) + .put(toUpdate); - //Assertions + // Assertions assertEquals(updated.getConfig(), "FORWARD"); } @Test public void deleteGlobalRule() throws Exception { - //Preparation + // Preparation createGlobalRule(RuleType.COMPATIBILITY, "BACKWARD"); { - final Rule globalRuleConfig = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.getValue()).get(); + final Rule globalRuleConfig = clientV3.admin().rules() + .byRuleType(RuleType.COMPATIBILITY.getValue()).get(); assertEquals(globalRuleConfig.getConfig(), "BACKWARD"); } - //Execution + // Execution 
clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.getValue()).delete(); { final List ruleTypes = clientV3.admin().rules().get(); - //Assertions + // Assertions assertEquals(0, ruleTypes.size()); } } @@ -203,7 +207,8 @@ public void testRoleMappings() throws Exception { clientV3.admin().roleMappings().byPrincipalId("TestUser").delete(); } - protected CreateRule createGlobalRule(RuleType ruleType, String ruleConfig) throws ExecutionException, InterruptedException, TimeoutException { + protected CreateRule createGlobalRule(RuleType ruleType, String ruleConfig) + throws ExecutionException, InterruptedException, TimeoutException { final CreateRule createRule = new CreateRule(); createRule.setConfig(ruleConfig); createRule.setRuleType(ruleType); diff --git a/app/src/test/java/io/apicurio/registry/rbac/AdminResourceTest.java b/app/src/test/java/io/apicurio/registry/rbac/AdminResourceTest.java index 236fd7a5e5..354fcb1dd5 100644 --- a/app/src/test/java/io/apicurio/registry/rbac/AdminResourceTest.java +++ b/app/src/test/java/io/apicurio/registry/rbac/AdminResourceTest.java @@ -52,51 +52,31 @@ public class AdminResourceTest extends AbstractResourceTestBase { @Test public void testGlobalRulesEndpoint() { - given() - .when() - .contentType(CT_JSON) - .get("/registry/v3/admin/rules") - .then() - .statusCode(200) + given().when().contentType(CT_JSON).get("/registry/v3/admin/rules").then().statusCode(200) .body(anything()); } @Test public void testCreateGlobalRule() throws Exception { - //Test Rule type null + // Test Rule type null CreateRule nullType = new CreateRule(); nullType.setRuleType(null); nullType.setConfig("TestConfig"); - given() - .when() - .contentType(CT_JSON) - .body(nullType) - .post("/registry/v3/admin/rules") - .then() + given().when().contentType(CT_JSON).body(nullType).post("/registry/v3/admin/rules").then() .statusCode(400); - //Test Rule config null + // Test Rule config null CreateRule nullConfig = new CreateRule(); 
nullConfig.setRuleType(RuleType.VALIDITY); nullConfig.setConfig(null); - given() - .when() - .contentType(CT_JSON) - .body(nullConfig) - .post("/registry/v3/admin/rules") - .then() + given().when().contentType(CT_JSON).body(nullConfig).post("/registry/v3/admin/rules").then() .statusCode(400); - //Test Rule config empty + // Test Rule config empty CreateRule emptyConfig = new CreateRule(); emptyConfig.setRuleType(RuleType.VALIDITY); emptyConfig.setConfig(""); - given() - .when() - .contentType(CT_JSON) - .body(emptyConfig) - .post("/registry/v3/admin/rules") - .then() + given().when().contentType(CT_JSON).body(emptyConfig).post("/registry/v3/admin/rules").then() .statusCode(400); } @@ -107,57 +87,32 @@ public void testGlobalRules() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON).body(createRule) - .post("/registry/v3/admin/rules") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).body(createRule).post("/registry/v3/admin/rules").then() + .statusCode(204).body(anything()); // Verify the rule was added. 
{ - given() - .when() - .get("/registry/v3/admin/rules/VALIDITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("VALIDITY")) + given().when().get("/registry/v3/admin/rules/VALIDITY").then().statusCode(200) + .contentType(ContentType.JSON).body("ruleType", equalTo("VALIDITY")) .body("config", equalTo("FULL")); } // Try to add the rule again - should get a 409 { - given() - .when() - .contentType(CT_JSON).body(createRule) - .post("/registry/v3/admin/rules") - .then() - .statusCode(409) - .body("error_code", equalTo(409)) + given().when().contentType(CT_JSON).body(createRule).post("/registry/v3/admin/rules").then() + .statusCode(409).body("error_code", equalTo(409)) .body("message", equalTo("A rule named 'VALIDITY' already exists.")); } // Add another global rule createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig("BACKWARD"); - given() - .when() - .contentType(CT_JSON) - .body(createRule) - .post("/registry/v3/admin/rules") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).body(createRule).post("/registry/v3/admin/rules").then() + .statusCode(204).body(anything()); // Get the list of rules (should be 2 of them) { - given() - .when() - .get("/registry/v3/admin/rules") - .then() - .statusCode(200) + given().when().get("/registry/v3/admin/rules").then().statusCode(200) .contentType(ContentType.JSON) .body("[0]", anyOf(equalTo("VALIDITY"), equalTo("COMPATIBILITY"))) .body("[1]", anyOf(equalTo("VALIDITY"), equalTo("COMPATIBILITY"))) @@ -165,101 +120,54 @@ public void testGlobalRules() throws Exception { } // Get a single rule by name - given() - .when() - .get("/registry/v3/admin/rules/COMPATIBILITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("COMPATIBILITY")) + given().when().get("/registry/v3/admin/rules/COMPATIBILITY").then().statusCode(200) + .contentType(ContentType.JSON).body("ruleType", equalTo("COMPATIBILITY")) 
.body("config", equalTo("BACKWARD")); // Update a rule's config Rule rule = new Rule(); rule.setRuleType(RuleType.COMPATIBILITY); rule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .body(rule) - .put("/registry/v3/admin/rules/COMPATIBILITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("COMPATIBILITY")) + given().when().contentType(CT_JSON).body(rule).put("/registry/v3/admin/rules/COMPATIBILITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("COMPATIBILITY")) .body("config", equalTo("FULL")); // Get a single (updated) rule by name { - given() - .when() - .get("/registry/v3/admin/rules/COMPATIBILITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("COMPATIBILITY")) + given().when().get("/registry/v3/admin/rules/COMPATIBILITY").then().statusCode(200) + .contentType(ContentType.JSON).body("ruleType", equalTo("COMPATIBILITY")) .body("config", equalTo("FULL")); } // Delete a rule - given() - .when() - .delete("/registry/v3/admin/rules/COMPATIBILITY") - .then() - .statusCode(204) + given().when().delete("/registry/v3/admin/rules/COMPATIBILITY").then().statusCode(204) .body(anything()); // Get a single (deleted) rule by name (should fail with a 404) { - given() - .when() - .get("/registry/v3/admin/rules/COMPATIBILITY") - .then() - .statusCode(404) - .contentType(ContentType.JSON) - .body("error_code", equalTo(404)) + given().when().get("/registry/v3/admin/rules/COMPATIBILITY").then().statusCode(404) + .contentType(ContentType.JSON).body("error_code", equalTo(404)) .body("message", equalTo("No rule named 'COMPATIBILITY' was found.")); } // Get the list of rules (should be 1 of them) { - given() - .when() - .get("/registry/v3/admin/rules") - .then() - .log().all() - .statusCode(200) - .contentType(ContentType.JSON) - .body("[0]", equalTo("VALIDITY")) - .body("[1]", nullValue()); + 
given().when().get("/registry/v3/admin/rules").then().log().all().statusCode(200) + .contentType(ContentType.JSON).body("[0]", equalTo("VALIDITY")).body("[1]", nullValue()); } // Delete all rules - given() - .when() - .delete("/registry/v3/admin/rules") - .then() - .statusCode(204); + given().when().delete("/registry/v3/admin/rules").then().statusCode(204); // Get the list of rules (no rules now) { - given() - .when() - .get("/registry/v3/admin/rules") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("[0]", nullValue()); + given().when().get("/registry/v3/admin/rules").then().statusCode(200) + .contentType(ContentType.JSON).body("[0]", nullValue()); } // Get the other (deleted) rule by name (should fail with a 404) - given() - .when() - .get("/registry/v3/admin/rules/VALIDITY") - .then() - .statusCode(404) - .contentType(ContentType.JSON) - .body("error_code", equalTo(404)) + given().when().get("/registry/v3/admin/rules/VALIDITY").then().statusCode(404) + .contentType(ContentType.JSON).body("error_code", equalTo(404)) .body("message", equalTo("No rule named 'VALIDITY' was found.")); } @@ -270,24 +178,13 @@ public void testIntegrityRule() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.INTEGRITY); createRule.setConfig(IntegrityLevel.NO_DUPLICATES.name()); - given() - .when() - .contentType(CT_JSON) - .body(createRule) - .post("/registry/v3/admin/rules") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).body(createRule).post("/registry/v3/admin/rules").then() + .statusCode(204).body(anything()); // Get the rule by name { - given() - .when() - .get("/registry/v3/admin/rules/INTEGRITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("INTEGRITY")) + given().when().get("/registry/v3/admin/rules/INTEGRITY").then().statusCode(200) + .contentType(ContentType.JSON).body("ruleType", equalTo("INTEGRITY")) .body("config", 
equalTo("NO_DUPLICATES")); } @@ -296,26 +193,14 @@ public void testIntegrityRule() throws Exception { Rule rule = new Rule(); rule.setRuleType(RuleType.INTEGRITY); rule.setConfig(newConfig); - given() - .when() - .contentType(CT_JSON) - .body(rule) - .put("/registry/v3/admin/rules/INTEGRITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("INTEGRITY")) + given().when().contentType(CT_JSON).body(rule).put("/registry/v3/admin/rules/INTEGRITY").then() + .statusCode(200).contentType(ContentType.JSON).body("ruleType", equalTo("INTEGRITY")) .body("config", equalTo(newConfig)); // Verify new config { - given() - .when() - .get("/registry/v3/admin/rules/INTEGRITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("INTEGRITY")) + given().when().get("/registry/v3/admin/rules/INTEGRITY").then().statusCode(200) + .contentType(ContentType.JSON).body("ruleType", equalTo("INTEGRITY")) .body("config", equalTo(newConfig)); } @@ -327,43 +212,23 @@ public void testDeleteAllGlobalRules() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("FULL"); - given() - .when() - .contentType(CT_JSON) - .body(createRule) - .post("/registry/v3/admin/rules") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).body(createRule).post("/registry/v3/admin/rules").then() + .statusCode(204).body(anything()); // Get a single rule by name { - given() - .when() - .get("/registry/v3/admin/rules/VALIDITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("VALIDITY")) + given().when().get("/registry/v3/admin/rules/VALIDITY").then().statusCode(200) + .contentType(ContentType.JSON).body("ruleType", equalTo("VALIDITY")) .body("config", equalTo("FULL")); } // Delete all rules - given() - .when() - .delete("/registry/v3/admin/rules") - .then() - .statusCode(204); + 
given().when().delete("/registry/v3/admin/rules").then().statusCode(204); // Get the (deleted) rule by name (should fail with a 404) { - given() - .when() - .get("/registry/v3/admin/rules/VALIDITY") - .then() - .statusCode(404) - .contentType(ContentType.JSON) - .body("error_code", equalTo(404)) + given().when().get("/registry/v3/admin/rules/VALIDITY").then().statusCode(404) + .contentType(ContentType.JSON).body("error_code", equalTo(404)) .body("message", equalTo("No rule named 'VALIDITY' was found.")); } } @@ -374,24 +239,13 @@ public void testCompatilibityLevelNone() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig(CompatibilityLevel.NONE.name()); - given() - .when() - .contentType(CT_JSON) - .body(createRule) - .post("/registry/v3/admin/rules") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).body(createRule).post("/registry/v3/admin/rules").then() + .statusCode(204).body(anything()); // Get a single rule by name { - given() - .when() - .get("/registry/v3/admin/rules/COMPATIBILITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("COMPATIBILITY")) + given().when().get("/registry/v3/admin/rules/COMPATIBILITY").then().statusCode(200) + .contentType(ContentType.JSON).body("ruleType", equalTo("COMPATIBILITY")) .body("config", equalTo("NONE")); } } @@ -405,14 +259,11 @@ void testExport() throws Exception { for (int idx = 0; idx < 5; idx++) { String title = "Empty API " + idx; String artifactId = "Empty-" + idx; - this.createArtifact(group, artifactId, ArtifactType.OPENAPI, artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); + this.createArtifact(group, artifactId, ArtifactType.OPENAPI, + artifactContent.replaceAll("Empty API", title), ContentTypes.APPLICATION_JSON); } - ValidatableResponse response = given() - .when() - .get("/registry/v3/admin/export") - .then() - 
.statusCode(200); + ValidatableResponse response = given().when().get("/registry/v3/admin/export").then().statusCode(200); InputStream body = response.extract().asInputStream(); ZipInputStream zip = new ZipInputStream(body); @@ -452,34 +303,25 @@ void testExportForBrowser() throws Exception { String title = "Empty API " + idx + " " + suffix; String artifactId = "Empty-" + idx; String emptyApiContent = artifactContent.replaceAll("Empty API", title); - List refs = idx > 0 ? getSingletonRefList(group, "Empty-" + (idx - 1), "1", "ref") : Collections.emptyList(); + List refs = idx > 0 + ? getSingletonRefList(group, "Empty-" + (idx - 1), "1", "ref") : Collections.emptyList(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, emptyApiContent, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + emptyApiContent, ContentTypes.APPLICATION_JSON); createArtifact.getFirstVersion().getContent().setReferences(refs); clientV3.groups().byGroupId(group).artifacts().post(createArtifact); } // Export data (browser flow). 
- String downloadHref = given() - .when() - .queryParam("forBrowser", "true") - .get("/registry/v3/admin/export") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("downloadId", notNullValue()) - .body("href", notNullValue()) - .extract().body().path("href"); + String downloadHref = given().when().queryParam("forBrowser", "true").get("/registry/v3/admin/export") + .then().statusCode(200).contentType(ContentType.JSON).body("downloadId", notNullValue()) + .body("href", notNullValue()).extract().body().path("href"); Assertions.assertTrue(downloadHref.startsWith("/apis/")); downloadHref = downloadHref.substring(5); // Follow href in response - ValidatableResponse response = given() - .when() - .get(downloadHref) - .then() - .statusCode(200); + ValidatableResponse response = given().when().get(downloadHref).then().statusCode(200); InputStream body = response.extract().asInputStream(); ZipInputStream zip = new ZipInputStream(body); @@ -504,11 +346,7 @@ void testExportForBrowser() throws Exception { Assertions.assertTrue(versionCounter.get() >= 5); // Try the download href again - should fail with 404 because it was already consumed. 
- given() - .when() - .get(downloadHref) - .then() - .statusCode(404); + given().when().get(downloadHref).then().statusCode(404); } @Test @@ -521,31 +359,20 @@ void testImport() throws Exception { int artifactsBefore = result.getCount(); try (InputStream data = resourceToInputStream("../rest/v3/export.zip")) { - given() - .when() - .contentType("application/zip") - .body(data) - .post("/registry/v3/admin/import") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType("application/zip").body(data).post("/registry/v3/admin/import").then() + .statusCode(204).body(anything()); } // Verify global rules were imported { - given() - .when() - .get("/registry/v3/admin/rules/COMPATIBILITY") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("ruleType", equalTo("COMPATIBILITY")) + given().when().get("/registry/v3/admin/rules/COMPATIBILITY").then().statusCode(200) + .contentType(ContentType.JSON).body("ruleType", equalTo("COMPATIBILITY")) .body("config", equalTo("BACKWARD")); } // Verify artifacts were imported // Verify all artifact versions were imported - //total num of artifacts 3 + // total num of artifacts 3 result = clientV3.search().artifacts().get(config -> { config.queryParameters.offset = 0; config.queryParameters.limit = 5; @@ -554,206 +381,128 @@ void testImport() throws Exception { assertEquals(3, newArtifacts); // Verify comments were imported - List comments = clientV3.groups().byGroupId("ImportTest").artifacts().byArtifactId("Artifact-1").versions().byVersionExpression("1.0.2").comments().get(); + List comments = clientV3.groups().byGroupId("ImportTest").artifacts() + .byArtifactId("Artifact-1").versions().byVersionExpression("1.0.2").comments().get(); assertNotNull(comments); assertEquals(2, comments.size()); assertEquals("COMMENT-2", comments.get(0).getValue()); assertEquals("COMMENT-1", comments.get(1).getValue()); - comments = 
clientV3.groups().byGroupId("ImportTest").artifacts().byArtifactId("Artifact-2").versions().byVersionExpression("1.0.1").comments().get(); + comments = clientV3.groups().byGroupId("ImportTest").artifacts().byArtifactId("Artifact-2").versions() + .byVersionExpression("1.0.1").comments().get(); assertNotNull(comments); assertEquals(1, comments.size()); assertEquals("COMMENT-3", comments.get(0).getValue()); // Verify artifact rules were imported - var rule = clientV3.groups().byGroupId("ImportTest").artifacts().byArtifactId("Artifact-1").rules().byRuleType(RuleType.VALIDITY.name()).get(); + var rule = clientV3.groups().byGroupId("ImportTest").artifacts().byArtifactId("Artifact-1").rules() + .byRuleType(RuleType.VALIDITY.name()).get(); assertNotNull(rule); assertEquals("SYNTAX_ONLY", rule.getConfig()); - //the biggest globalId in the export file is 1005 + // the biggest globalId in the export file is 1005 assertNotNull(clientV3.ids().globalIds().byGlobalId(1005L).get()); - //this is the artifactId for the artifact with globalId 1005 - var lastArtifactMeta = clientV3.groups().byGroupId("ImportTest").artifacts().byArtifactId("Artifact-3").versions().byVersionExpression("branch=latest").get(); + // this is the artifactId for the artifact with globalId 1005 + var lastArtifactMeta = clientV3.groups().byGroupId("ImportTest").artifacts() + .byArtifactId("Artifact-3").versions().byVersionExpression("branch=latest").get(); assertEquals("1.0.2", lastArtifactMeta.getVersion()); assertEquals(1005L, lastArtifactMeta.getGlobalId()); - var exception = Assertions.assertThrows(io.apicurio.registry.rest.client.models.Error.class, () -> clientV3.ids().globalIds().byGlobalId(1006L).get()); - //ArtifactNotFoundException + var exception = Assertions.assertThrows(io.apicurio.registry.rest.client.models.Error.class, + () -> clientV3.ids().globalIds().byGlobalId(1006L).get()); + // ArtifactNotFoundException Assertions.assertEquals("ArtifactNotFoundException", exception.getName()); 
Assertions.assertEquals(404, exception.getErrorCode()); } - @Test public void testRoleMappings() throws Exception { // Start with no role mappings - given() - .when() - .get("/registry/v3/admin/roleMappings") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("roleMappings[0]", nullValue()); + given().when().get("/registry/v3/admin/roleMappings").then().statusCode(200) + .contentType(ContentType.JSON).body("roleMappings[0]", nullValue()); // Add RoleMapping mapping = new RoleMapping(); mapping.setPrincipalId("TestUser"); mapping.setRole(RoleType.DEVELOPER); mapping.setPrincipalName("Foo bar"); - given() - .when() - .contentType(CT_JSON).body(mapping) - .post("/registry/v3/admin/roleMappings") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).body(mapping).post("/registry/v3/admin/roleMappings").then() + .statusCode(204).body(anything()); // Verify the mapping was added. - given() - .when() - .get("/registry/v3/admin/roleMappings/TestUser") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("principalId", equalTo("TestUser")) - .body("principalName", equalTo("Foo bar")) - .body("role", equalTo("DEVELOPER")); - given() - .when() - .get("/registry/v3/admin/roleMappings") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("roleMappings[0].principalId", equalTo("TestUser")) + given().when().get("/registry/v3/admin/roleMappings/TestUser").then().statusCode(200) + .contentType(ContentType.JSON).body("principalId", equalTo("TestUser")) + .body("principalName", equalTo("Foo bar")).body("role", equalTo("DEVELOPER")); + given().when().get("/registry/v3/admin/roleMappings").then().statusCode(200) + .contentType(ContentType.JSON).body("roleMappings[0].principalId", equalTo("TestUser")) .body("roleMappings[0].principalName", equalTo("Foo bar")) .body("roleMappings[0].role", equalTo("DEVELOPER")); // Try to add the rule again - should get a 409 - given() - .when() - 
.contentType(CT_JSON).body(mapping) - .post("/registry/v3/admin/roleMappings") - .then() - .statusCode(409) - .body("error_code", equalTo(409)) - .body("message", equalTo("A mapping for principal 'TestUser' and role 'DEVELOPER' already exists.")); + given().when().contentType(CT_JSON).body(mapping).post("/registry/v3/admin/roleMappings").then() + .statusCode(409).body("error_code", equalTo(409)).body("message", + equalTo("A mapping for principal 'TestUser' and role 'DEVELOPER' already exists.")); // Add another mapping mapping.setPrincipalId("TestUser2"); mapping.setRole(RoleType.ADMIN); - given() - .when() - .contentType(CT_JSON) - .body(mapping) - .post("/registry/v3/admin/roleMappings") - .then() - .statusCode(204) - .body(anything()); + given().when().contentType(CT_JSON).body(mapping).post("/registry/v3/admin/roleMappings").then() + .statusCode(204).body(anything()); // Get the list of mappings (should be 2 of them) - given() - .when() - .get("/registry/v3/admin/roleMappings") - .then() - .statusCode(200) + given().when().get("/registry/v3/admin/roleMappings").then().statusCode(200) .contentType(ContentType.JSON) .body("roleMappings[0].principalId", anyOf(equalTo("TestUser"), equalTo("TestUser2"))) .body("roleMappings[1].principalId", anyOf(equalTo("TestUser"), equalTo("TestUser2"))) .body("roleMappings[2]", nullValue()); // Get a single mapping by principal - given() - .when() - .get("/registry/v3/admin/roleMappings/TestUser2") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("principalId", equalTo("TestUser2")) + given().when().get("/registry/v3/admin/roleMappings/TestUser2").then().statusCode(200) + .contentType(ContentType.JSON).body("principalId", equalTo("TestUser2")) .body("role", equalTo("ADMIN")); // Update a mapping UpdateRole update = new UpdateRole(); update.setRole(RoleType.READ_ONLY); - given() - .when() - .contentType(CT_JSON) - .body(update) - .put("/registry/v3/admin/roleMappings/TestUser") - .then() - .statusCode(204); + 
given().when().contentType(CT_JSON).body(update).put("/registry/v3/admin/roleMappings/TestUser") + .then().statusCode(204); // Get a single (updated) mapping - given() - .when() - .get("/registry/v3/admin/roleMappings/TestUser") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("principalId", equalTo("TestUser")) + given().when().get("/registry/v3/admin/roleMappings/TestUser").then().statusCode(200) + .contentType(ContentType.JSON).body("principalId", equalTo("TestUser")) .body("role", equalTo("READ_ONLY")); // Try to update a role mapping that doesn't exist - given() - .when() - .contentType(CT_JSON) - .body(update) - .put("/registry/v3/admin/roleMappings/UnknownPrincipal") - .then() - .statusCode(404) - .contentType(ContentType.JSON) - .body("error_code", equalTo(404)) - .body("message", equalTo("No mapping for principal 'UnknownPrincipal' and role 'READ_ONLY' was found.")); + given().when().contentType(CT_JSON).body(update) + .put("/registry/v3/admin/roleMappings/UnknownPrincipal").then().statusCode(404) + .contentType(ContentType.JSON).body("error_code", equalTo(404)).body("message", equalTo( + "No mapping for principal 'UnknownPrincipal' and role 'READ_ONLY' was found.")); - //Update a mapping with null RoleType + // Update a mapping with null RoleType update.setRole(null); - given() - .when() - .contentType(CT_JSON) - .body(update) - .put("/registry/v3/admin/roleMappings/TestUser") - .then() - .statusCode(400); + given().when().contentType(CT_JSON).body(update).put("/registry/v3/admin/roleMappings/TestUser") + .then().statusCode(400); // Delete a role mapping - given() - .when() - .delete("/registry/v3/admin/roleMappings/TestUser2") - .then() - .statusCode(204) + given().when().delete("/registry/v3/admin/roleMappings/TestUser2").then().statusCode(204) .body(anything()); // Get the (deleted) mapping by name (should fail with a 404) - given() - .when() - .get("/registry/v3/admin/roleMappings/TestUser2") - .then() - .statusCode(404) - 
.contentType(ContentType.JSON) - .body("error_code", equalTo(404)) + given().when().get("/registry/v3/admin/roleMappings/TestUser2").then().statusCode(404) + .contentType(ContentType.JSON).body("error_code", equalTo(404)) .body("message", equalTo("No role mapping for principal 'TestUser2' was found.")); // Get the list of mappings (should be 1 of them) - given() - .when() - .get("/registry/v3/admin/roleMappings") - .then() - .log().all() - .statusCode(200) - .contentType(ContentType.JSON) - .body("roleMappings[0].principalId", equalTo("TestUser")) + given().when().get("/registry/v3/admin/roleMappings").then().log().all().statusCode(200) + .contentType(ContentType.JSON).body("roleMappings[0].principalId", equalTo("TestUser")) .body("roleMappings[1]", nullValue()); // Clean up - given() - .when() - .delete("/registry/v3/admin/roleMappings/TestUser") - .then() - .statusCode(204) + given().when().delete("/registry/v3/admin/roleMappings/TestUser").then().statusCode(204) .body(anything()); } - @Test public void testRoleMappingPaging() throws Exception { // Start with no role mappings @@ -767,179 +516,120 @@ public void testRoleMappingPaging() throws Exception { mapping.setPrincipalName("Principal " + i); clientV3.admin().roleMappings().post(mapping); } - + // Make sure we created 20 mappings Assertions.assertEquals(20, clientV3.admin().roleMappings().get().getCount().intValue()); - + // Get the first 5 - RoleMappingSearchResults results = clientV3.admin().roleMappings().get(config -> config.queryParameters.limit = 5); + RoleMappingSearchResults results = clientV3.admin().roleMappings() + .get(config -> config.queryParameters.limit = 5); Assertions.assertEquals(5, results.getRoleMappings().size()); Assertions.assertEquals(20, results.getCount()); Assertions.assertEquals("principal-0", results.getRoleMappings().get(0).getPrincipalId()); Assertions.assertEquals("principal-4", results.getRoleMappings().get(4).getPrincipalId()); // Get the second 5 - results = 
clientV3.admin().roleMappings().get(config -> { config.queryParameters.limit = 5; config.queryParameters.offset = 5; }); + results = clientV3.admin().roleMappings().get(config -> { + config.queryParameters.limit = 5; + config.queryParameters.offset = 5; + }); Assertions.assertEquals(5, results.getRoleMappings().size()); Assertions.assertEquals(20, results.getCount()); Assertions.assertEquals("principal-5", results.getRoleMappings().get(0).getPrincipalId()); Assertions.assertEquals("principal-9", results.getRoleMappings().get(4).getPrincipalId()); - + // Cleanup for (int i = 0; i < 20; i++) { clientV3.admin().roleMappings().byPrincipalId("principal-" + i).delete(); } } - @Test public void testConfigProperties() throws Exception { String property1Name = "apicurio.ccompat.legacy-id-mode.enabled"; String property2Name = "apicurio.rest.artifact.deletion.enabled"; // Start with default mappings - given() - .when() - .get("/registry/v3/admin/config/properties") - .then() - .statusCode(200) + given().when().get("/registry/v3/admin/config/properties").then().statusCode(200) .contentType(ContentType.JSON); // Fetch property 1, should be false - given() - .when() - .pathParam("propertyName", property1Name) - .get("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("name", equalTo(property1Name)) + given().when().pathParam("propertyName", property1Name) + .get("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(200) + .contentType(ContentType.JSON).body("name", equalTo(property1Name)) .body("value", equalTo("false")); // Fetch property 2, should be false - given() - .when() - .pathParam("propertyName", property2Name) - .get("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("name", equalTo(property2Name)) + given().when().pathParam("propertyName", property2Name) + 
.get("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(200) + .contentType(ContentType.JSON).body("name", equalTo(property2Name)) .body("value", equalTo("true")); // Set value for property 1 UpdateConfigurationProperty update = new UpdateConfigurationProperty(); update.setValue("true"); - given() - .when() - .contentType(CT_JSON) - .body(update) - .pathParam("propertyName", property1Name) - .put("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(204) + given().when().contentType(CT_JSON).body(update).pathParam("propertyName", property1Name) + .put("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(204) .body(anything()); // Verify the property was set. - given() - .when() - .pathParam("propertyName", property1Name) - .get("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("name", equalTo(property1Name)) + given().when().pathParam("propertyName", property1Name) + .get("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(200) + .contentType(ContentType.JSON).body("name", equalTo(property1Name)) .body("value", equalTo("true")); // Set value for property 2 update = new UpdateConfigurationProperty(); update.setValue("false"); - given() - .when() - .contentType(CT_JSON).body(update) - .pathParam("propertyName", property2Name) - .put("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(204) + given().when().contentType(CT_JSON).body(update).pathParam("propertyName", property2Name) + .put("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(204) .body(anything()); // Verify the property was set. 
- given() - .when() - .pathParam("propertyName", property2Name) - .get("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("name", equalTo(property2Name)) + given().when().pathParam("propertyName", property2Name) + .get("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(200) + .contentType(ContentType.JSON).body("name", equalTo(property2Name)) .body("value", equalTo("false")); // Reset a config property - given() - .when() - .pathParam("propertyName", property2Name) - .delete("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(204) + given().when().pathParam("propertyName", property2Name) + .delete("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(204) .body(anything()); // Verify the property was reset. - given() - .when() - .pathParam("propertyName", property2Name) - .get("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("name", equalTo(property2Name)) + given().when().pathParam("propertyName", property2Name) + .get("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(200) + .contentType(ContentType.JSON).body("name", equalTo(property2Name)) .body("value", equalTo("true")); // Reset the other property - given() - .when() - .contentType(CT_JSON).body(update) - .pathParam("propertyName", property1Name) - .delete("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(204) + given().when().contentType(CT_JSON).body(update).pathParam("propertyName", property1Name) + .delete("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(204) .body(anything()); // Verify the property was reset - given() - .when() - .pathParam("propertyName", property1Name) - .get("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(200) - .contentType(ContentType.JSON) - .body("name", 
equalTo(property1Name)) + given().when().pathParam("propertyName", property1Name) + .get("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(200) + .contentType(ContentType.JSON).body("name", equalTo(property1Name)) .body("value", equalTo("false")); // Try to set a config property that doesn't exist. update = new UpdateConfigurationProperty(); update.setValue("foobar"); - given() - .when() - .contentType(CT_JSON).body(update) - .pathParam("propertyName", "property-does-not-exist") - .put("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(404); + given().when().contentType(CT_JSON).body(update).pathParam("propertyName", "property-does-not-exist") + .put("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(404); // Try to set a Long property to "foobar" (should be invalid type) update = new UpdateConfigurationProperty(); update.setValue("foobar"); - given() - .when() - .contentType(CT_JSON).body(update) + given().when().contentType(CT_JSON).body(update) .pathParam("propertyName", "apicurio.download.href.ttl.seconds") - .put("/registry/v3/admin/config/properties/{propertyName}") - .then() - .statusCode(400); + .put("/registry/v3/admin/config/properties/{propertyName}").then().statusCode(400); } - private List getSingletonRefList(String groupId, String artifactId, String version, String name) { + private List getSingletonRefList(String groupId, String artifactId, String version, + String name) { ArtifactReference artifactReference = new ArtifactReference(); artifactReference.setGroupId(groupId); artifactReference.setArtifactId(artifactId); diff --git a/app/src/test/java/io/apicurio/registry/rbac/MockAuditLogService.java b/app/src/test/java/io/apicurio/registry/rbac/MockAuditLogService.java index c796393ff3..a08f0ee572 100644 --- a/app/src/test/java/io/apicurio/registry/rbac/MockAuditLogService.java +++ b/app/src/test/java/io/apicurio/registry/rbac/MockAuditLogService.java @@ -9,17 +9,18 @@ import 
java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; - @Mock public class MockAuditLogService extends AuditLogService { private static final List> auditLogs = new CopyOnWriteArrayList<>(); /** - * @see io.apicurio.registry.logging.audit.AuditLogService#log(java.lang.String, java.lang.String, java.lang.String, java.util.Map, AuditHttpRequestInfo) + * @see io.apicurio.registry.logging.audit.AuditLogService#log(java.lang.String, java.lang.String, + * java.lang.String, java.util.Map, AuditHttpRequestInfo) */ @Override - public void log(String invoker, String action, String result, Map metadata, AuditHttpRequestInfo requestInfo) { + public void log(String invoker, String action, String result, Map metadata, + AuditHttpRequestInfo requestInfo) { super.log(invoker, action, result, metadata, requestInfo); Map audit = new HashMap<>(metadata); audit.put("action", action); diff --git a/app/src/test/java/io/apicurio/registry/rbac/RegistryClientTest.java b/app/src/test/java/io/apicurio/registry/rbac/RegistryClientTest.java index bad6068892..79fef6a44c 100644 --- a/app/src/test/java/io/apicurio/registry/rbac/RegistryClientTest.java +++ b/app/src/test/java/io/apicurio/registry/rbac/RegistryClientTest.java @@ -81,38 +81,21 @@ public class RegistryClientTest extends AbstractResourceTestBase { private static final Logger LOGGER = LoggerFactory.getLogger(RegistryClientTest.class); - private static final String ARTIFACT_OPENAPI_YAML_CONTENT = "openapi: \"3.0.2\"\n" + - "info:\n" + - " description: \"Description\"\n" + - " version: \"1.0.0\"\n" + - " title: \"OpenAPI\"\n" + - "paths:"; - private static final String UPDATED_OPENAPI_YAML_CONTENT = "openapi: \"3.0.2\"\n" + - "info:\n" + - " description: \"Description v2\"\n" + - " version: \"2.0.0\"\n" + - " title: \"OpenAPI\"\n" + - "paths:"; - - private static final String ARTIFACT_OPENAPI_JSON_CONTENT = "{\n" + - " \"openapi\" : \"3.0.2\",\n" + - " \"info\" : {\n" + - " \"description\" : \"Description\",\n" + - " 
\"version\" : \"1.0.0\",\n" + - " \"title\" : \"OpenAPI\"\n" + - " },\n" + - " \"paths\" : null\n" + - "}"; - private static final String UPDATED_OPENAPI_JSON_CONTENT = "{\n" + - " \"openapi\" : \"3.0.2\",\n" + - " \"info\" : {\n" + - " \"description\" : \"Description v2\",\n" + - " \"version\" : \"2.0.0\",\n" + - " \"title\" : \"OpenAPI\"\n" + - " },\n" + - " \"paths\" : null\n" + - "}"; - + private static final String ARTIFACT_OPENAPI_YAML_CONTENT = "openapi: \"3.0.2\"\n" + "info:\n" + + " description: \"Description\"\n" + " version: \"1.0.0\"\n" + " title: \"OpenAPI\"\n" + + "paths:"; + private static final String UPDATED_OPENAPI_YAML_CONTENT = "openapi: \"3.0.2\"\n" + "info:\n" + + " description: \"Description v2\"\n" + " version: \"2.0.0\"\n" + " title: \"OpenAPI\"\n" + + "paths:"; + + private static final String ARTIFACT_OPENAPI_JSON_CONTENT = "{\n" + " \"openapi\" : \"3.0.2\",\n" + + " \"info\" : {\n" + " \"description\" : \"Description\",\n" + + " \"version\" : \"1.0.0\",\n" + " \"title\" : \"OpenAPI\"\n" + " },\n" + + " \"paths\" : null\n" + "}"; + private static final String UPDATED_OPENAPI_JSON_CONTENT = "{\n" + " \"openapi\" : \"3.0.2\",\n" + + " \"info\" : {\n" + " \"description\" : \"Description v2\",\n" + + " \"version\" : \"2.0.0\",\n" + " \"title\" : \"OpenAPI\"\n" + " },\n" + + " \"paths\" : null\n" + "}"; private static final String SCHEMA_WITH_REFERENCE = "{\r\n \"namespace\":\"com.example.common\",\r\n \"name\":\"Item\",\r\n \"type\":\"record\",\r\n \"fields\":[\r\n {\r\n \"name\":\"itemId\",\r\n \"type\":\"com.example.common.ItemId\"\r\n }]\r\n}"; private static final String REFERENCED_SCHEMA = "{\"namespace\": \"com.example.common\", \"type\": \"record\", \"name\": \"ItemId\", \"fields\":[{\"name\":\"id\", \"type\":\"int\"}]}"; @@ -125,7 +108,7 @@ public class RegistryClientTest extends AbstractResourceTestBase { @Test public void testCreateArtifact() throws Exception { - //Preparation + // Preparation final String groupId = 
"testCreateArtifact"; final String artifactId = generateArtifactId(); @@ -133,26 +116,31 @@ public void testCreateArtifact() throws Exception { final String name = "testCreateArtifactName"; final String description = "testCreateArtifactDescription"; - //Execution - CreateArtifactResponse created = createArtifact(groupId, artifactId, ArtifactType.JSON, ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON, (createArtifact -> { - createArtifact.setName(name); - createArtifact.setDescription(description); - createArtifact.getFirstVersion().setVersion(version); - })); + // Execution + CreateArtifactResponse created = createArtifact(groupId, artifactId, ArtifactType.JSON, + ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON, (createArtifact -> { + createArtifact.setName(name); + createArtifact.setDescription(description); + createArtifact.getFirstVersion().setVersion(version); + })); - //Assertions + // Assertions assertNotNull(created); assertEquals(groupId, created.getArtifact().getGroupId()); assertEquals(artifactId, created.getArtifact().getArtifactId()); assertEquals(version, created.getVersion().getVersion()); assertEquals(name, created.getArtifact().getName()); assertEquals(description, created.getArtifact().getDescription()); - assertEquals(ARTIFACT_CONTENT, new String(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get().readAllBytes(), StandardCharsets.UTF_8)); + assertEquals(ARTIFACT_CONTENT, + new String( + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get().readAllBytes(), + StandardCharsets.UTF_8)); } @Test public void groupsCrud() throws Exception { - //Preparation + // Preparation final String groupId = UUID.randomUUID().toString(); CreateGroup groupMetaData = new CreateGroup(); groupMetaData.setGroupId(groupId); @@ -166,8 +154,8 @@ public void groupsCrud() throws Exception { final 
GroupMetaData artifactGroup = clientV3.groups().byGroupId(groupId).get(); assertEquals(groupMetaData.getGroupId(), artifactGroup.getGroupId()); assertEquals(groupMetaData.getDescription(), artifactGroup.getDescription()); - assertEquals(groupMetaData.getLabels().getAdditionalData(), artifactGroup.getLabels().getAdditionalData()); - + assertEquals(groupMetaData.getLabels().getAdditionalData(), + artifactGroup.getLabels().getAdditionalData()); String group1Id = UUID.randomUUID().toString(); String group2Id = UUID.randomUUID().toString(); @@ -180,7 +168,6 @@ public void groupsCrud() throws Exception { groupMetaData.setGroupId(group3Id); clientV3.groups().post(groupMetaData); - GroupSearchResults groupSearchResults = clientV3.groups().get(config -> { config.queryParameters.offset = 0; config.queryParameters.limit = 100; @@ -195,14 +182,15 @@ public void groupsCrud() throws Exception { assertTrue(groupIds.containsAll(List.of(groupId, group1Id, group2Id, group3Id))); clientV3.groups().byGroupId(groupId).delete(); - var exception = Assert.assertThrows(io.apicurio.registry.rest.client.models.Error.class, () -> clientV3.groups().byGroupId(groupId).get()); + var exception = Assert.assertThrows(io.apicurio.registry.rest.client.models.Error.class, + () -> clientV3.groups().byGroupId(groupId).get()); Assertions.assertEquals("GroupNotFoundException", exception.getName()); Assertions.assertEquals(404, exception.getErrorCode()); } @Test public void testCreateYamlArtifact() throws Exception { - //Preparation + // Preparation final String groupId = "testCreateYamlArtifact"; final String artifactId = generateArtifactId(); @@ -210,29 +198,33 @@ public void testCreateYamlArtifact() throws Exception { final String name = "testCreateYamlArtifactName"; final String description = "testCreateYamlArtifactDescription"; - //Execution - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, ARTIFACT_OPENAPI_YAML_CONTENT, ContentTypes.APPLICATION_JSON); 
+ // Execution + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + ARTIFACT_OPENAPI_YAML_CONTENT, ContentTypes.APPLICATION_JSON); createArtifact.setName(name); createArtifact.setDescription(description); createArtifact.getFirstVersion().setVersion(version); createArtifact.getFirstVersion().setName(name); createArtifact.getFirstVersion().setDescription(description); - final VersionMetaData created = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + final VersionMetaData created = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); - //Assertions + // Assertions assertNotNull(created); assertEquals(groupId, created.getGroupId()); assertEquals(artifactId, created.getArtifactId()); assertEquals(version, created.getVersion()); assertEquals(name, created.getName()); assertEquals(description, created.getDescription()); - assertMultilineTextEquals(ARTIFACT_OPENAPI_YAML_CONTENT, IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get())); + assertMultilineTextEquals(ARTIFACT_OPENAPI_YAML_CONTENT, + IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").content().get())); } @Test public void testCreateArtifactVersion() throws Exception { - //Preparation + // Preparation final String groupId = "testCreateArtifactVersion"; final String artifactId = generateArtifactId(); @@ -242,16 +234,19 @@ public void testCreateArtifactVersion() throws Exception { createArtifact(groupId, artifactId); - //Execution - CreateVersion createVersion = TestUtils.clientCreateVersion(UPDATED_CONTENT, ContentTypes.APPLICATION_JSON); + // Execution + CreateVersion createVersion = TestUtils.clientCreateVersion(UPDATED_CONTENT, + ContentTypes.APPLICATION_JSON); createVersion.setName(name); 
createVersion.setDescription(description); createVersion.setVersion(version); - VersionMetaData versionMetaData = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + VersionMetaData versionMetaData = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().post(createVersion); - ArtifactMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); + ArtifactMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .get(); - //Assertions + // Assertions assertNotNull(versionMetaData); assertEquals(version, versionMetaData.getVersion()); assertEquals(name, versionMetaData.getName()); @@ -261,12 +256,13 @@ public void testCreateArtifactVersion() throws Exception { assertEquals(artifactId, amd.getName()); assertNull(amd.getDescription()); - assertEquals(UPDATED_CONTENT, IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get())); + assertEquals(UPDATED_CONTENT, IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get())); } @Test public void testCreateYamlArtifactVersion() throws Exception { - //Preparation + // Preparation final String groupId = "testCreateYamlArtifactVersion"; final String artifactId = generateArtifactId(); @@ -276,18 +272,21 @@ public void testCreateYamlArtifactVersion() throws Exception { createOpenAPIArtifact(groupId, artifactId); // Create first version of the openapi artifact using JSON - //Execution - CreateVersion createVersion = TestUtils.clientCreateVersion(UPDATED_OPENAPI_YAML_CONTENT, ContentTypes.APPLICATION_JSON); + // Execution + CreateVersion createVersion = TestUtils.clientCreateVersion(UPDATED_OPENAPI_YAML_CONTENT, + ContentTypes.APPLICATION_JSON); createVersion.setName(name); 
createVersion.setDescription(description); createVersion.setVersion(version); ArtifactContent content = new ArtifactContent(); content.setContent(UPDATED_OPENAPI_YAML_CONTENT); - VersionMetaData versionMetaData = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); - VersionMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData versionMetaData = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().post(createVersion); + VersionMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").get(); - //Assertions + // Assertions assertNotNull(versionMetaData); assertEquals(version, versionMetaData.getVersion()); assertEquals(name, versionMetaData.getName()); @@ -298,20 +297,24 @@ public void testCreateYamlArtifactVersion() throws Exception { assertEquals(name, amd.getName()); assertEquals(description, amd.getDescription()); - assertMultilineTextEquals(UPDATED_OPENAPI_YAML_CONTENT, IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get())); + assertMultilineTextEquals(UPDATED_OPENAPI_YAML_CONTENT, + IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").content().get())); } @Test public void testAsyncCRUD() throws Exception { auditLogService.resetAuditLogs(); - //Preparation + // Preparation final String groupId = "testAsyncCRUD"; String artifactId = generateArtifactId(); - //Execution + // Execution try { - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON); - VersionMetaData amd = 
clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, + ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON); + VersionMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); Assertions.assertNotNull(amd); Thread.sleep(2000); @@ -320,24 +323,28 @@ public void testAsyncCRUD() throws Exception { emd.setName("testAsyncCRUD"); clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).put(emd); - //Assertions + // Assertions { - ArtifactMetaData artifactMetaData = clientV3 - .groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); + ArtifactMetaData artifactMetaData = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).get(); Assertions.assertNotNull(artifactMetaData); Assertions.assertEquals("testAsyncCRUD", artifactMetaData.getName()); } - CreateVersion createVersion = TestUtils.clientCreateVersion(UPDATED_CONTENT, ContentTypes.APPLICATION_JSON); - //Execution - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + CreateVersion createVersion = TestUtils.clientCreateVersion(UPDATED_CONTENT, + ContentTypes.APPLICATION_JSON); + // Execution + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); - //Assertions - assertEquals(UPDATED_CONTENT, IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get())); + // Assertions + assertEquals(UPDATED_CONTENT, + IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").content().get())); List> auditLogs = auditLogService.getAuditLogs(); assertFalse(auditLogs.isEmpty()); - assertEquals(3, auditLogs.size()); //Expected size 3 
since we performed 3 audited operations + assertEquals(3, auditLogs.size()); // Expected size 3 since we performed 3 audited operations } finally { clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).delete(); @@ -346,7 +353,7 @@ public void testAsyncCRUD() throws Exception { @Test public void testSmoke() throws Exception { - //Preparation + // Preparation final String groupId = "testSmoke"; final String artifactId1 = generateArtifactId(); final String artifactId2 = generateArtifactId(); @@ -354,7 +361,7 @@ public void testSmoke() throws Exception { createArtifact(groupId, artifactId1); createArtifact(groupId, artifactId2); - //Execution + // Execution final ArtifactSearchResults searchResults = clientV3.search().artifacts().get(config -> { config.queryParameters.groupId = groupId; config.queryParameters.offset = 0; @@ -363,17 +370,17 @@ public void testSmoke() throws Exception { config.queryParameters.order = SortOrder.Asc; }); - //Assertions + // Assertions assertNotNull(clientV3.toString()); assertEquals(clientV3.hashCode(), clientV3.hashCode()); assertEquals(2, searchResults.getCount()); - //Preparation + // Preparation clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).delete(); clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId2).delete(); { - //Execution + // Execution final ArtifactSearchResults deletedResults = clientV3.search().artifacts().get(config -> { config.queryParameters.groupId = groupId; config.queryParameters.offset = 0; @@ -381,28 +388,29 @@ public void testSmoke() throws Exception { config.queryParameters.orderby = ArtifactSortBy.Name; config.queryParameters.order = SortOrder.Asc; }); - //Assertion + // Assertion assertEquals(0, deletedResults.getCount()); } } @Test void testSearchArtifact() throws Exception { - //PReparation + // PReparation final String groupId = "testSearchArtifact"; clientV3.groups().byGroupId(groupId).artifacts().get(); String artifactId = 
UUID.randomUUID().toString(); String name = "n" + ThreadLocalRandom.current().nextInt(1000000); String artifactData = "{\"type\":\"record\",\"title\":\"" + name - + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, artifactData, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, + artifactData, ContentTypes.APPLICATION_JSON); createArtifact.setName(name); createArtifact.getFirstVersion().setName(name); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); - //Execution + // Execution ArtifactSearchResults results = clientV3.search().artifacts().get(config -> { config.queryParameters.name = name; config.queryParameters.offset = 0; @@ -411,13 +419,13 @@ void testSearchArtifact() throws Exception { config.queryParameters.order = SortOrder.Asc; }); - //Assertions + // Assertions Assertions.assertNotNull(results); Assertions.assertEquals(1, results.getCount()); Assertions.assertEquals(1, results.getArtifacts().size()); Assertions.assertEquals(name, results.getArtifacts().get(0).getName()); - // Try searching for *everything*. This test was added due to Issue #661 + // Try searching for *everything*. 
This test was added due to Issue #661 results = clientV3.search().artifacts().get(); Assertions.assertNotNull(results); Assertions.assertTrue(results.getCount() > 0); @@ -425,32 +433,37 @@ void testSearchArtifact() throws Exception { @Test void testSearchArtifactSortByCreatedOn() throws Exception { - //Preparation + // Preparation final String groupId = "testSearchArtifactSortByCreatedOn"; clientV3.groups().byGroupId(groupId).artifacts().get(); String artifactId = UUID.randomUUID().toString(); String name = "n" + ThreadLocalRandom.current().nextInt(1000000); - String data = ("{\"type\":\"record\",\"title\":\"" + name + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); + String data = ("{\"type\":\"record\",\"title\":\"" + name + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, data, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, data, + ContentTypes.APPLICATION_JSON); createArtifact.setName(name); createArtifact.getFirstVersion().setName(name); ArtifactContent content = new ArtifactContent(); content.setContent(data); - VersionMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + VersionMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); LOGGER.info("created " + amd.getArtifactId() + " - " + amd.getCreatedOn()); Thread.sleep(1500); String artifactId2 = UUID.randomUUID().toString(); - CreateArtifact createArtifact2 = TestUtils.clientCreateArtifact(artifactId2, ArtifactType.JSON, data, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact2 = TestUtils.clientCreateArtifact(artifactId2, ArtifactType.JSON, data, + ContentTypes.APPLICATION_JSON); createArtifact2.setName(name); - VersionMetaData amd2 = 
clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact2).getVersion(); + VersionMetaData amd2 = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact2) + .getVersion(); LOGGER.info("created " + amd2.getArtifactId() + " - " + amd2.getCreatedOn()); - //Execution + // Execution ArtifactSearchResults results = clientV3.search().artifacts().get(config -> { config.queryParameters.name = name; config.queryParameters.offset = 0; @@ -459,19 +472,21 @@ void testSearchArtifactSortByCreatedOn() throws Exception { config.queryParameters.order = SortOrder.Asc; }); - //Assertions + // Assertions Assertions.assertNotNull(results); Assertions.assertEquals(2, results.getCount()); Assertions.assertEquals(2, results.getArtifacts().size()); -// Assertions.assertEquals(name, results.getArtifacts().get(0).getName()); + // Assertions.assertEquals(name, results.getArtifacts().get(0).getName()); LOGGER.info("search"); - LOGGER.info(results.getArtifacts().get(0).getArtifactId() + " - " + results.getArtifacts().get(0).getCreatedOn()); - LOGGER.info(results.getArtifacts().get(1).getArtifactId() + " - " + results.getArtifacts().get(1).getCreatedOn()); + LOGGER.info(results.getArtifacts().get(0).getArtifactId() + " - " + + results.getArtifacts().get(0).getCreatedOn()); + LOGGER.info(results.getArtifacts().get(1).getArtifactId() + " - " + + results.getArtifacts().get(1).getCreatedOn()); Assertions.assertEquals(artifactId, results.getArtifacts().get(0).getArtifactId()); - // Try searching for *everything*. This test was added due to Issue #661 + // Try searching for *everything*. 
This test was added due to Issue #661 results = clientV3.search().artifacts().get(); Assertions.assertNotNull(results); Assertions.assertTrue(results.getCount() > 0); @@ -479,23 +494,28 @@ void testSearchArtifactSortByCreatedOn() throws Exception { @Test void testSearchArtifactByIds() throws Exception { - //PReparation + // PReparation final String groupId = "testSearchArtifactByIds"; clientV3.groups().byGroupId(groupId).artifacts().get(); String artifactId = UUID.randomUUID().toString(); String name = "n" + ThreadLocalRandom.current().nextInt(1000000); - String data = "{\"type\":\"record\",\"title\":\"" + name + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; + String data = "{\"type\":\"record\",\"title\":\"" + name + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, data, ContentTypes.APPLICATION_JSON); - VersionMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, data, + ContentTypes.APPLICATION_JSON); + VersionMetaData amd = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); LOGGER.info("created " + amd.getArtifactId() + " - " + amd.getCreatedOn()); Thread.sleep(1500); String artifactId2 = UUID.randomUUID().toString(); - CreateArtifact createArtifact2 = TestUtils.clientCreateArtifact(artifactId2, ArtifactType.JSON, data, ContentTypes.APPLICATION_JSON); - VersionMetaData amd2 = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact2).getVersion(); + CreateArtifact createArtifact2 = TestUtils.clientCreateArtifact(artifactId2, ArtifactType.JSON, data, + ContentTypes.APPLICATION_JSON); + VersionMetaData amd2 = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact2) + .getVersion(); LOGGER.info("created " + amd2.getArtifactId() + " - " + 
amd2.getCreatedOn()); ArtifactSearchResults results = clientV3.search().artifacts().get(config -> { @@ -524,35 +544,42 @@ void testSearchArtifactByIds() throws Exception { Assertions.assertEquals(2, resultsByContentId.getCount()); Assertions.assertEquals(2, resultsByContentId.getArtifacts().size()); - Assertions.assertEquals(2, resultsByContentId.getArtifacts().stream() - .filter(sa -> sa.getArtifactId().equals(amd.getArtifactId()) || sa.getArtifactId().equals(amd2.getArtifactId())) - .count()); + Assertions.assertEquals(2, + resultsByContentId.getArtifacts().stream() + .filter(sa -> sa.getArtifactId().equals(amd.getArtifactId()) + || sa.getArtifactId().equals(amd2.getArtifactId())) + .count()); } @Test void testSearchVersion() throws Exception { - //Preparation + // Preparation final String groupId = "testSearchVersion"; clientV3.groups().byGroupId(groupId).artifacts().get(); String artifactId = UUID.randomUUID().toString(); String name = "n" + ThreadLocalRandom.current().nextInt(1000000); - String artifactData = "{\"type\":\"record\",\"title\":\"" + name + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; + String artifactData = "{\"type\":\"record\",\"title\":\"" + name + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, artifactData, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, + artifactData, ContentTypes.APPLICATION_JSON); createArtifact.getFirstVersion().setName(name); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); - CreateVersion createVersion = TestUtils.clientCreateVersion(artifactData, ContentTypes.APPLICATION_JSON); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + CreateVersion createVersion = TestUtils.clientCreateVersion(artifactData, + ContentTypes.APPLICATION_JSON); + 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); - //Execution - VersionSearchResults results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().get(config -> { - config.queryParameters.offset = 0; - config.queryParameters.limit = 2; - }); + // Execution + VersionSearchResults results = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().get(config -> { + config.queryParameters.offset = 0; + config.queryParameters.limit = 2; + }); - //Assertions + // Assertions Assertions.assertNotNull(results); Assertions.assertEquals(2, results.getCount()); Assertions.assertEquals(2, results.getVersions().size()); @@ -561,7 +588,7 @@ void testSearchVersion() throws Exception { @Test void testSearchDisabledArtifacts() throws Exception { - //Preparation + // Preparation final String groupId = "testSearchDisabledArtifacts"; clientV3.groups().byGroupId(groupId).artifacts().get(); String root = "testSearchDisabledArtifact" + ThreadLocalRandom.current().nextInt(1000000); @@ -571,16 +598,19 @@ void testSearchDisabledArtifacts() throws Exception { for (int i = 0; i < 5; i++) { String artifactId = root + UUID.randomUUID().toString(); String name = root + i; - String artifactData = "{\"type\":\"record\",\"title\":\"" + name + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; + String artifactData = "{\"type\":\"record\",\"title\":\"" + name + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, artifactData, ContentTypes.APPLICATION_JSON); - VersionMetaData md = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, + artifactData, ContentTypes.APPLICATION_JSON); + VersionMetaData md = 
clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); artifactIds.add(artifactId); versions.add(md.getVersion()); } - //Execution + // Execution ArtifactSearchResults results = clientV3.search().artifacts().get(config -> { config.queryParameters.name = root; config.queryParameters.offset = 0; @@ -589,24 +619,25 @@ void testSearchDisabledArtifacts() throws Exception { config.queryParameters.order = SortOrder.Asc; }); -// clientV2.searchArtifacts(null, root, null, null, null, ArtifactSortBy.name, SortOrder.asc, 0, 10); + // clientV2.searchArtifacts(null, root, null, null, null, ArtifactSortBy.name, SortOrder.asc, 0, 10); - //Assertions + // Assertions Assertions.assertNotNull(results); Assertions.assertEquals(5, results.getCount()); Assertions.assertEquals(5, results.getArtifacts().size()); - Assertions.assertTrue(results.getArtifacts().stream() - .map(SearchedArtifact::getArtifactId) + Assertions.assertTrue(results.getArtifacts().stream().map(SearchedArtifact::getArtifactId) .collect(Collectors.toList()).containsAll(artifactIds)); - //Preparation + // Preparation // Put 2 of the 5 artifacts in DISABLED state EditableVersionMetaData eamd = new EditableVersionMetaData(); eamd.setState(VersionState.DISABLED); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIds.get(0)).versions().byVersionExpression("1").put(eamd); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIds.get(3)).versions().byVersionExpression("1").put(eamd); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIds.get(0)).versions() + .byVersionExpression("1").put(eamd); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactIds.get(3)).versions() + .byVersionExpression("1").put(eamd); - //Execution + // Execution // Check the search results still include the DISABLED artifacts results = clientV3.search().artifacts().get(config -> { config.queryParameters.name = root; @@ -616,118 +647,129 @@ 
void testSearchDisabledArtifacts() throws Exception { config.queryParameters.order = SortOrder.Asc; }); - //Assertions + // Assertions Assertions.assertNotNull(results); Assertions.assertEquals(5, results.getCount()); Assertions.assertEquals(5, results.getArtifacts().size()); - Assertions.assertTrue(results.getArtifacts().stream() - .map(SearchedArtifact::getArtifactId) + Assertions.assertTrue(results.getArtifacts().stream().map(SearchedArtifact::getArtifactId) .collect(Collectors.toList()).containsAll(artifactIds)); } @Test void testSearchDisabledVersions() throws Exception { - //Preparation + // Preparation final String groupId = "testSearchDisabledVersions"; clientV3.groups().byGroupId(groupId).artifacts().get(); String artifactId = UUID.randomUUID().toString(); String name = "testSearchDisabledVersions" + ThreadLocalRandom.current().nextInt(1000000); - String artifactData = "{\"type\":\"record\",\"title\":\"" + name + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; + String artifactData = "{\"type\":\"record\",\"title\":\"" + name + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, artifactData, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, + artifactData, ContentTypes.APPLICATION_JSON); createArtifact.getFirstVersion().setName(name); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); - CreateVersion createVersion = TestUtils.clientCreateVersion(artifactData, ContentTypes.APPLICATION_JSON); + CreateVersion createVersion = TestUtils.clientCreateVersion(artifactData, + ContentTypes.APPLICATION_JSON); createVersion.setName(name); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); - - 
//Execution - VersionSearchResults results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().get(config -> { - config.queryParameters.offset = 0; - config.queryParameters.limit = 5; - }); - - //Assertions + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); + + // Execution + VersionSearchResults results = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().get(config -> { + config.queryParameters.offset = 0; + config.queryParameters.limit = 5; + }); + + // Assertions Assertions.assertNotNull(results); Assertions.assertEquals(3, results.getCount()); Assertions.assertEquals(3, results.getVersions().size()); Assertions.assertTrue(results.getVersions().stream() - .allMatch(searchedVersion -> name.equals(searchedVersion.getName()) && VersionState.ENABLED.equals(searchedVersion.getState()))); + .allMatch(searchedVersion -> name.equals(searchedVersion.getName()) + && VersionState.ENABLED.equals(searchedVersion.getState()))); - //Preparation + // Preparation // Put 2 of the 3 versions in DISABLED state EditableVersionMetaData evmd = new EditableVersionMetaData(); evmd.setState(VersionState.DISABLED); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("1").put(evmd); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("3").put(evmd); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("1").put(evmd); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("3").put(evmd); - //Execution + // Execution // Check that the search results still include the DISABLED versions - results = 
clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().get(config -> { - config.queryParameters.offset = 0; - config.queryParameters.limit = 5; - }); + results = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .get(config -> { + config.queryParameters.offset = 0; + config.queryParameters.limit = 5; + }); - //Assertions + // Assertions Assertions.assertNotNull(results); Assertions.assertEquals(3, results.getCount()); Assertions.assertEquals(3, results.getVersions().size()); Assertions.assertTrue(results.getVersions().stream() .allMatch(searchedVersion -> name.equals(searchedVersion.getName()))); Assertions.assertEquals(2, results.getVersions().stream() - .filter(searchedVersion -> VersionState.DISABLED.equals(searchedVersion.getState())) - .count()); + .filter(searchedVersion -> VersionState.DISABLED.equals(searchedVersion.getState())).count()); Assertions.assertEquals(1, results.getVersions().stream() - .filter(searchedVersion -> VersionState.ENABLED.equals(searchedVersion.getState())) - .count()); + .filter(searchedVersion -> VersionState.ENABLED.equals(searchedVersion.getState())).count()); } - @Test - public void testLabels() throws Exception { - //Preparation - final String groupId = "testLabels"; - String artifactId = generateArtifactId(); - try { - - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, "{\"name\":\"redhat\"}", ContentTypes.APPLICATION_JSON); - clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); - - EditableArtifactMetaData emd = new EditableArtifactMetaData(); - emd.setName("testProperties"); - - final Map artifactLabels = new HashMap<>(); - artifactLabels.put("extraProperty1", "value for extra property 1"); - artifactLabels.put("extraProperty2", "value for extra property 2"); - artifactLabels.put("extraProperty3", "value for extra property 3"); - var labels = new Labels(); - 
labels.setAdditionalData(artifactLabels); - emd.setLabels(labels); - - //Execution - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).put(emd); - - //Assertions - { - ArtifactMetaData artifactMetaData = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); - Assertions.assertNotNull(artifactMetaData); - Assertions.assertEquals("testProperties", artifactMetaData.getName()); - Assertions.assertEquals(3, artifactMetaData.getLabels().getAdditionalData().size()); - Assertions.assertTrue(artifactMetaData.getLabels().getAdditionalData().keySet().containsAll(artifactLabels.keySet())); - for (String key : artifactMetaData.getLabels().getAdditionalData().keySet()) { - assertEquals(artifactMetaData.getLabels().getAdditionalData().get(key), artifactLabels.get(key)); - } - } - } finally { - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).delete(); - } - } + @Test + public void testLabels() throws Exception { + // Preparation + final String groupId = "testLabels"; + String artifactId = generateArtifactId(); + try { + + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, + "{\"name\":\"redhat\"}", ContentTypes.APPLICATION_JSON); + clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); + + EditableArtifactMetaData emd = new EditableArtifactMetaData(); + emd.setName("testProperties"); + + final Map artifactLabels = new HashMap<>(); + artifactLabels.put("extraProperty1", "value for extra property 1"); + artifactLabels.put("extraProperty2", "value for extra property 2"); + artifactLabels.put("extraProperty3", "value for extra property 3"); + var labels = new Labels(); + labels.setAdditionalData(artifactLabels); + emd.setLabels(labels); + + // Execution + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).put(emd); + + // Assertions + { + ArtifactMetaData artifactMetaData = clientV3.groups().byGroupId(groupId).artifacts() + 
.byArtifactId(artifactId).get(); + Assertions.assertNotNull(artifactMetaData); + Assertions.assertEquals("testProperties", artifactMetaData.getName()); + Assertions.assertEquals(3, artifactMetaData.getLabels().getAdditionalData().size()); + Assertions.assertTrue(artifactMetaData.getLabels().getAdditionalData().keySet() + .containsAll(artifactLabels.keySet())); + for (String key : artifactMetaData.getLabels().getAdditionalData().keySet()) { + assertEquals(artifactMetaData.getLabels().getAdditionalData().get(key), + artifactLabels.get(key)); + } + } + } finally { + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).delete(); + } + } @Test void nameOrderingTest() throws Exception { - //Preparation + // Preparation final String groupId = "nameOrderingTest"; final String firstArtifactId = generateArtifactId(); final String secondArtifactId = generateArtifactId(); @@ -738,31 +780,37 @@ void nameOrderingTest() throws Exception { // Create artifact 1 String firstName = "aaaTestorder" + ThreadLocalRandom.current().nextInt(1000000); - String artifactData = "{\"type\":\"record\",\"title\":\"" + firstName + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - CreateArtifact createArtifact1 = TestUtils.clientCreateArtifact(firstArtifactId, ArtifactType.JSON, artifactData, ContentTypes.APPLICATION_JSON); + String artifactData = "{\"type\":\"record\",\"title\":\"" + firstName + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; + CreateArtifact createArtifact1 = TestUtils.clientCreateArtifact(firstArtifactId, + ArtifactType.JSON, artifactData, ContentTypes.APPLICATION_JSON); createArtifact1.setName(firstName); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact1); // Create artifact 2 String secondName = "bbbTestorder" + ThreadLocalRandom.current().nextInt(1000000); - String secondData = "{\"type\":\"record\",\"title\":\"" + secondName + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - CreateArtifact 
createArtifact2 = TestUtils.clientCreateArtifact(secondArtifactId, ArtifactType.JSON, secondData, ContentTypes.APPLICATION_JSON); + String secondData = "{\"type\":\"record\",\"title\":\"" + secondName + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; + CreateArtifact createArtifact2 = TestUtils.clientCreateArtifact(secondArtifactId, + ArtifactType.JSON, secondData, ContentTypes.APPLICATION_JSON); createArtifact2.setName(secondName); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact2); // Create artifact 3 String thirdData = "{\"openapi\":\"3.0.2\",\"info\":{\"description\":\"testorder\"}}"; - CreateArtifact createArtifact3 = TestUtils.clientCreateArtifact(thirdArtifactId, ArtifactType.JSON, thirdData, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact3 = TestUtils.clientCreateArtifact(thirdArtifactId, + ArtifactType.JSON, thirdData, ContentTypes.APPLICATION_JSON); createArtifact3.setDescription("testorder"); clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact3); { - ArtifactMetaData artifactMetaData = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(thirdArtifactId).get(); + ArtifactMetaData artifactMetaData = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(thirdArtifactId).get(); Assertions.assertNotNull(artifactMetaData); Assertions.assertEquals("testorder", artifactMetaData.getDescription()); } - //Execution + // Execution ArtifactSearchResults ascResults = clientV3.search().artifacts().get(config -> { config.queryParameters.offset = 0; config.queryParameters.limit = 10; @@ -772,7 +820,7 @@ void nameOrderingTest() throws Exception { config.queryParameters.order = SortOrder.Asc; }); - //Assertions + // Assertions Assertions.assertNotNull(ascResults); Assertions.assertEquals(3, ascResults.getCount()); Assertions.assertEquals(3, ascResults.getArtifacts().size()); @@ -780,7 +828,7 @@ void nameOrderingTest() throws Exception { Assertions.assertEquals(secondName, 
ascResults.getArtifacts().get(1).getName()); Assertions.assertNull(ascResults.getArtifacts().get(2).getName()); - //Execution + // Execution ArtifactSearchResults descResults = clientV3.search().artifacts().get(config -> { config.queryParameters.offset = 0; config.queryParameters.limit = 10; @@ -790,7 +838,7 @@ void nameOrderingTest() throws Exception { config.queryParameters.order = SortOrder.Desc; }); - //Assertions + // Assertions Assertions.assertNotNull(descResults); Assertions.assertEquals(3, descResults.getCount()); Assertions.assertEquals(3, descResults.getArtifacts().size()); @@ -807,47 +855,50 @@ void nameOrderingTest() throws Exception { @Test public void getLatestArtifact() throws Exception { - //Preparation + // Preparation final String groupId = "getLatestArtifact"; final String artifactId = generateArtifactId(); createArtifact(groupId, artifactId); - //Execution - InputStream amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get(); + // Execution + InputStream amd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get(); - //Assertions + // Assertions assertNotNull(amd); assertEquals(ARTIFACT_CONTENT, IoUtil.toString(amd)); } @Test public void getContentById() throws Exception { - //Preparation + // Preparation final String groupId = "getContentById"; final String artifactId = generateArtifactId(); VersionMetaData amd = createArtifact(groupId, artifactId); assertNotNull(amd.getContentId()); - //Execution + // Execution InputStream content = clientV3.ids().contentIds().byContentId(amd.getContentId()).get(); - //Assertions + // Assertions assertNotNull(content); assertEquals(ARTIFACT_CONTENT, IOUtils.toString(content, StandardCharsets.UTF_8)); } @Test public void testArtifactNotFound() { - var exception = Assertions.assertThrows(io.apicurio.registry.rest.client.models.Error.class, 
() -> clientV3.groups().byGroupId(UUID.randomUUID().toString()).artifacts().byArtifactId(UUID.randomUUID().toString()).get()); + var exception = Assertions.assertThrows(io.apicurio.registry.rest.client.models.Error.class, + () -> clientV3.groups().byGroupId(UUID.randomUUID().toString()).artifacts() + .byArtifactId(UUID.randomUUID().toString()).get()); Assertions.assertEquals(404, exception.getErrorCode()); Assertions.assertEquals("ArtifactNotFoundException", exception.getName()); } @Test public void getContentByHash() throws Exception { - //Preparation + // Preparation final String groupId = "getContentByHash"; final String artifactId = generateArtifactId(); @@ -855,16 +906,16 @@ public void getContentByHash() throws Exception { createArtifact(groupId, artifactId); - //Execution + // Execution InputStream content = clientV3.ids().contentHashes().byContentHash(contentHash).get(); assertNotNull(content); - //Assertions + // Assertions String artifactContent = IOUtils.toString(content, StandardCharsets.UTF_8); assertEquals(ARTIFACT_CONTENT, artifactContent); - - //Create a second artifact using the same content but with a reference, the hash must be different but it should work. + // Create a second artifact using the same content but with a reference, the hash must be different + // but it should work. 
var secondArtifactId = generateArtifactId(); var artifactReference = new ArtifactReference(); @@ -877,40 +928,42 @@ public void getContentByHash() throws Exception { createArtifactWithReferences(groupId, secondArtifactId, artifactReferences); - String referencesSerialized = SqlUtil.serializeReferences(toReferenceDtos(artifactReferences.stream().map(r -> { - var ref = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); - ref.setArtifactId(r.getArtifactId()); - ref.setGroupId(r.getGroupId()); - ref.setName(r.getName()); - ref.setVersion(r.getVersion()); - return ref; - }).collect(Collectors.toList()))); + String referencesSerialized = SqlUtil + .serializeReferences(toReferenceDtos(artifactReferences.stream().map(r -> { + var ref = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); + ref.setArtifactId(r.getArtifactId()); + ref.setGroupId(r.getGroupId()); + ref.setName(r.getName()); + ref.setVersion(r.getVersion()); + return ref; + }).collect(Collectors.toList()))); - contentHash = DigestUtils.sha256Hex(concatContentAndReferences(ARTIFACT_CONTENT.getBytes(StandardCharsets.UTF_8), referencesSerialized)); + contentHash = DigestUtils.sha256Hex(concatContentAndReferences( + ARTIFACT_CONTENT.getBytes(StandardCharsets.UTF_8), referencesSerialized)); - //Execution + // Execution content = clientV3.ids().contentHashes().byContentHash(contentHash).get(); assertNotNull(content); - //Assertions + // Assertions artifactContent = IOUtils.toString(content, StandardCharsets.UTF_8); assertEquals(ARTIFACT_CONTENT, artifactContent); } @Test public void getContentByGlobalId() throws Exception { - //Preparation + // Preparation final String groupId = "getContentByGlobalId"; final String artifactId = generateArtifactId(); VersionMetaData amd = createArtifact(groupId, artifactId); - //Execution + // Execution { InputStream content = clientV3.ids().globalIds().byGlobalId(amd.getGlobalId()).get(); assertNotNull(content); - //Assertions + // Assertions String artifactContent = 
IOUtils.toString(content, StandardCharsets.UTF_8); assertEquals(ARTIFACT_CONTENT, artifactContent); } @@ -918,36 +971,40 @@ public void getContentByGlobalId() throws Exception { @Test public void listArtifactRules() throws Exception { - //Preparation + // Preparation final String groupId = "listArtifactRules"; final String artifactId = generateArtifactId(); createArtifact(groupId, artifactId); - final List emptyRules = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().get(); + final List emptyRules = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).rules().get(); - //Assertions + // Assertions assertNotNull(emptyRules); assertTrue(emptyRules.isEmpty()); - //Execution - createArtifactRule(groupId, artifactId, io.apicurio.registry.types.RuleType.COMPATIBILITY, "BACKWARD"); + // Execution + createArtifactRule(groupId, artifactId, io.apicurio.registry.types.RuleType.COMPATIBILITY, + "BACKWARD"); - final List ruleTypes = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().get(); + final List ruleTypes = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).rules().get(); - //Assertions + // Assertions assertNotNull(ruleTypes); assertFalse(ruleTypes.isEmpty()); } @Test public void testCompatibilityWithReferences() throws Exception { - //Preparation + // Preparation final String groupId = "testCompatibilityWithReferences"; final String artifactId = generateArtifactId(); - //First create the references schema - createArtifact(groupId, artifactId, ArtifactType.AVRO, REFERENCED_SCHEMA, ContentTypes.APPLICATION_JSON); + // First create the references schema + createArtifact(groupId, artifactId, ArtifactType.AVRO, REFERENCED_SCHEMA, + ContentTypes.APPLICATION_JSON); io.apicurio.registry.rest.v3.beans.ArtifactReference artifactReference = new io.apicurio.registry.rest.v3.beans.ArtifactReference(); artifactReference.setArtifactId(artifactId); @@ -956,12 +1013,15 @@ 
public void testCompatibilityWithReferences() throws Exception { artifactReference.setName("com.example.common.ItemId"); final String secondArtifactId = generateArtifactId(); - createArtifactWithReferences(groupId, secondArtifactId, ArtifactType.AVRO, SCHEMA_WITH_REFERENCE, ContentTypes.APPLICATION_JSON, List.of(artifactReference)); + createArtifactWithReferences(groupId, secondArtifactId, ArtifactType.AVRO, SCHEMA_WITH_REFERENCE, + ContentTypes.APPLICATION_JSON, List.of(artifactReference)); - //Create rule - createArtifactRule(groupId, secondArtifactId, io.apicurio.registry.types.RuleType.COMPATIBILITY, "BACKWARD"); + // Create rule + createArtifactRule(groupId, secondArtifactId, io.apicurio.registry.types.RuleType.COMPATIBILITY, + "BACKWARD"); - createArtifactVersionExtendedRaw(groupId, secondArtifactId, SCHEMA_WITH_REFERENCE, ContentTypes.APPLICATION_JSON, List.of(artifactReference)); + createArtifactVersionExtendedRaw(groupId, secondArtifactId, SCHEMA_WITH_REFERENCE, + ContentTypes.APPLICATION_JSON, List.of(artifactReference)); clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).delete(); clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(secondArtifactId).delete(); @@ -969,45 +1029,48 @@ public void testCompatibilityWithReferences() throws Exception { @Test public void deleteArtifactRules() throws Exception { - //Preparation + // Preparation final String groupId = "deleteArtifactRules"; final String artifactId = generateArtifactId(); prepareRuleTest(groupId, artifactId, io.apicurio.registry.types.RuleType.COMPATIBILITY, "BACKWARD"); - //Execution + // Execution clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().delete(); - //Assertions - final List emptyRules = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().get(); + // Assertions + final List emptyRules = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).rules().get(); 
assertNotNull(emptyRules); assertTrue(emptyRules.isEmpty()); } @Test public void getArtifactRuleConfig() throws Exception { - //Preparation + // Preparation final String groupId = "getArtifactRuleConfig"; final String artifactId = generateArtifactId(); prepareRuleTest(groupId, artifactId, io.apicurio.registry.types.RuleType.COMPATIBILITY, "BACKWARD"); - //Execution - final Rule rule = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().byRuleType(RuleType.COMPATIBILITY.name()).get(); - //Assertions + // Execution + final Rule rule = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules() + .byRuleType(RuleType.COMPATIBILITY.name()).get(); + // Assertions assertNotNull(rule); assertEquals("BACKWARD", rule.getConfig()); } @Test public void updateArtifactRuleConfig() throws Exception { - //Preparation + // Preparation final String groupId = "updateArtifactRuleConfig"; final String artifactId = generateArtifactId(); prepareRuleTest(groupId, artifactId, io.apicurio.registry.types.RuleType.COMPATIBILITY, "BACKWARD"); - final Rule rule = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().byRuleType(RuleType.COMPATIBILITY.name()).get(); + final Rule rule = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules() + .byRuleType(RuleType.COMPATIBILITY.name()).get(); assertNotNull(rule); assertEquals("BACKWARD", rule.getConfig()); @@ -1015,10 +1078,11 @@ public void updateArtifactRuleConfig() throws Exception { toUpdate.setRuleType(RuleType.COMPATIBILITY); toUpdate.setConfig("FULL"); - //Execution - final Rule updated = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().byRuleType(RuleType.COMPATIBILITY.name()).put(toUpdate); + // Execution + final Rule updated = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules() + .byRuleType(RuleType.COMPATIBILITY.name()).put(toUpdate); - //Assertions + // Assertions 
assertNotNull(updated); assertEquals("FULL", updated.getConfig()); } @@ -1026,7 +1090,7 @@ public void updateArtifactRuleConfig() throws Exception { @Test public void testUpdateArtifact() throws Exception { - //Preparation + // Preparation final String groupId = "testUpdateArtifact"; final String artifactId = generateArtifactId(); @@ -1036,17 +1100,21 @@ public void testUpdateArtifact() throws Exception { final String name = "testUpdateArtifactName"; final String description = "testUpdateArtifactDescription"; - //Execution - CreateVersion createVersion = TestUtils.clientCreateVersion(updatedContent, ContentTypes.APPLICATION_JSON); + // Execution + CreateVersion createVersion = TestUtils.clientCreateVersion(updatedContent, + ContentTypes.APPLICATION_JSON); createVersion.setName(name); createVersion.setDescription(description); createVersion.setVersion(version); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); - //Assertions - assertEquals(updatedContent, IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get())); + // Assertions + assertEquals(updatedContent, IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get())); - VersionMetaData artifactMetaData = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData artifactMetaData = clientV3.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); assertNotNull(artifactMetaData); assertEquals(version, artifactMetaData.getVersion()); assertEquals(name, artifactMetaData.getName()); @@ -1055,7 +1123,7 @@ 
public void testUpdateArtifact() throws Exception { @Test public void testUpdateYamlArtifact() throws Exception { - //Preparation + // Preparation final String groupId = "testUpdateYamlArtifact"; final String artifactId = generateArtifactId(); @@ -1064,17 +1132,22 @@ public void testUpdateYamlArtifact() throws Exception { final String name = "testUpdateYamlArtifactName"; final String description = "testUpdateYamlArtifactDescription"; - //Execution - CreateVersion createVersion = TestUtils.clientCreateVersion(UPDATED_OPENAPI_YAML_CONTENT, ContentTypes.APPLICATION_YAML); + // Execution + CreateVersion createVersion = TestUtils.clientCreateVersion(UPDATED_OPENAPI_YAML_CONTENT, + ContentTypes.APPLICATION_YAML); createVersion.setName(name); createVersion.setDescription(description); createVersion.setVersion(version); - clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(createVersion); - //Assertions - assertMultilineTextEquals(UPDATED_OPENAPI_YAML_CONTENT, IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get())); + // Assertions + assertMultilineTextEquals(UPDATED_OPENAPI_YAML_CONTENT, + IoUtil.toString(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").content().get())); - VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData vmd = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").get(); assertNotNull(vmd); assertEquals(version, vmd.getVersion()); assertEquals(name, vmd.getName()); @@ -1083,7 +1156,7 @@ public void testUpdateYamlArtifact() throws Exception { @Test 
public void deleteArtifactsInGroup() throws Exception { - //Preparation + // Preparation final String groupId = "deleteArtifactsInGroup"; final String firstArtifactId = generateArtifactId(); final String secondArtifactId = generateArtifactId(); @@ -1094,19 +1167,19 @@ public void deleteArtifactsInGroup() throws Exception { assertFalse(searchResults.getArtifacts().isEmpty()); assertEquals(2, (int) searchResults.getCount()); - //Execution + // Execution clientV3.groups().byGroupId(groupId).artifacts().delete(); final ArtifactSearchResults deleted = clientV3.groups().byGroupId(groupId).artifacts().get(); - //Assertions + // Assertions assertTrue(deleted.getArtifacts().isEmpty()); assertEquals(0, (int) deleted.getCount()); } @Test public void searchArtifactsByContent() throws Exception { - //Preparation + // Preparation final String groupId = "searchArtifactsByContent"; final String firstArtifactId = generateArtifactId(); final String secondArtifactId = generateArtifactId(); @@ -1115,10 +1188,11 @@ public void searchArtifactsByContent() throws Exception { createArtifact(groupId, firstArtifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON); createArtifact(groupId, secondArtifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON); - //Execution - final ArtifactSearchResults searchResults = clientV3.search().artifacts().post(IoUtil.toStream(content), "application/create.extended+json"); + // Execution + final ArtifactSearchResults searchResults = clientV3.search().artifacts() + .post(IoUtil.toStream(content), "application/create.extended+json"); - //Assertions + // Assertions assertEquals(2, searchResults.getCount()); } @@ -1139,48 +1213,51 @@ public void smokeGlobalRules() throws Exception { @Test public void getGlobalRuleConfig() throws Exception { - //Preparation + // Preparation createGlobalRule(io.apicurio.registry.types.RuleType.COMPATIBILITY, "BACKWARD"); - //Execution - final Rule globalRuleConfig = 
clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()).get(); - //Assertions + // Execution + final Rule globalRuleConfig = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()) + .get(); + // Assertions assertEquals(globalRuleConfig.getConfig(), "BACKWARD"); } @Test public void updateGlobalRuleConfig() throws Exception { - //Preparation + // Preparation createGlobalRule(io.apicurio.registry.types.RuleType.COMPATIBILITY, "BACKWARD"); - final Rule globalRuleConfig = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()).get(); + final Rule globalRuleConfig = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()) + .get(); assertEquals(globalRuleConfig.getConfig(), "BACKWARD"); final Rule toUpdate = new Rule(); toUpdate.setRuleType(RuleType.COMPATIBILITY); toUpdate.setConfig("FORWARD"); - //Execution + // Execution final Rule updated = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()).put(toUpdate); - //Assertions + // Assertions assertEquals(updated.getConfig(), "FORWARD"); } @Test public void deleteGlobalRule() throws Exception { - //Preparation + // Preparation createGlobalRule(io.apicurio.registry.types.RuleType.COMPATIBILITY, "BACKWARD"); - final Rule globalRuleConfig = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()).get(); + final Rule globalRuleConfig = clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()) + .get(); assertEquals(globalRuleConfig.getConfig(), "BACKWARD"); - //Execution + // Execution clientV3.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()).delete(); final List ruleTypes = clientV3.admin().rules().get(); - //Assertions + // Assertions assertEquals(0, ruleTypes.size()); } @@ -1204,65 +1281,68 @@ public void testDefaultGroup() throws Exception { }); SearchedArtifact artifact1 = result.getArtifacts().stream() - .filter(s -> s.getArtifactId().equals(artifactId1)) - .findFirst() - .orElseThrow(); + .filter(s -> 
s.getArtifactId().equals(artifactId1)).findFirst().orElseThrow(); assertNull(artifact1.getGroupId()); SearchedArtifact artifact2 = result.getArtifacts().stream() - .filter(s -> s.getArtifactId().equals(artifactId2)) - .findFirst() - .orElseThrow(); + .filter(s -> s.getArtifactId().equals(artifactId2)).findFirst().orElseThrow(); assertNull(artifact2.getGroupId()); SearchedArtifact artifact3 = result.getArtifacts().stream() - .filter(s -> s.getArtifactId().equals(artifactId3)) - .findFirst() - .orElseThrow(); + .filter(s -> s.getArtifactId().equals(artifactId3)).findFirst().orElseThrow(); assertEquals(dummyGroup, artifact3.getGroupId()); } private void verifyGroupNullInMetadata(String artifactId, String content) throws Exception { - ArtifactMetaData meta = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).get(); + ArtifactMetaData meta = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()) + .artifacts().byArtifactId(artifactId).get(); assertTrue(new GroupId(meta.getGroupId()).isDefaultGroup()); - VersionMetaData vmeta = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData vmeta = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()) + .artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); assertTrue(new GroupId(vmeta.getGroupId()).isDefaultGroup()); - vmeta = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).versions().byVersionExpression(vmeta.getVersion()).get(); + vmeta = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression(vmeta.getVersion()).get(); assertTrue(new GroupId(vmeta.getGroupId()).isDefaultGroup()); 
CreateVersion createVersion = TestUtils.clientCreateVersion(content, ContentTypes.APPLICATION_JSON); - vmeta = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().byArtifactId(artifactId).versions().post(createVersion); + vmeta = clientV3.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts() + .byArtifactId(artifactId).versions().post(createVersion); assertTrue(new GroupId(vmeta.getGroupId()).isDefaultGroup()); - clientV3.groups().byGroupId("default").artifacts().get().getArtifacts() - .stream() + clientV3.groups().byGroupId("default").artifacts().get().getArtifacts().stream() .filter(s -> s.getArtifactId().equals(artifactId)) .forEach(s -> assertTrue(new GroupId(s.getGroupId()).isDefaultGroup())); } private VersionMetaData createArtifact(String groupId, String artifactId) throws Exception { - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, + ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON); createArtifact.setName(artifactId); - final VersionMetaData created = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + final VersionMetaData created = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); return checkArtifact(groupId, artifactId, created); } - private VersionMetaData createArtifactWithReferences(String groupId, String artifactId, List artifactReferences) throws Exception { - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON); + private VersionMetaData createArtifactWithReferences(String groupId, String artifactId, + List artifactReferences) throws Exception { + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, 
ArtifactType.JSON, + ARTIFACT_CONTENT, ContentTypes.APPLICATION_JSON); createArtifact.getFirstVersion().getContent().setReferences(artifactReferences); createArtifact.setName(artifactId); - final VersionMetaData created = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + final VersionMetaData created = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); return checkArtifact(groupId, artifactId, created); } - private VersionMetaData checkArtifact(String groupId, String artifactId, VersionMetaData created) throws Exception { + private VersionMetaData checkArtifact(String groupId, String artifactId, VersionMetaData created) + throws Exception { assertNotNull(created); if (new GroupId(groupId).isDefaultGroup()) { assertNull(created.getGroupId()); @@ -1274,13 +1354,17 @@ private VersionMetaData checkArtifact(String groupId, String artifactId, Version return created; } - private @NotNull VersionMetaData createOpenAPIArtifact(String groupId, String artifactId) throws Exception { - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, ARTIFACT_OPENAPI_JSON_CONTENT, ContentTypes.APPLICATION_JSON); - final VersionMetaData created = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + private @NotNull VersionMetaData createOpenAPIArtifact(String groupId, String artifactId) + throws Exception { + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.OPENAPI, + ARTIFACT_OPENAPI_JSON_CONTENT, ContentTypes.APPLICATION_JSON); + final VersionMetaData created = clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); return checkArtifact(groupId, artifactId, created); } - private void prepareRuleTest(String groupId, String artifactId, io.apicurio.registry.types.RuleType ruleType, String ruleConfig) throws Exception { + private void prepareRuleTest(String groupId, String 
artifactId, + io.apicurio.registry.types.RuleType ruleType, String ruleConfig) throws Exception { createArtifact(groupId, artifactId); createArtifactRule(groupId, artifactId, ruleType, ruleConfig); } @@ -1376,10 +1460,12 @@ public void testConfigProperties() throws Exception { // Start with all default values List configProperties = clientV3.admin().config().properties().get(); Assertions.assertFalse(configProperties.isEmpty()); - Optional anonymousRead = configProperties.stream().filter(cp -> cp.getName().equals(property1Name)).findFirst(); + Optional anonymousRead = configProperties.stream() + .filter(cp -> cp.getName().equals(property1Name)).findFirst(); Assertions.assertTrue(anonymousRead.isPresent()); Assertions.assertEquals("false", anonymousRead.get().getValue()); - Optional obacLimit = configProperties.stream().filter(cp -> cp.getName().equals(property2Name)).findFirst(); + Optional obacLimit = configProperties.stream() + .filter(cp -> cp.getName().equals(property2Name)).findFirst(); Assertions.assertTrue(obacLimit.isPresent()); Assertions.assertEquals("true", obacLimit.get().getValue()); @@ -1389,7 +1475,8 @@ public void testConfigProperties() throws Exception { clientV3.admin().config().properties().byPropertyName(property1Name).put(updateProp); // Verify the property was set. 
- ConfigurationProperty prop = clientV3.admin().config().properties().byPropertyName(property1Name).get(); + ConfigurationProperty prop = clientV3.admin().config().properties().byPropertyName(property1Name) + .get(); Assertions.assertEquals(property1Name, prop.getName()); Assertions.assertEquals("true", prop.getValue()); @@ -1406,7 +1493,7 @@ public void testConfigProperties() throws Exception { prop = clientV3.admin().config().properties().byPropertyName(property2Name).get(); Assertions.assertEquals(property2Name, prop.getName()); Assertions.assertEquals("false", prop.getValue()); - + properties = clientV3.admin().config().properties().get(); prop = properties.stream().filter(cp -> cp.getName().equals(property2Name)).findFirst().get(); Assertions.assertEquals("false", prop.getValue()); @@ -1430,7 +1517,7 @@ public void testConfigProperties() throws Exception { prop = clientV3.admin().config().properties().byPropertyName(property1Name).get(); Assertions.assertEquals(property1Name, prop.getName()); Assertions.assertEquals("false", prop.getValue()); - + properties = clientV3.admin().config().properties().get(); prop = properties.stream().filter(cp -> cp.getName().equals(property1Name)).findFirst().get(); Assertions.assertEquals(property1Name, prop.getName()); @@ -1448,7 +1535,8 @@ public void testConfigProperties() throws Exception { // Try to set a Long property to "foobar" (should be invalid type) var exception2 = Assertions.assertThrows(ApiException.class, () -> { updateProp.setValue("foobar"); - clientV3.admin().config().properties().byPropertyName("apicurio.download.href.ttl.seconds").put(updateProp); + clientV3.admin().config().properties().byPropertyName("apicurio.download.href.ttl.seconds") + .put(updateProp); }); // InvalidPropertyValueException Assertions.assertEquals(400, exception2.getResponseStatusCode()); @@ -1461,14 +1549,16 @@ public void testForceArtifactType() throws Exception { String groupId = TestUtils.generateGroupId(); String artifactId = 
TestUtils.generateArtifactId(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, artifactContent, ContentTypes.APPLICATION_JSON); - /*var postReq = */clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, + artifactContent, ContentTypes.APPLICATION_JSON); + /* var postReq = */clientV3.groups().byGroupId(groupId).artifacts().post(createArtifact); var meta = clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); assertEquals(ArtifactType.AVRO, meta.getArtifactType()); - assertTrue(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); + assertTrue(clientV3.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); } @Test @@ -1478,16 +1568,22 @@ public void testClientRateLimitError() { try { var adapter = new VertXRequestAdapter(VertXAuthFactory.defaultVertx); adapter.setBaseUrl(mock.getMockUrl()); - io.apicurio.registry.rest.client.RegistryClient client = new io.apicurio.registry.rest.client.RegistryClient(adapter); + io.apicurio.registry.rest.client.RegistryClient client = new io.apicurio.registry.rest.client.RegistryClient( + adapter); - var execution1 = Assertions.assertThrows(ApiException.class, () -> client.groups().byGroupId("test").artifacts().byArtifactId("test").get()); + var execution1 = Assertions.assertThrows(ApiException.class, + () -> client.groups().byGroupId("test").artifacts().byArtifactId("test").get()); Assertions.assertEquals(429, execution1.getResponseStatusCode()); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact("aaa", ArtifactType.JSON, "{}", ContentTypes.APPLICATION_JSON); - var exception2 = Assertions.assertThrows(ApiException.class, 
() -> client.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().post(createArtifact)); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact("aaa", ArtifactType.JSON, "{}", + ContentTypes.APPLICATION_JSON); + var exception2 = Assertions.assertThrows(ApiException.class, + () -> client.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()) + .artifacts().post(createArtifact)); Assertions.assertEquals(429, exception2.getResponseStatusCode()); - var exception3 = Assertions.assertThrows(ApiException.class, () -> client.ids().globalIds().byGlobalId(5L).get()); + var exception3 = Assertions.assertThrows(ApiException.class, + () -> client.ids().globalIds().byGlobalId(5L).get()); Assertions.assertEquals(429, exception3.getResponseStatusCode()); } finally { mock.stop(); diff --git a/app/src/test/java/io/apicurio/registry/rest/DisableApisFlagsTest.java b/app/src/test/java/io/apicurio/registry/rest/DisableApisFlagsTest.java index 6a34556ece..d711c67763 100644 --- a/app/src/test/java/io/apicurio/registry/rest/DisableApisFlagsTest.java +++ b/app/src/test/java/io/apicurio/registry/rest/DisableApisFlagsTest.java @@ -60,71 +60,47 @@ private void doTestArtifactVersionDeletionDisabled() throws Exception { String artifactContent = resourceToString("openapi-empty.json"); // Create OpenAPI artifact - createArtifact(GROUP, "testDeleteArtifactVersion/EmptyAPI", ArtifactType.OPENAPI, artifactContent, io.apicurio.registry.types.ContentTypes.APPLICATION_JSON); + createArtifact(GROUP, "testDeleteArtifactVersion/EmptyAPI", ArtifactType.OPENAPI, artifactContent, + io.apicurio.registry.types.ContentTypes.APPLICATION_JSON); // Make sure we can get the artifact content - given() - .when() - .pathParam("groupId", GROUP) + given().when().pathParam("groupId", GROUP) .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/branch=latest/content") - .then() - .statusCode(200) 
- .body("openapi", equalTo("3.0.2")) + .then().statusCode(200).body("openapi", equalTo("3.0.2")) .body("info.title", equalTo("Empty API")); - //Get the artifact version 1 - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") - .pathParam("version", "1") - .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/content") - .then() - .statusCode(200) - .body("openapi", equalTo("3.0.2")) - .body("info.title", equalTo("Empty API")); + // Get the artifact version 1 + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").pathParam("version", "1") + .get("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}/content").then() + .statusCode(200).body("openapi", equalTo("3.0.2")).body("info.title", equalTo("Empty API")); // Try to delete artifact version 1. Should return 405 as feature is disabled - given() - .when() - .pathParam("groupId", GROUP) - .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI") - .pathParam("version", "1") - .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}") - .then() + given().when().pathParam("groupId", GROUP) + .pathParam("artifactId", "testDeleteArtifactVersion/EmptyAPI").pathParam("version", "1") + .delete("/registry/v3/groups/{groupId}/artifacts/{artifactId}/versions/{version}").then() .statusCode(405) .body("message", equalTo("Artifact version deletion operation is not enabled.")); } private void doTestUIDisabled() { - given() - .baseUri("http://localhost:" + this.testPort ) - .when() - .get("/ui") - .then() - .statusCode(404); + given().baseUri("http://localhost:" + this.testPort).when().get("/ui").then().statusCode(404); } private static void doTestDisabledSubPathRegexp(boolean disabledDirectAccess) { - //this should return http 404, it's disabled - given() - .when() - .contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) + // this should 
return http 404, it's disabled + given().when().contentType(ContentTypes.COMPAT_SCHEMA_REGISTRY_STABLE_LATEST) .body(CCompatTestConstants.SCHEMA_SIMPLE_WRAPPED) - .post("/ccompat/v7/subjects/{subject}/versions", UUID.randomUUID().toString()) - .then() + .post("/ccompat/v7/subjects/{subject}/versions", UUID.randomUUID().toString()).then() .statusCode(404); - var req = given() - .when().contentType(CT_JSON).get("/ccompat/v7/subjects") - .then(); + var req = given().when().contentType(CT_JSON).get("/ccompat/v7/subjects").then(); if (disabledDirectAccess) { req.statusCode(404); } else { - //this should return http 200, it's not disabled - req.statusCode(200) - .body(anything()); + // this should return http 200, it's not disabled + req.statusCode(200).body(anything()); } } @@ -132,14 +108,11 @@ private static void doTestDisabledChildPathByParentPath(boolean disabledDirectAc String artifactContent = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}"; String schemaId = TestUtils.generateArtifactId(); - CreateArtifact createArtifact = TestUtils.serverCreateArtifact(schemaId, ArtifactType.AVRO, artifactContent, io.apicurio.registry.types.ContentTypes.APPLICATION_JSON); - var req = given() - .when() - .contentType(CT_JSON) - .pathParam("groupId", GroupId.DEFAULT.getRawGroupIdWithDefaultString()) - .body(createArtifact) - .post("/registry/v3/groups/{groupId}/artifacts") - .then(); + CreateArtifact createArtifact = TestUtils.serverCreateArtifact(schemaId, ArtifactType.AVRO, + artifactContent, io.apicurio.registry.types.ContentTypes.APPLICATION_JSON); + var req = given().when().contentType(CT_JSON) + .pathParam("groupId", GroupId.DEFAULT.getRawGroupIdWithDefaultString()).body(createArtifact) + .post("/registry/v3/groups/{groupId}/artifacts").then(); if (disabledDirectAccess) { req.statusCode(404); @@ -148,5 +121,4 @@ private static void doTestDisabledChildPathByParentPath(boolean disabledDirectAc } } - } diff --git 
a/app/src/test/java/io/apicurio/registry/rest/DisableApisTestProfile.java b/app/src/test/java/io/apicurio/registry/rest/DisableApisTestProfile.java index fc79ca4786..5963500d35 100644 --- a/app/src/test/java/io/apicurio/registry/rest/DisableApisTestProfile.java +++ b/app/src/test/java/io/apicurio/registry/rest/DisableApisTestProfile.java @@ -1,19 +1,18 @@ package io.apicurio.registry.rest; +import io.quarkus.test.junit.QuarkusTestProfile; + import java.util.HashMap; import java.util.Map; -import io.quarkus.test.junit.QuarkusTestProfile; - public class DisableApisTestProfile implements QuarkusTestProfile { @Override public Map getConfigOverrides() { Map props = new HashMap<>(); - props.put("apicurio.disable.apis","/apis/ccompat/v7/subjects/[^/]+/versions.*,/ui/.*"); + props.put("apicurio.disable.apis", "/apis/ccompat/v7/subjects/[^/]+/versions.*,/ui/.*"); props.put("apicurio.rest.artifact.deletion.enabled", "false"); return props; } } - diff --git a/app/src/test/java/io/apicurio/registry/rest/MultipleRequestFiltersTestProfile.java b/app/src/test/java/io/apicurio/registry/rest/MultipleRequestFiltersTestProfile.java index 28e7684619..6f86742f37 100644 --- a/app/src/test/java/io/apicurio/registry/rest/MultipleRequestFiltersTestProfile.java +++ b/app/src/test/java/io/apicurio/registry/rest/MultipleRequestFiltersTestProfile.java @@ -1,10 +1,10 @@ package io.apicurio.registry.rest; +import io.quarkus.test.junit.QuarkusTestProfile; + import java.util.HashMap; import java.util.Map; -import io.quarkus.test.junit.QuarkusTestProfile; - public class MultipleRequestFiltersTestProfile implements QuarkusTestProfile { @Override diff --git a/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitOpsSmokeTest.java b/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitOpsSmokeTest.java index aaf186e328..2d4b588917 100644 --- a/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitOpsSmokeTest.java +++ 
b/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitOpsSmokeTest.java @@ -35,7 +35,6 @@ class GitOpsSmokeTest { @Current RegistryStorage storage; - @Test void smokeTest() throws Exception { assertEquals(Set.of(), storage.getArtifactIds(10)); @@ -44,8 +43,8 @@ void smokeTest() throws Exception { // Waiting to load smoke01 testRepository.load("git/smoke01"); - await().atMost(Duration.ofSeconds(30)) - .until(() -> withContext(() -> storage.getArtifactIds(10)), equalTo(Set.of("petstore"))); + await().atMost(Duration.ofSeconds(30)).until(() -> withContext(() -> storage.getArtifactIds(10)), + equalTo(Set.of("petstore"))); // Global rules assertEquals(Set.of(RuleType.VALIDITY), Set.copyOf(storage.getGlobalRules())); @@ -56,19 +55,21 @@ void smokeTest() throws Exception { // Artifact rules assertEquals(Set.of(RuleType.COMPATIBILITY), Set.copyOf(storage.getArtifactRules("foo", "petstore"))); - assertEquals("BACKWARD", storage.getArtifactRule("foo", "petstore", RuleType.COMPATIBILITY).getConfiguration()); + assertEquals("BACKWARD", + storage.getArtifactRule("foo", "petstore", RuleType.COMPATIBILITY).getConfiguration()); // Artifact versions var version = storage.getArtifactVersionContent("foo", "petstore", "1"); assertEquals(1, version.getGlobalId()); assertEquals(1, version.getContentId()); var content = loadFile("git/smoke01/content/petstore-1.0.0.yaml"); - assertEquals(YAMLObjectMapper.MAPPER.readTree(content.bytes()), MAPPER.readTree(version.getContent().bytes())); + assertEquals(YAMLObjectMapper.MAPPER.readTree(content.bytes()), + MAPPER.readTree(version.getContent().bytes())); // Waiting to load smoke02 testRepository.load("git/smoke02"); - await().atMost(Duration.ofSeconds(30)) - .until(() -> withContext(() -> storage.getArtifactIds(10)), equalTo(Set.of("person"))); + await().atMost(Duration.ofSeconds(30)).until(() -> withContext(() -> storage.getArtifactIds(10)), + equalTo(Set.of("person"))); // Global rules assertEquals(Set.of(), 
Set.copyOf(storage.getGlobalRules())); @@ -88,20 +89,19 @@ void smokeTest() throws Exception { // Waiting to load empty testRepository.load("git/empty"); - await().atMost(Duration.ofSeconds(30)) - .until(() -> withContext(() -> storage.getArtifactIds(10)), equalTo(Set.of())); + await().atMost(Duration.ofSeconds(30)).until(() -> withContext(() -> storage.getArtifactIds(10)), + equalTo(Set.of())); } - @ActivateRequestContext public T withContext(Supplier supplier) { return supplier.get(); } - private ContentHandle loadFile(String path) { try { - var fullPath = Path.of(requireNonNull(Thread.currentThread().getContextClassLoader().getResource(path)).toURI()); + var fullPath = Path.of( + requireNonNull(Thread.currentThread().getContextClassLoader().getResource(path)).toURI()); return ContentHandle.create(FileUtils.readFileToByteArray(fullPath.toFile())); } catch (IOException | URISyntaxException ex) { throw new RuntimeException(ex); diff --git a/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitTestRepository.java b/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitTestRepository.java index 715e877860..03605b1f74 100644 --- a/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitTestRepository.java +++ b/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitTestRepository.java @@ -24,22 +24,14 @@ public class GitTestRepository implements AutoCloseable { @Getter private String gitRepoBranch; - public void initialize() { try { var gitDir = Files.createTempDirectory(null); gitRepoBranch = "main"; - git = Git.init() - .setDirectory(gitDir.toFile()) - .setInitialBranch(gitRepoBranch) - .call(); + git = Git.init().setDirectory(gitDir.toFile()).setInitialBranch(gitRepoBranch).call(); Files.write(gitDir.resolve(".init"), "init".getBytes(StandardCharsets.UTF_8)); - git.add() - .addFilepattern(".") - .call(); - git.commit() - .setMessage("Initial commit") - .call(); + git.add().addFilepattern(".").call(); + git.commit().setMessage("Initial 
commit").call(); gitRepoUrl = "file://" + git.getRepository().getWorkTree().getAbsolutePath(); } catch (IOException | GitAPIException e) { @@ -47,10 +39,11 @@ public void initialize() { } } - public void load(String sourceDir) { try { - var sourcePath = Path.of(requireNonNull(Thread.currentThread().getContextClassLoader().getResource(sourceDir)).toURI()); + var sourcePath = Path + .of(requireNonNull(Thread.currentThread().getContextClassLoader().getResource(sourceDir)) + .toURI()); var files = FileUtils.listFiles(git.getRepository().getWorkTree(), null, true); for (File f : files) { var prefix = Path.of(git.getRepository().getWorkTree().getPath(), ".git"); @@ -59,23 +52,15 @@ public void load(String sourceDir) { } } FileUtils.copyDirectory(sourcePath.toFile(), git.getRepository().getWorkTree()); - git.add() - .setUpdate(true) - .addFilepattern(".") - .call(); - git.add() - .addFilepattern(".") - .call(); - git.commit() - .setMessage("test") - .call(); + git.add().setUpdate(true).addFilepattern(".").call(); + git.add().addFilepattern(".").call(); + git.commit().setMessage("test").call(); } catch (IOException | GitAPIException | URISyntaxException e) { throw new RuntimeException(e); } } - @Override public void close() throws Exception { if (git != null) { diff --git a/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitTestRepositoryManager.java b/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitTestRepositoryManager.java index d3f09e5437..71d02b0165 100644 --- a/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitTestRepositoryManager.java +++ b/app/src/test/java/io/apicurio/registry/storage/impl/gitops/GitTestRepositoryManager.java @@ -15,15 +15,11 @@ public Map start() { testRepository = new GitTestRepository(); testRepository.initialize(); - return Map.of( - "apicurio.gitops.id", "test", - "apicurio.gitops.repo.origin.uri", testRepository.getGitRepoUrl(), - "apicurio.gitops.repo.origin.branch", testRepository.getGitRepoBranch(), - 
"apicurio.gitops.refresh.every", "5s" - ); + return Map.of("apicurio.gitops.id", "test", "apicurio.gitops.repo.origin.uri", + testRepository.getGitRepoUrl(), "apicurio.gitops.repo.origin.branch", + testRepository.getGitRepoBranch(), "apicurio.gitops.refresh.every", "5s"); } - @Override public void stop() { try { diff --git a/app/src/test/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlRegistryStorageTest.java b/app/src/test/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlRegistryStorageTest.java index a86d7520a7..52990dfaac 100644 --- a/app/src/test/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlRegistryStorageTest.java +++ b/app/src/test/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlRegistryStorageTest.java @@ -12,10 +12,10 @@ @TestProfile(KafkasqlTestProfile.class) @Typed(KafkaSqlRegistryStorageTest.class) public class KafkaSqlRegistryStorageTest extends AbstractRegistryStorageTest { - + @Inject KafkaSqlRegistryStorage storage; - + /** * @see AbstractRegistryStorageTest#storage() */ @@ -23,5 +23,5 @@ public class KafkaSqlRegistryStorageTest extends AbstractRegistryStorageTest { protected RegistryStorage storage() { return storage; } - + } diff --git a/app/src/test/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlSnapshotTest.java b/app/src/test/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlSnapshotTest.java index d0d34fe14f..4b4a950011 100644 --- a/app/src/test/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlSnapshotTest.java +++ b/app/src/test/java/io/apicurio/registry/storage/impl/kafkasql/KafkaSqlSnapshotTest.java @@ -31,20 +31,21 @@ public class KafkaSqlSnapshotTest extends AbstractResourceTestBase { @BeforeAll public void init() { - //Create a bunch of artifacts and rules, so they're added on top of the snapshot. + // Create a bunch of artifacts and rules, so they're added on top of the snapshot. 
String simpleAvro = resourceToString("avro.json"); for (int idx = 0; idx < 1000; idx++) { System.out.println("Iteration: " + idx); String artifactId = UUID.randomUUID().toString(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, simpleAvro, - ContentTypes.APPLICATION_JSON); - clientV3.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts() - .post(createArtifact, config -> config.headers.add("X-Registry-ArtifactId", artifactId)); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, + simpleAvro, ContentTypes.APPLICATION_JSON); + clientV3.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts().post(createArtifact, + config -> config.headers.add("X-Registry-ArtifactId", artifactId)); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("SYNTAX_ONLY"); - clientV3.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts().byArtifactId(artifactId).rules().post(createRule); + clientV3.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts() + .byArtifactId(artifactId).rules().post(createRule); } } diff --git a/app/src/test/java/io/apicurio/registry/storage/impl/readonly/ReadOnlyRegistryStorageTest.java b/app/src/test/java/io/apicurio/registry/storage/impl/readonly/ReadOnlyRegistryStorageTest.java index 5bccdbe30a..f54eb070f6 100644 --- a/app/src/test/java/io/apicurio/registry/storage/impl/readonly/ReadOnlyRegistryStorageTest.java +++ b/app/src/test/java/io/apicurio/registry/storage/impl/readonly/ReadOnlyRegistryStorageTest.java @@ -32,35 +32,46 @@ public class ReadOnlyRegistryStorageTest { private static final Map EXPECTED_METHODS; private static final Set CURRENT_METHODS; - static { - EXPECTED_METHODS = Map.ofEntries( + EXPECTED_METHODS = Map. 
ofEntries( // Keep alphabetical - entry("appendVersionToBranch3", new State(true, s -> s.appendVersionToBranch(null, null, null))), + entry("appendVersionToBranch3", + new State(true, s -> s.appendVersionToBranch(null, null, null))), entry("consumeDownload1", new State(true, s -> s.consumeDownload(null))), entry("contentIdFromHash1", new State(false, s -> s.contentIdFromHash(null))), entry("countArtifacts0", new State(false, RegistryStorage::countArtifacts)), entry("countArtifactVersions2", new State(false, s -> s.countArtifactVersions(null, null))), - entry("countActiveArtifactVersions2", new State(false, s -> s.countActiveArtifactVersions(null, null))), - entry("countTotalArtifactVersions0", new State(false, RegistryStorage::countTotalArtifactVersions)), - entry("createArtifact8", new State(true, s -> s.createArtifact(null, null, null, null, null, null, null, null))), - entry("createArtifactRule4", new State(true, s -> s.createArtifactRule(null, null, null, null))), - entry("createArtifactVersionComment4", new State(true, s -> s.createArtifactVersionComment(null, null, null, null))), - entry("createArtifactVersion7", new State(true, s -> s.createArtifactVersion(null, null, null, null, null, null, null))), + entry("countActiveArtifactVersions2", + new State(false, s -> s.countActiveArtifactVersions(null, null))), + entry("countTotalArtifactVersions0", + new State(false, RegistryStorage::countTotalArtifactVersions)), + entry("createArtifact8", + new State(true, + s -> s.createArtifact(null, null, null, null, null, null, null, null))), + entry("createArtifactRule4", + new State(true, s -> s.createArtifactRule(null, null, null, null))), + entry("createArtifactVersionComment4", + new State(true, s -> s.createArtifactVersionComment(null, null, null, null))), + entry("createArtifactVersion7", + new State(true, + s -> s.createArtifactVersion(null, null, null, null, null, null, null))), entry("createBranch4", new State(true, s -> s.createBranch(null, null, null, null))), 
entry("createDownload1", new State(true, s -> s.createDownload(null))), entry("createGlobalRule2", new State(true, s -> s.createGlobalRule(null, null))), entry("createGroup1", new State(true, s -> s.createGroup(null))), entry("createRoleMapping3", new State(true, s -> s.createRoleMapping(null, null, null))), - entry("deleteAllExpiredDownloads0", new State(true, RegistryStorage::deleteAllExpiredDownloads)), + entry("deleteAllExpiredDownloads0", + new State(true, RegistryStorage::deleteAllExpiredDownloads)), entry("deleteAllUserData0", new State(true, RegistryStorage::deleteAllUserData)), entry("deleteArtifact2", new State(true, s -> s.deleteArtifact(null, null))), entry("deleteBranch2", new State(true, s -> s.deleteBranch(null, null))), entry("deleteArtifactRule3", new State(true, s -> s.deleteArtifactRule(null, null, null))), entry("deleteArtifactRules2", new State(true, s -> s.deleteArtifactRules(null, null))), entry("deleteArtifacts1", new State(true, s -> s.deleteArtifacts(null))), - entry("deleteArtifactVersion3", new State(true, s -> s.deleteArtifactVersion(null, null, null))), - entry("deleteArtifactVersionComment4", new State(true, s -> s.deleteArtifactVersionComment(null, null, null, null))), + entry("deleteArtifactVersion3", + new State(true, s -> s.deleteArtifactVersion(null, null, null))), + entry("deleteArtifactVersionComment4", + new State(true, s -> s.deleteArtifactVersionComment(null, null, null, null))), entry("deleteConfigProperty1", new State(true, s -> s.deleteConfigProperty("test"))), entry("deleteGlobalRule1", new State(true, s -> s.deleteGlobalRule(null))), entry("deleteGlobalRules0", new State(true, RegistryStorage::deleteGlobalRules)), @@ -78,24 +89,36 @@ public class ReadOnlyRegistryStorageTest { entry("getArtifactRule3", new State(false, s -> s.getArtifactRule(null, null, null))), entry("getArtifactRules2", new State(false, s -> s.getArtifactRules(null, null))), entry("getArtifactVersionContent1", new State(false, s -> 
s.getArtifactVersionContent(0))), - entry("getArtifactVersionContent3", new State(false, s -> s.getArtifactVersionContent(null, null, null))), - entry("getArtifactVersionComments3", new State(false, s -> s.getArtifactVersionComments(null, null, null))), - entry("getArtifactVersionMetaData1", new State(false, s -> s.getArtifactVersionMetaData(null))), - entry("getArtifactVersionMetaData3", new State(false, s -> s.getArtifactVersionMetaData(null, null, null))), - entry("getArtifactVersionMetaDataByContent5", new State(false, s -> s.getArtifactVersionMetaDataByContent(null, null, false, null, null))), + entry("getArtifactVersionContent3", + new State(false, s -> s.getArtifactVersionContent(null, null, null))), + entry("getArtifactVersionComments3", + new State(false, s -> s.getArtifactVersionComments(null, null, null))), + entry("getArtifactVersionMetaData1", + new State(false, s -> s.getArtifactVersionMetaData(null))), + entry("getArtifactVersionMetaData3", + new State(false, s -> s.getArtifactVersionMetaData(null, null, null))), + entry("getArtifactVersionMetaDataByContent5", + new State(false, + s -> s.getArtifactVersionMetaDataByContent(null, null, false, null, null))), entry("getArtifactVersions2", new State(false, s -> s.getArtifactVersions(null, null))), - entry("getArtifactVersions3", new State(false, s -> s.getArtifactVersions(null, null, RegistryStorage.RetrievalBehavior.DEFAULT))), - entry("getEnabledArtifactContentIds2", new State(false, s -> s.getEnabledArtifactContentIds(null, null))), - entry("getArtifactVersionsByContentId1", new State(false, s -> s.getArtifactVersionsByContentId(0))), + entry("getArtifactVersions3", new State(false, + s -> s.getArtifactVersions(null, null, RegistryStorage.RetrievalBehavior.DEFAULT))), + entry("getEnabledArtifactContentIds2", + new State(false, s -> s.getEnabledArtifactContentIds(null, null))), + entry("getArtifactVersionsByContentId1", + new State(false, s -> s.getArtifactVersionsByContentId(0))), 
entry("getConfigProperties0", new State(false, DynamicConfigStorage::getConfigProperties)), entry("getConfigProperty1", new State(false, s -> s.getConfigProperty(null))), - entry("getContentIdsReferencingArtifactVersion3", new State(false, s -> s.getContentIdsReferencingArtifactVersion(null, null, null))), - entry("getGlobalIdsReferencingArtifactVersion3", new State(false, s -> s.getGlobalIdsReferencingArtifactVersion(null, null, null))), + entry("getContentIdsReferencingArtifactVersion3", + new State(false, s -> s.getContentIdsReferencingArtifactVersion(null, null, null))), + entry("getGlobalIdsReferencingArtifactVersion3", + new State(false, s -> s.getGlobalIdsReferencingArtifactVersion(null, null, null))), entry("getGlobalRule1", new State(false, s -> s.getGlobalRule(null))), entry("getGlobalRules0", new State(false, RegistryStorage::getGlobalRules)), entry("getGroupIds1", new State(false, s -> s.getGroupIds(null))), entry("getGroupMetaData1", new State(false, s -> s.getGroupMetaData(null))), - entry("getInboundArtifactReferences3", new State(false, s -> s.getInboundArtifactReferences(null, null, null))), + entry("getInboundArtifactReferences3", + new State(false, s -> s.getInboundArtifactReferences(null, null, null))), entry("getRawConfigProperty1", new State(false, s -> s.getRawConfigProperty(null))), entry("getRoleForPrincipal1", new State(false, s -> s.getRoleForPrincipal(null))), entry("getRoleMapping1", new State(false, s -> s.getRoleMapping(null))), @@ -114,8 +137,10 @@ public class ReadOnlyRegistryStorageTest { entry("initialize0", new State(false, RegistryStorage::initialize)), entry("isAlive0", new State(false, RegistryStorage::isAlive)), entry("isArtifactExists2", new State(false, s -> s.isArtifactExists(null, null))), - entry("isArtifactRuleExists3", new State(false, s -> s.isArtifactRuleExists(null, null, null))), - entry("isArtifactVersionExists3", new State(false, s -> s.isArtifactVersionExists(null, null, null))), + entry("isArtifactRuleExists3", 
+ new State(false, s -> s.isArtifactRuleExists(null, null, null))), + entry("isArtifactVersionExists3", + new State(false, s -> s.isArtifactVersionExists(null, null, null))), entry("isContentExists1", new State(false, s -> s.isContentExists(null))), entry("isGlobalRuleExists1", new State(false, s -> s.isGlobalRuleExists(null))), entry("isGroupExists1", new State(false, s -> s.isGroupExists(null))), @@ -125,7 +150,8 @@ public class ReadOnlyRegistryStorageTest { entry("nextCommentId0", new State(true, RegistryStorage::nextCommentId)), entry("nextContentId0", new State(true, RegistryStorage::nextContentId)), entry("nextGlobalId0", new State(true, RegistryStorage::nextGlobalId)), - entry("replaceBranchVersions3", new State(true, s -> s.replaceBranchVersions(null, null, null))), + entry("replaceBranchVersions3", + new State(true, s -> s.replaceBranchVersions(null, null, null))), entry("resetContentId0", new State(true, RegistryStorage::resetContentId)), entry("resetCommentId0", new State(true, RegistryStorage::resetCommentId)), entry("resetGlobalId0", new State(true, RegistryStorage::resetGlobalId)), @@ -137,27 +163,29 @@ public class ReadOnlyRegistryStorageTest { var dto = new DynamicConfigPropertyDto(); dto.setName("test"); s.setConfigProperty(dto); - })), - entry("storageName0", new State(false, RegistryStorage::storageName)), - entry("updateArtifactMetaData3", new State(true, s -> s.updateArtifactMetaData(null, null, null))), - entry("updateArtifactRule4", new State(true, s -> s.updateArtifactRule(null, null, null, null))), - entry("updateArtifactVersionComment5", new State(true, s -> s.updateArtifactVersionComment(null, null, null, null, null))), - entry("updateArtifactVersionMetaData4", new State(true, s -> s.updateArtifactVersionMetaData(null, null, null, null))), - entry("updateBranchMetaData3", new State(true, s -> s.updateBranchMetaData(null, null, null))), - entry("updateContentCanonicalHash3", new State(true, s -> s.updateContentCanonicalHash(null, 0, 
null))), + })), entry("storageName0", new State(false, RegistryStorage::storageName)), + entry("updateArtifactMetaData3", + new State(true, s -> s.updateArtifactMetaData(null, null, null))), + entry("updateArtifactRule4", + new State(true, s -> s.updateArtifactRule(null, null, null, null))), + entry("updateArtifactVersionComment5", + new State(true, s -> s.updateArtifactVersionComment(null, null, null, null, null))), + entry("updateArtifactVersionMetaData4", + new State(true, s -> s.updateArtifactVersionMetaData(null, null, null, null))), + entry("updateBranchMetaData3", + new State(true, s -> s.updateBranchMetaData(null, null, null))), + entry("updateContentCanonicalHash3", + new State(true, s -> s.updateContentCanonicalHash(null, 0, null))), entry("updateGlobalRule2", new State(true, s -> s.updateGlobalRule(null, null))), entry("updateGroupMetaData2", new State(true, s -> s.updateGroupMetaData(null, null))), entry("updateRoleMapping2", new State(true, s -> s.updateRoleMapping(null, null))), entry("triggerSnapshotCreation0", new State(true, RegistryStorage::triggerSnapshotCreation)), - entry("createSnapshot1", new State(true, s -> s.createSnapshot(null))) - ); + entry("createSnapshot1", new State(true, s -> s.createSnapshot(null)))); CURRENT_METHODS = Arrays.stream(RegistryStorage.class.getMethods()) - .map(m -> m.getName() + m.getParameterCount()) - .collect(Collectors.toSet()); + .map(m -> m.getName() + m.getParameterCount()).collect(Collectors.toSet()); } - @Test void readOnlyTest() { notEnabled(); @@ -171,16 +199,17 @@ void readOnlyTest() { notEnabled(); } - private void notEnabled() { for (String method : CURRENT_METHODS) { State state = EXPECTED_METHODS.get(method); - assertNotNull(state, "Method " + method + " in RegistryStorage interface is not covered by this test."); + assertNotNull(state, + "Method " + method + " in RegistryStorage interface is not covered by this test."); try { state.runnable.run(storage); } catch (Exception ex) { if (ex instanceof 
ReadOnlyStorageException) { - Assertions.fail("Unexpected ReadOnlyStorageException for method " + method + " (read-only is not enabled).", ex); + Assertions.fail("Unexpected ReadOnlyStorageException for method " + method + + " (read-only is not enabled).", ex); } } finally { state.executed = true; @@ -189,24 +218,27 @@ private void notEnabled() { reset(); } - private void enabled() { for (String method : CURRENT_METHODS) { State state = EXPECTED_METHODS.get(method); - assertNotNull(state, "Method " + method + " in RegistryStorage interface is not covered by this test."); + assertNotNull(state, + "Method " + method + " in RegistryStorage interface is not covered by this test."); try { state.runnable.run(storage); if (state.writes) { - Assertions.fail("Expected ReadOnlyStorageException for method " + method + " (read-only is enabled)."); + Assertions.fail("Expected ReadOnlyStorageException for method " + method + + " (read-only is enabled)."); } } catch (Exception ex) { if (ex instanceof ReadOnlyStorageException) { if (!state.writes) { - Assertions.fail("Unexpected ReadOnlyStorageException for method " + method + " (read-only is enabled).", ex); + Assertions.fail("Unexpected ReadOnlyStorageException for method " + method + + " (read-only is enabled).", ex); } } else { if (state.writes) { - Assertions.fail("Expected ReadOnlyStorageException for method " + method + " (read-only is enabled)."); + Assertions.fail("Expected ReadOnlyStorageException for method " + method + + " (read-only is enabled)."); } } } finally { @@ -216,17 +248,15 @@ private void enabled() { reset(); } - private void reset() { - var notExecuted = EXPECTED_METHODS.entrySet().stream() - .filter(e -> !e.getValue().executed) + var notExecuted = EXPECTED_METHODS.entrySet().stream().filter(e -> !e.getValue().executed) .collect(Collectors.toSet()); - Assertions.assertEquals(Set.of(), notExecuted, "Some method(s) expected to be in the RegistryStorage interface " + - "by this test are missing."); + 
Assertions.assertEquals(Set.of(), notExecuted, + "Some method(s) expected to be in the RegistryStorage interface " + + "by this test are missing."); EXPECTED_METHODS.forEach((key, value) -> value.executed = false); } - private static class State { private boolean writes; diff --git a/app/src/test/java/io/apicurio/registry/storage/impl/sql/DefaultRegistryStorageTest.java b/app/src/test/java/io/apicurio/registry/storage/impl/sql/DefaultRegistryStorageTest.java index e06434906e..a76c2ad998 100644 --- a/app/src/test/java/io/apicurio/registry/storage/impl/sql/DefaultRegistryStorageTest.java +++ b/app/src/test/java/io/apicurio/registry/storage/impl/sql/DefaultRegistryStorageTest.java @@ -1,11 +1,10 @@ package io.apicurio.registry.storage.impl.sql; +import io.apicurio.registry.noprofile.storage.AbstractRegistryStorageTest; import io.apicurio.registry.storage.RegistryStorage; import io.apicurio.registry.types.Current; -import jakarta.inject.Inject; - -import io.apicurio.registry.noprofile.storage.AbstractRegistryStorageTest; import io.quarkus.test.junit.QuarkusTest; +import jakarta.inject.Inject; @QuarkusTest public class DefaultRegistryStorageTest extends AbstractRegistryStorageTest { diff --git a/app/src/test/java/io/apicurio/registry/storage/impl/sql/SqlUtilTest.java b/app/src/test/java/io/apicurio/registry/storage/impl/sql/SqlUtilTest.java index 9593cdc093..cf3572b7d0 100644 --- a/app/src/test/java/io/apicurio/registry/storage/impl/sql/SqlUtilTest.java +++ b/app/src/test/java/io/apicurio/registry/storage/impl/sql/SqlUtilTest.java @@ -1,11 +1,11 @@ package io.apicurio.registry.storage.impl.sql; -import java.util.HashMap; -import java.util.Map; - import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import java.util.HashMap; +import java.util.Map; + class SqlUtilTest { /** @@ -23,7 +23,8 @@ void testSerializeLabels() { } /** - * Test method for {@link io.apicurio.registry.storage.impl.sql.SqlUtil#deserializeLabels(java.lang.String)}. 
+ * Test method for + * {@link io.apicurio.registry.storage.impl.sql.SqlUtil#deserializeLabels(java.lang.String)}. */ @Test void testDeserializeLabels() { diff --git a/app/src/test/java/io/apicurio/registry/storage/util/GitopsTestProfile.java b/app/src/test/java/io/apicurio/registry/storage/util/GitopsTestProfile.java index a71d608278..dbd2ab658c 100644 --- a/app/src/test/java/io/apicurio/registry/storage/util/GitopsTestProfile.java +++ b/app/src/test/java/io/apicurio/registry/storage/util/GitopsTestProfile.java @@ -10,8 +10,7 @@ public class GitopsTestProfile implements QuarkusTestProfile { @Override public Map getConfigOverrides() { - return Map.of("apicurio.storage.sql.kind", "h2", - "apicurio.storage.kind", "gitops"); + return Map.of("apicurio.storage.sql.kind", "h2", "apicurio.storage.kind", "gitops"); } @Override diff --git a/app/src/test/java/io/apicurio/registry/storage/util/MssqlTestProfile.java b/app/src/test/java/io/apicurio/registry/storage/util/MssqlTestProfile.java index 7cd393a8a9..39ed1b8d33 100644 --- a/app/src/test/java/io/apicurio/registry/storage/util/MssqlTestProfile.java +++ b/app/src/test/java/io/apicurio/registry/storage/util/MssqlTestProfile.java @@ -17,8 +17,7 @@ public Map getConfigOverrides() { @Override public List testResources() { if (!Boolean.parseBoolean(System.getProperty("cluster.tests"))) { - return List.of( - new TestResourceEntry(MsSqlEmbeddedTestResource.class)); + return List.of(new TestResourceEntry(MsSqlEmbeddedTestResource.class)); } else { return Collections.emptyList(); } diff --git a/app/src/test/java/io/apicurio/registry/storage/util/PostgresqlTestProfile.java b/app/src/test/java/io/apicurio/registry/storage/util/PostgresqlTestProfile.java index 1182617f5e..36919791fd 100644 --- a/app/src/test/java/io/apicurio/registry/storage/util/PostgresqlTestProfile.java +++ b/app/src/test/java/io/apicurio/registry/storage/util/PostgresqlTestProfile.java @@ -17,8 +17,7 @@ public Map getConfigOverrides() { @Override public List 
testResources() { if (!Boolean.parseBoolean(System.getProperty("cluster.tests"))) { - return List.of( - new TestResourceEntry(PostgreSqlEmbeddedTestResource.class)); + return List.of(new TestResourceEntry(PostgreSqlEmbeddedTestResource.class)); } else { return Collections.emptyList(); } diff --git a/app/src/test/java/io/apicurio/registry/support/Citizen.java b/app/src/test/java/io/apicurio/registry/support/Citizen.java index 63003a1128..6f75abf7cc 100644 --- a/app/src/test/java/io/apicurio/registry/support/Citizen.java +++ b/app/src/test/java/io/apicurio/registry/support/Citizen.java @@ -27,7 +27,8 @@ public class Citizen { public Citizen() { } - public Citizen(String firstName, String lastName, int age, City city, CitizenIdentifier identifier, List qualifications) { + public Citizen(String firstName, String lastName, int age, City city, CitizenIdentifier identifier, + List qualifications) { this.firstName = firstName; this.lastName = lastName; this.age = age; diff --git a/app/src/test/java/io/apicurio/registry/support/HealthResponse.java b/app/src/test/java/io/apicurio/registry/support/HealthResponse.java index cd56de9ead..56c4988b39 100644 --- a/app/src/test/java/io/apicurio/registry/support/HealthResponse.java +++ b/app/src/test/java/io/apicurio/registry/support/HealthResponse.java @@ -8,8 +8,7 @@ @JsonIgnoreProperties(ignoreUnknown = true) public class HealthResponse { public static enum Status { - UP, - DOWN + UP, DOWN } @JsonProperty("status") diff --git a/app/src/test/java/io/apicurio/registry/support/HealthUtils.java b/app/src/test/java/io/apicurio/registry/support/HealthUtils.java index e9462a0c9c..4ba30c924d 100644 --- a/app/src/test/java/io/apicurio/registry/support/HealthUtils.java +++ b/app/src/test/java/io/apicurio/registry/support/HealthUtils.java @@ -9,8 +9,7 @@ public class HealthUtils { public enum Type { - READY, - LIVE + READY, LIVE } public static void assertHealthCheck(int port, Type type, HealthResponse.Status status) throws Exception { diff 
--git a/app/src/test/java/io/apicurio/registry/support/Qualification.java b/app/src/test/java/io/apicurio/registry/support/Qualification.java index cab5cbc9ec..7a586fa035 100644 --- a/app/src/test/java/io/apicurio/registry/support/Qualification.java +++ b/app/src/test/java/io/apicurio/registry/support/Qualification.java @@ -2,7 +2,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; - public class Qualification { @JsonProperty("subject_name") diff --git a/app/src/test/java/io/apicurio/registry/support/TestCmmn.java b/app/src/test/java/io/apicurio/registry/support/TestCmmn.java index fb6a36036d..0c2cdb07be 100644 --- a/app/src/test/java/io/apicurio/registry/support/TestCmmn.java +++ b/app/src/test/java/io/apicurio/registry/support/TestCmmn.java @@ -4,19 +4,19 @@ package io.apicurio.registry.support; public final class TestCmmn { - private TestCmmn() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistryLite registry) { + private TestCmmn() { } - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - registerAllExtensions( - (com.google.protobuf.ExtensionRegistryLite) registry); + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) { } + + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface UUIDOrBuilder extends - // @@protoc_insertion_point(interface_extends:io.apicurio.registry.common.proto.UUID) - com.google.protobuf.MessageOrBuilder { + // @@protoc_insertion_point(interface_extends:io.apicurio.registry.common.proto.UUID) + com.google.protobuf.MessageOrBuilder { /** * fixed64 msb = 1; @@ -28,42 +28,42 @@ public interface UUIDOrBuilder extends */ long getLsb(); } + /** * Protobuf type {@code io.apicurio.registry.common.proto.UUID} */ - public static final class UUID extends - 
com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:io.apicurio.registry.common.proto.UUID) - UUIDOrBuilder { + public static final class UUID extends com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.apicurio.registry.common.proto.UUID) + UUIDOrBuilder { private static final long serialVersionUID = 0L; + // Use UUID.newBuilder() to construct. private UUID(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } + private UUID() { } @java.lang.Override - protected java.lang.Object newInstance( - UnusedPrivateParameter unused) { + protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UUID(); } @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } - private UUID( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + + private UUID(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet + .newBuilder(); try { boolean done = false; while (!done) { @@ -83,8 +83,7 @@ private UUID( break; } default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; @@ -94,28 +93,26 @@ private UUID( } catch (com.google.protobuf.InvalidProtocolBufferException e) { 
throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable - .ensureFieldAccessorsInitialized( - TestCmmn.UUID.class, TestCmmn.UUID.Builder.class); + .ensureFieldAccessorsInitialized(TestCmmn.UUID.class, TestCmmn.UUID.Builder.class); } public static final int MSB_FIELD_NUMBER = 1; private long msb_; + /** * fixed64 msb = 1; */ @@ -126,6 +123,7 @@ public long getMsb() { public static final int LSB_FIELD_NUMBER = 2; private long lsb_; + /** * fixed64 lsb = 2; */ @@ -135,19 +133,21 @@ public long getLsb() { } private byte memoizedIsInitialized = -1; + @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; + if (isInitialized == 1) + return true; + if (isInitialized == 0) + return false; memoizedIsInitialized = 1; return true; } @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (msb_ != 0L) { output.writeFixed64(1, msb_); } @@ 
-160,16 +160,15 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) @java.lang.Override public int getSerializedSize() { int size = memoizedSize; - if (size != -1) return size; + if (size != -1) + return size; size = 0; if (msb_ != 0L) { - size += com.google.protobuf.CodedOutputStream - .computeFixed64Size(1, msb_); + size += com.google.protobuf.CodedOutputStream.computeFixed64Size(1, msb_); } if (lsb_ != 0L) { - size += com.google.protobuf.CodedOutputStream - .computeFixed64Size(2, lsb_); + size += com.google.protobuf.CodedOutputStream.computeFixed64Size(2, lsb_); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -186,11 +185,12 @@ public boolean equals(final java.lang.Object obj) { } TestCmmn.UUID other = (TestCmmn.UUID) obj; - if (getMsb() - != other.getMsb()) return false; - if (getLsb() - != other.getLsb()) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (getMsb() != other.getMsb()) + return false; + if (getLsb() != other.getLsb()) + return false; + if (!unknownFields.equals(other.unknownFields)) + return false; return true; } @@ -203,124 +203,117 @@ public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + MSB_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - getMsb()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getMsb()); hash = (37 * hash) + LSB_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - getLsb()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getLsb()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } - public static TestCmmn.UUID parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { + public static TestCmmn.UUID parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static 
TestCmmn.UUID parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + + public static TestCmmn.UUID parseFrom(java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static TestCmmn.UUID parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { + + public static TestCmmn.UUID parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static TestCmmn.UUID parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + + public static TestCmmn.UUID parseFrom(com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } + public static TestCmmn.UUID parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static TestCmmn.UUID parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + + public static TestCmmn.UUID parseFrom(byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static TestCmmn.UUID parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - 
.parseWithIOException(PARSER, input); - } - public static TestCmmn.UUID parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static TestCmmn.UUID parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static TestCmmn.UUID parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static TestCmmn.UUID parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static TestCmmn.UUID parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); + + public static TestCmmn.UUID parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static TestCmmn.UUID parseFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, + extensionRegistry); + } + + public static TestCmmn.UUID parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static 
TestCmmn.UUID parseDelimitedFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, + extensionRegistry); + } + + public static TestCmmn.UUID parseFrom(com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static TestCmmn.UUID parseFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, + extensionRegistry); } @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } + public Builder newBuilderForType() { + return newBuilder(); + } + public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } + public static Builder newBuilder(TestCmmn.UUID prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } + @java.lang.Override public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** * Protobuf type {@code io.apicurio.registry.common.proto.UUID} */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:io.apicurio.registry.common.proto.UUID) - TestCmmn.UUIDOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:io.apicurio.registry.common.proto.UUID) + TestCmmn.UUIDOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_descriptor; } @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable - .ensureFieldAccessorsInitialized( - TestCmmn.UUID.class, TestCmmn.UUID.Builder.class); + .ensureFieldAccessorsInitialized(TestCmmn.UUID.class, TestCmmn.UUID.Builder.class); } // Construct using io.apicurio.registry.support.Cmmn.UUID.newBuilder() @@ -328,16 +321,16 @@ private Builder() { maybeForceBuilderInitialization(); } - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } + private void 
maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { } } + @java.lang.Override public Builder clear() { super.clear(); @@ -349,8 +342,7 @@ public Builder clear() { } @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_descriptor; } @@ -381,38 +373,39 @@ public TestCmmn.UUID buildPartial() { public Builder clone() { return super.clone(); } + @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { + public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { return super.setField(field, value); } + @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } + @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } + @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { + public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, + java.lang.Object value) { return super.setRepeatedField(field, index, value); } + @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { + public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { 
return super.addRepeatedField(field, value); } + @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof TestCmmn.UUID) { - return mergeFrom((TestCmmn.UUID)other); + return mergeFrom((TestCmmn.UUID) other); } else { super.mergeFrom(other); return this; @@ -420,7 +413,8 @@ public Builder mergeFrom(com.google.protobuf.Message other) { } public Builder mergeFrom(TestCmmn.UUID other) { - if (other == TestCmmn.UUID.getDefaultInstance()) return this; + if (other == TestCmmn.UUID.getDefaultInstance()) + return this; if (other.getMsb() != 0L) { setMsb(other.getMsb()); } @@ -438,10 +432,8 @@ public final boolean isInitialized() { } @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { + public Builder mergeFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { TestCmmn.UUID parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); @@ -456,7 +448,8 @@ public Builder mergeFrom( return this; } - private long msb_ ; + private long msb_; + /** * fixed64 msb = 1; */ @@ -464,6 +457,7 @@ public Builder mergeFrom( public long getMsb() { return msb_; } + /** * fixed64 msb = 1; */ @@ -473,6 +467,7 @@ public Builder setMsb(long value) { onChanged(); return this; } + /** * fixed64 msb = 1; */ @@ -483,7 +478,8 @@ public Builder clearMsb() { return this; } - private long lsb_ ; + private long lsb_; + /** * fixed64 lsb = 2; */ @@ -491,6 +487,7 @@ public Builder clearMsb() { public long getLsb() { return lsb_; } + /** * fixed64 lsb = 2; */ @@ -500,6 +497,7 @@ public Builder setLsb(long value) { onChanged(); return this; } + /** * fixed64 lsb = 2; */ @@ -509,19 +507,17 @@ public Builder clearLsb() { onChanged(); return this; } + @java.lang.Override - public final Builder setUnknownFields( - 
final com.google.protobuf.UnknownFieldSet unknownFields) { + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { + public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } - // @@protoc_insertion_point(builder_scope:io.apicurio.registry.common.proto.UUID) } @@ -535,13 +531,11 @@ public static TestCmmn.UUID getDefaultInstance() { return DEFAULT_INSTANCE; } - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { + private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override - public UUID parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + public UUID parsePartialFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { return new UUID(input, extensionRegistry); } }; @@ -562,35 +556,25 @@ public TestCmmn.UUID getDefaultInstanceForType() { } - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_io_apicurio_registry_common_proto_UUID_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor internal_static_io_apicurio_registry_common_proto_UUID_descriptor; + private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable; - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; + + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { - java.lang.String[] descriptorData = { - "\n\014common.proto\022!io.apicurio.registry.com" + - "mon.proto\" \n\004UUID\022\013\n\003msb\030\001 \001(\006\022\013\n\003lsb\030\002 " + - "\001(\006B)\n!io.apicurio.registry.common.proto" + - "B\004Cmmnb\006proto3" - }; - descriptor = com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }); - internal_static_io_apicurio_registry_common_proto_UUID_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_io_apicurio_registry_common_proto_UUID_descriptor, - new java.lang.String[] { "Msb", "Lsb", }); + java.lang.String[] descriptorData = { "\n\014common.proto\022!io.apicurio.registry.com" + + "mon.proto\" \n\004UUID\022\013\n\003msb\030\001 \001(\006\022\013\n\003lsb\030\002 " + + "\001(\006B)\n!io.apicurio.registry.common.proto" + "B\004Cmmnb\006proto3" }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( + descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] {}); + internal_static_io_apicurio_registry_common_proto_UUID_descriptor = getDescriptor().getMessageTypes() + .get(0); + internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_apicurio_registry_common_proto_UUID_descriptor, + 
new java.lang.String[] { "Msb", "Lsb", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/app/src/test/java/io/apicurio/registry/support/Tester.java b/app/src/test/java/io/apicurio/registry/support/Tester.java index 48473af072..1905e52453 100644 --- a/app/src/test/java/io/apicurio/registry/support/Tester.java +++ b/app/src/test/java/io/apicurio/registry/support/Tester.java @@ -14,6 +14,7 @@ public Tester(String name, TesterState state) { this.name = name; this.state = state; } + public String getName() { return name; } @@ -32,8 +33,10 @@ public void setState(TesterState state) { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; Tester tester = (Tester) o; return Objects.equals(name, tester.name) && state == tester.state; } diff --git a/app/src/test/java/io/apicurio/registry/util/ArtifactTypeUtilTest.java b/app/src/test/java/io/apicurio/registry/util/ArtifactTypeUtilTest.java index ad3aa61993..e04133c0f0 100644 --- a/app/src/test/java/io/apicurio/registry/util/ArtifactTypeUtilTest.java +++ b/app/src/test/java/io/apicurio/registry/util/ArtifactTypeUtilTest.java @@ -14,14 +14,15 @@ import static org.junit.jupiter.api.Assertions.assertEquals; class ArtifactTypeUtilTest extends AbstractRegistryTestBase { - + static ArtifactTypeUtilProviderFactory artifactTypeUtilProviderFactory; static { artifactTypeUtilProviderFactory = new DefaultArtifactTypeUtilProviderImpl(); } - + /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)} + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)} */ @Test void testDiscoverType_JSON() { @@ -31,7 +32,8 @@ void testDiscoverType_JSON() { } /** - * Test method 
for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. */ @Test void testDiscoverType_Avro() { @@ -41,7 +43,8 @@ void testDiscoverType_Avro() { } /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. */ @Test void testDiscoverType_Avro_Simple() { @@ -54,7 +57,8 @@ void testDiscoverType_Avro_Simple() { } /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. */ @Test void testDiscoverType_Proto() { @@ -68,7 +72,8 @@ void testDiscoverType_Proto() { } /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. */ @Test void testDiscoverType_OpenApi() { @@ -86,7 +91,8 @@ void testDiscoverType_OpenApi() { } /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. 
*/ @Test void testDiscoverType_AsyncApi() { @@ -96,7 +102,8 @@ void testDiscoverType_AsyncApi() { } /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. */ @Test void testDiscoverType_GraphQL() { @@ -106,7 +113,8 @@ void testDiscoverType_GraphQL() { } /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. */ @Test void testDiscoverType_DefaultNotFound() { @@ -117,7 +125,8 @@ void testDiscoverType_DefaultNotFound() { } /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. */ @Test void testDiscoverType_Xml() { @@ -127,7 +136,8 @@ void testDiscoverType_Xml() { } /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. + * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. */ @Test void testDiscoverType_Xsd() { @@ -137,7 +147,8 @@ void testDiscoverType_Xsd() { } /** - * Test method for {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. 
+ * Test method for + * {@link io.apicurio.registry.util.ArtifactTypeUtil#determineArtifactType(TypedContent, String, ArtifactTypeUtilProviderFactory)}. */ @Test void testDiscoverType_Wsdl() { diff --git a/app/src/test/java/io/apicurio/registry/util/ContentTypeUtilTest.java b/app/src/test/java/io/apicurio/registry/util/ContentTypeUtilTest.java index eb207b94c1..4b74af4d16 100644 --- a/app/src/test/java/io/apicurio/registry/util/ContentTypeUtilTest.java +++ b/app/src/test/java/io/apicurio/registry/util/ContentTypeUtilTest.java @@ -1,56 +1,26 @@ package io.apicurio.registry.util; +import io.apicurio.registry.content.ContentHandle; import io.apicurio.registry.content.util.ContentTypeUtil; +import io.apicurio.registry.utils.tests.TestUtils; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import io.apicurio.registry.content.ContentHandle; -import io.apicurio.registry.utils.tests.TestUtils; - class ContentTypeUtilTest { - private static final String YAML_CONTENT = "openapi: 3.0.2\r\n" + - "info:\r\n" + - " title: Empty API\r\n" + - " version: 1.0.0\r\n" + - "paths:\r\n" + - " /mice:\r\n" + - " get:\r\n" + - " responses:\r\n" + - " '200':\r\n" + - " description: ...\r\n" + - "components:\r\n" + - " schemas:\r\n" + - " Mouse:\r\n" + - " description: ''\r\n" + - " type: object\r\n" + - ""; - private static final String JSON_CONTENT = "{\r\n" + - " \"openapi\" : \"3.0.2\",\r\n" + - " \"info\" : {\r\n" + - " \"title\" : \"Empty API\",\r\n" + - " \"version\" : \"1.0.0\"\r\n" + - " },\r\n" + - " \"paths\" : {\r\n" + - " \"/mice\" : {\r\n" + - " \"get\" : {\r\n" + - " \"responses\" : {\r\n" + - " \"200\" : {\r\n" + - " \"description\" : \"...\"\r\n" + - " }\r\n" + - " }\r\n" + - " }\r\n" + - " }\r\n" + - " },\r\n" + - " \"components\" : {\r\n" + - " \"schemas\" : {\r\n" + - " \"Mouse\" : {\r\n" + - " \"description\" : \"\",\r\n" + - " \"type\" : \"object\"\r\n" + - " }\r\n" + - " }\r\n" + - " }\r\n" + - "}"; + private static final String YAML_CONTENT 
= "openapi: 3.0.2\r\n" + "info:\r\n" + " title: Empty API\r\n" + + " version: 1.0.0\r\n" + "paths:\r\n" + " /mice:\r\n" + " get:\r\n" + + " responses:\r\n" + " '200':\r\n" + + " description: ...\r\n" + "components:\r\n" + " schemas:\r\n" + + " Mouse:\r\n" + " description: ''\r\n" + " type: object\r\n" + ""; + private static final String JSON_CONTENT = "{\r\n" + " \"openapi\" : \"3.0.2\",\r\n" + + " \"info\" : {\r\n" + " \"title\" : \"Empty API\",\r\n" + " \"version\" : \"1.0.0\"\r\n" + + " },\r\n" + " \"paths\" : {\r\n" + " \"/mice\" : {\r\n" + " \"get\" : {\r\n" + + " \"responses\" : {\r\n" + " \"200\" : {\r\n" + + " \"description\" : \"...\"\r\n" + " }\r\n" + " }\r\n" + + " }\r\n" + " }\r\n" + " },\r\n" + " \"components\" : {\r\n" + + " \"schemas\" : {\r\n" + " \"Mouse\" : {\r\n" + " \"description\" : \"\",\r\n" + + " \"type\" : \"object\"\r\n" + " }\r\n" + " }\r\n" + " }\r\n" + "}"; /** * Test method for {@link ContentTypeUtil#yamlToJson(io.apicurio.registry.content.ContentHandle)}. @@ -59,7 +29,8 @@ class ContentTypeUtilTest { void testYamlToJson() throws Exception { ContentHandle yaml = ContentHandle.create(YAML_CONTENT); ContentHandle json = ContentTypeUtil.yamlToJson(yaml); - Assertions.assertEquals(TestUtils.normalizeMultiLineString(JSON_CONTENT), TestUtils.normalizeMultiLineString(json.content())); + Assertions.assertEquals(TestUtils.normalizeMultiLineString(JSON_CONTENT), + TestUtils.normalizeMultiLineString(json.content())); } } diff --git a/app/src/test/java/io/apicurio/registry/util/PropertiesLoader.java b/app/src/test/java/io/apicurio/registry/util/PropertiesLoader.java index 9f308667ff..2a542d09f9 100644 --- a/app/src/test/java/io/apicurio/registry/util/PropertiesLoader.java +++ b/app/src/test/java/io/apicurio/registry/util/PropertiesLoader.java @@ -15,7 +15,8 @@ public class PropertiesLoader { * Loads properties file from the classpath. 
*/ private static void loadProperties(final String fileName) { - try (InputStream inputStream = PropertiesLoader.class.getClassLoader().getResourceAsStream(fileName)) { + try ( + InputStream inputStream = PropertiesLoader.class.getClassLoader().getResourceAsStream(fileName)) { properties.load(inputStream); } catch (Exception e) { throw new RuntimeException(e); diff --git a/common/pom.xml b/common/pom.xml index 6a9d44a7ed..8e003990b1 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -1,148 +1,145 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-common - jar - apicurio-registry-common + apicurio-registry-common + jar + apicurio-registry-common - - - jakarta.ws.rs - jakarta.ws.rs-api - + + + jakarta.ws.rs + jakarta.ws.rs-api + - - jakarta.validation - jakarta.validation-api - + + jakarta.validation + jakarta.validation-api + - - com.fasterxml.jackson.core - jackson-annotations - + + com.fasterxml.jackson.core + jackson-annotations + - - io.quarkus - quarkus-core - provided - + + io.quarkus + quarkus-core + provided + - - io.quarkus - quarkus-jackson - provided - + + io.quarkus + quarkus-jackson + provided + - - io.quarkus - quarkus-resteasy-jackson - provided - + + io.quarkus + quarkus-resteasy-jackson + provided + - - org.eclipse.microprofile.config - microprofile-config-api - provided - + + org.eclipse.microprofile.config + microprofile-config-api + provided + - - io.quarkus - quarkus-micrometer-registry-prometheus - provided - + + io.quarkus + quarkus-micrometer-registry-prometheus + provided + - - io.apicurio - apicurio-common-app-components-config-definitions - provided - + + io.apicurio + apicurio-common-app-components-config-definitions + provided + - - org.projectlombok - lombok - compile - + + org.projectlombok + lombok + compile + - - org.junit.jupiter - junit-jupiter - test - + + org.junit.jupiter + junit-jupiter + test + - 
+ - - - - kr.motd.maven - os-maven-plugin - ${os-maven-plugin.version} - - + - - - io.apicurio - apicurio-codegen-maven-plugin - 1.1.1.Final - - - generate-api-v3 - generate-sources - - generate - - - - io.apicurio.registry.rest.v3 - - ${project.basedir}/src/main/resources/META-INF/openapi.json - - - - generate-api-v2 - generate-sources - - generate - - - - io.apicurio.registry.rest.v2 - - ${project.basedir}/src/main/resources/META-INF/openapi-v2.json - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 3.6.0 - - - addSource - generate-sources - - add-source - - - - ${project.basedir}/target/generated-sources/jaxrs - - - - - - - + + + io.apicurio + apicurio-codegen-maven-plugin + 1.1.1.Final + + + generate-api-v3 + + generate + + generate-sources + + + io.apicurio.registry.rest.v3 + + ${project.basedir}/src/main/resources/META-INF/openapi.json + + + + generate-api-v2 + + generate + + generate-sources + + + io.apicurio.registry.rest.v2 + + ${project.basedir}/src/main/resources/META-INF/openapi-v2.json + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.6.0 + + + addSource + + add-source + + generate-sources + + + ${project.basedir}/target/generated-sources/jaxrs + + + + + + + + + kr.motd.maven + os-maven-plugin + ${os-maven-plugin.version} + + + diff --git a/common/src/main/java/io/apicurio/registry/events/dto/ArtifactId.java b/common/src/main/java/io/apicurio/registry/events/dto/ArtifactId.java index da406247bc..32f632126a 100644 --- a/common/src/main/java/io/apicurio/registry/events/dto/ArtifactId.java +++ b/common/src/main/java/io/apicurio/registry/events/dto/ArtifactId.java @@ -3,23 +3,14 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; - import io.quarkus.runtime.annotations.RegisterForReflection; - /** * Root Type for ArtifactId *

- * - * */ @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonPropertyOrder({ - "groupId", - "artifactId", - "version", - "type" -}) +@JsonPropertyOrder({ "groupId", "artifactId", "version", "type" }) @RegisterForReflection public class ArtifactId { @@ -27,9 +18,7 @@ public class ArtifactId { private String groupId; /** - * * (Required) - * */ @JsonProperty("artifactId") private String artifactId; @@ -51,9 +40,7 @@ public void setGroupId(String groupId) { } /** - * * (Required) - * */ @JsonProperty("artifactId") public String getArtifactId() { @@ -61,9 +48,7 @@ public String getArtifactId() { } /** - * * (Required) - * */ @JsonProperty("artifactId") public void setArtifactId(String artifactId) { diff --git a/common/src/main/java/io/apicurio/registry/events/dto/ArtifactRuleChange.java b/common/src/main/java/io/apicurio/registry/events/dto/ArtifactRuleChange.java index 012f4b36a2..3cc560f064 100644 --- a/common/src/main/java/io/apicurio/registry/events/dto/ArtifactRuleChange.java +++ b/common/src/main/java/io/apicurio/registry/events/dto/ArtifactRuleChange.java @@ -2,7 +2,6 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; - import io.quarkus.runtime.annotations.RegisterForReflection; @RegisterForReflection diff --git a/common/src/main/java/io/apicurio/registry/events/dto/ArtifactStateChange.java b/common/src/main/java/io/apicurio/registry/events/dto/ArtifactStateChange.java index 32e12736e2..3d8bebd56e 100644 --- a/common/src/main/java/io/apicurio/registry/events/dto/ArtifactStateChange.java +++ b/common/src/main/java/io/apicurio/registry/events/dto/ArtifactStateChange.java @@ -3,23 +3,14 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; - import io.quarkus.runtime.annotations.RegisterForReflection; - /** * Root Type for ArtifactStateChange *

- * - * */ @JsonInclude(JsonInclude.Include.NON_NULL) -@JsonPropertyOrder({ - "groupId", - "artifactId", - "state", - "version" -}) +@JsonPropertyOrder({ "groupId", "artifactId", "state", "version" }) @RegisterForReflection public class ArtifactStateChange { @@ -27,16 +18,12 @@ public class ArtifactStateChange { private String groupId; /** - * * (Required) - * */ @JsonProperty("artifactId") private String artifactId; /** - * * (Required) - * */ @JsonProperty("state") private String state; @@ -54,9 +41,7 @@ public void setGroupId(String groupId) { } /** - * * (Required) - * */ @JsonProperty("artifactId") public String getArtifactId() { @@ -64,9 +49,7 @@ public String getArtifactId() { } /** - * * (Required) - * */ @JsonProperty("artifactId") public void setArtifactId(String artifactId) { @@ -74,9 +57,7 @@ public void setArtifactId(String artifactId) { } /** - * * (Required) - * */ @JsonProperty("state") public String getState() { @@ -84,9 +65,7 @@ public String getState() { } /** - * * (Required) - * */ @JsonProperty("state") public void setState(String state) { diff --git a/common/src/main/java/io/apicurio/registry/events/dto/RegistryEventType.java b/common/src/main/java/io/apicurio/registry/events/dto/RegistryEventType.java index 3781b4b5e0..df57bf8a04 100644 --- a/common/src/main/java/io/apicurio/registry/events/dto/RegistryEventType.java +++ b/common/src/main/java/io/apicurio/registry/events/dto/RegistryEventType.java @@ -5,32 +5,22 @@ @RegisterForReflection public enum RegistryEventType { - GROUP_CREATED, - GROUP_UPDATED, - GROUP_DELETED, + GROUP_CREATED, GROUP_UPDATED, GROUP_DELETED, ARTIFACTS_IN_GROUP_DELETED, - ARTIFACT_CREATED, - ARTIFACT_UPDATED, - ARTIFACT_DELETED, + ARTIFACT_CREATED, ARTIFACT_UPDATED, ARTIFACT_DELETED, ARTIFACT_STATE_CHANGED, - ARTIFACT_RULE_CREATED, - ARTIFACT_RULE_UPDATED, - ARTIFACT_RULE_DELETED, - ALL_ARTIFACT_RULES_DELETED, + ARTIFACT_RULE_CREATED, ARTIFACT_RULE_UPDATED, ARTIFACT_RULE_DELETED, ALL_ARTIFACT_RULES_DELETED, - 
GLOBAL_RULE_CREATED, - GLOBAL_RULE_UPDATED, - GLOBAL_RULE_DELETED, - ALL_GLOBAL_RULES_DELETED; + GLOBAL_RULE_CREATED, GLOBAL_RULE_UPDATED, GLOBAL_RULE_DELETED, ALL_GLOBAL_RULES_DELETED; private String cloudEventType; private RegistryEventType() { - this.cloudEventType = "io.apicurio.registry."+this.name().toLowerCase().replace("_", "-"); + this.cloudEventType = "io.apicurio.registry." + this.name().toLowerCase().replace("_", "-"); } public String cloudEventType() { diff --git a/common/src/main/java/io/apicurio/registry/exception/RuntimeAssertionFailedException.java b/common/src/main/java/io/apicurio/registry/exception/RuntimeAssertionFailedException.java index c1f9e4da7e..753336fe68 100644 --- a/common/src/main/java/io/apicurio/registry/exception/RuntimeAssertionFailedException.java +++ b/common/src/main/java/io/apicurio/registry/exception/RuntimeAssertionFailedException.java @@ -4,12 +4,10 @@ public class RuntimeAssertionFailedException extends RuntimeException { private static final String PREFIX = "Runtime assertion failed: "; - public RuntimeAssertionFailedException(String message) { super(PREFIX + message); } - public RuntimeAssertionFailedException(String message, Throwable cause) { super(PREFIX + message, cause); } diff --git a/common/src/main/java/io/apicurio/registry/exception/UnreachableCodeException.java b/common/src/main/java/io/apicurio/registry/exception/UnreachableCodeException.java index a59cb1c29e..c1c909eee5 100644 --- a/common/src/main/java/io/apicurio/registry/exception/UnreachableCodeException.java +++ b/common/src/main/java/io/apicurio/registry/exception/UnreachableCodeException.java @@ -4,22 +4,18 @@ public class UnreachableCodeException extends RuntimeAssertionFailedException { private static final String PREFIX = "Unreachable code"; - public UnreachableCodeException() { super(PREFIX); } - public UnreachableCodeException(String message) { super(PREFIX + ": " + message); } - public UnreachableCodeException(Throwable cause) { super(PREFIX + ": 
Unexpected exception", cause); } - public UnreachableCodeException(String message, Throwable cause) { super(PREFIX + ": " + message, cause); } diff --git a/common/src/main/java/io/apicurio/registry/model/ArtifactId.java b/common/src/main/java/io/apicurio/registry/model/ArtifactId.java index 00bbd4acd8..c2e4721f48 100644 --- a/common/src/main/java/io/apicurio/registry/model/ArtifactId.java +++ b/common/src/main/java/io/apicurio/registry/model/ArtifactId.java @@ -14,22 +14,19 @@ public final class ArtifactId { private final String rawArtifactId; - public ArtifactId(String rawArtifactId) { if (!isValid(rawArtifactId)) { - throw new ValidationException("Artifact ID '" + rawArtifactId + "' is invalid. " + - "It must have length 1..512 (inclusive)."); + throw new ValidationException("Artifact ID '" + rawArtifactId + "' is invalid. " + + "It must have length 1..512 (inclusive)."); } this.rawArtifactId = rawArtifactId; } - @Override public String toString() { return rawArtifactId; } - public static boolean isValid(String rawArtifactId) { return rawArtifactId != null && VALID_PATTERN.matcher(rawArtifactId).matches(); } diff --git a/common/src/main/java/io/apicurio/registry/model/BranchId.java b/common/src/main/java/io/apicurio/registry/model/BranchId.java index ab1534e538..d0b1ced7ca 100644 --- a/common/src/main/java/io/apicurio/registry/model/BranchId.java +++ b/common/src/main/java/io/apicurio/registry/model/BranchId.java @@ -6,16 +6,13 @@ import java.util.regex.Pattern; - @Getter @EqualsAndHashCode public final class BranchId { /** - * Pattern requirements: - * - Must not contain reserved characters ":=,<>" (see VersionExpressionParser) - * - Must accept semver string - * - Must fit in the database column + * Pattern requirements: - Must not contain reserved characters ":=,<>" (see VersionExpressionParser) - + * Must accept semver string - Must fit in the database column */ private static final Pattern VALID_PATTERN = Pattern.compile("[a-zA-Z0-9._\\-+]{1,256}"); @@ 
-23,22 +20,19 @@ public final class BranchId { private final String rawBranchId; - public BranchId(String rawBranchId) { if (!isValid(rawBranchId)) { - throw new ValidationException("Branch ID '" + rawBranchId + "' is invalid. " + - "It must consist of alphanumeric characters or '._-+', and have length 1..256 (inclusive)."); + throw new ValidationException("Branch ID '" + rawBranchId + "' is invalid. " + + "It must consist of alphanumeric characters or '._-+', and have length 1..256 (inclusive)."); } this.rawBranchId = rawBranchId; } - @Override public String toString() { return rawBranchId; } - public static boolean isValid(String rawBranchId) { return rawBranchId != null && VALID_PATTERN.matcher(rawBranchId).matches(); } diff --git a/common/src/main/java/io/apicurio/registry/model/GA.java b/common/src/main/java/io/apicurio/registry/model/GA.java index 23460b2f2c..158fb7768b 100644 --- a/common/src/main/java/io/apicurio/registry/model/GA.java +++ b/common/src/main/java/io/apicurio/registry/model/GA.java @@ -3,25 +3,21 @@ import lombok.EqualsAndHashCode; import lombok.Getter; - @Getter @EqualsAndHashCode(callSuper = true) public class GA extends GroupId { private final ArtifactId artifactId; - public GA(String groupId, String artifactId) { super(groupId); this.artifactId = new ArtifactId(artifactId); } - public String getRawArtifactId() { return artifactId.getRawArtifactId(); } - @Override public String toString() { return super.toString() + ":" + artifactId; diff --git a/common/src/main/java/io/apicurio/registry/model/GAV.java b/common/src/main/java/io/apicurio/registry/model/GAV.java index a3c9cf516f..0f09be5359 100644 --- a/common/src/main/java/io/apicurio/registry/model/GAV.java +++ b/common/src/main/java/io/apicurio/registry/model/GAV.java @@ -9,29 +9,24 @@ public final class GAV extends GA { private final VersionId versionId; - public GAV(String rawGroupId, String rawArtifactId, String rawVersionId) { super(rawGroupId, rawArtifactId); this.versionId = new 
VersionId(rawVersionId); } - public GAV(GA ga, VersionId versionId) { super(ga.getRawGroupId(), ga.getRawArtifactId()); this.versionId = versionId; } - public GAV(GA ga, String rawVersionId) { this(ga, new VersionId(rawVersionId)); } - public String getRawVersionId() { return versionId.getRawVersionId(); } - @Override public String toString() { return super.toString() + ":" + versionId; diff --git a/common/src/main/java/io/apicurio/registry/model/GroupId.java b/common/src/main/java/io/apicurio/registry/model/GroupId.java index 639cbe09c8..fa8c220c9c 100644 --- a/common/src/main/java/io/apicurio/registry/model/GroupId.java +++ b/common/src/main/java/io/apicurio/registry/model/GroupId.java @@ -14,43 +14,39 @@ public class GroupId { private static final String DEFAULT_STRING = "default"; - private static final String DEFAULT_RAW_GROUP_ID = "__$GROUPID$__"; // TODO: Consider using "default" as a default group ID. + private static final String DEFAULT_RAW_GROUP_ID = "__$GROUPID$__"; // TODO: Consider using "default" as a + // default group ID. public static final GroupId DEFAULT = new GroupId(DEFAULT_RAW_GROUP_ID); private final String rawGroupId; - public GroupId(String rawGroupId) { if (!isValid(rawGroupId)) { - throw new ValidationException("Group ID '" + rawGroupId + "' is invalid. " + - "It must have length 1..512 (inclusive)."); + throw new ValidationException( + "Group ID '" + rawGroupId + "' is invalid. " + "It must have length 1..512 (inclusive)."); } - this.rawGroupId = rawGroupId == null || DEFAULT_STRING.equalsIgnoreCase(rawGroupId) ? DEFAULT_RAW_GROUP_ID : rawGroupId; + this.rawGroupId = rawGroupId == null || DEFAULT_STRING.equalsIgnoreCase(rawGroupId) + ? DEFAULT_RAW_GROUP_ID : rawGroupId; } - public boolean isDefaultGroup() { return DEFAULT.getRawGroupId().equals(rawGroupId); } - public String getRawGroupIdWithDefaultString() { return isDefaultGroup() ? DEFAULT_STRING : rawGroupId; } - public String getRawGroupIdWithNull() { return isDefaultGroup() ? 
null : rawGroupId; } - @Override public String toString() { return getRawGroupIdWithDefaultString(); } - public static boolean isValid(String rawGroupId) { return rawGroupId == null || VALID_PATTERN.matcher(rawGroupId).matches(); } diff --git a/common/src/main/java/io/apicurio/registry/model/VersionExpressionParser.java b/common/src/main/java/io/apicurio/registry/model/VersionExpressionParser.java index 2231c447c3..ffb9e359b9 100644 --- a/common/src/main/java/io/apicurio/registry/model/VersionExpressionParser.java +++ b/common/src/main/java/io/apicurio/registry/model/VersionExpressionParser.java @@ -7,11 +7,9 @@ public class VersionExpressionParser { - private VersionExpressionParser() { } - public static GAV parse(GA ga, String versionExpression, BiFunction branchToVersion) { if (VersionId.isValid(versionExpression)) { return new GAV(ga, versionExpression); @@ -29,7 +27,6 @@ public static GAV parse(GA ga, String versionExpression, BiFunction" (see VersionExpressionParser) - * - Must accept semver string - * - Must fit in the database column + * Pattern requirements: - Must not contain reserved characters ":=,<>" (see VersionExpressionParser) - + * Must accept semver string - Must fit in the database column */ - private static final Pattern VALID_PATTERN = Pattern.compile("[a-zA-Z0-9._\\-+]{1,256}"); // TODO: UPGRADE INCOMPATIBILITY + private static final Pattern VALID_PATTERN = Pattern.compile("[a-zA-Z0-9._\\-+]{1,256}"); // TODO: UPGRADE + // INCOMPATIBILITY private final String rawVersionId; - public VersionId(String rawVersionId) { if (!isValid(rawVersionId)) { - throw new ValidationException("Version ID '" + rawVersionId + "' is invalid. " + - "It must consist of alphanumeric characters or '._-+', and have length 1..256 (inclusive)."); + throw new ValidationException("Version ID '" + rawVersionId + "' is invalid. 
" + + "It must consist of alphanumeric characters or '._-+', and have length 1..256 (inclusive)."); } this.rawVersionId = rawVersionId; } - @Override public String toString() { return rawVersionId; } - public static boolean isValid(String rawVersionId) { return rawVersionId != null && VALID_PATTERN.matcher(rawVersionId).matches(); } diff --git a/common/src/main/java/io/apicurio/registry/rest/Headers.java b/common/src/main/java/io/apicurio/registry/rest/Headers.java index 01a04bc034..648258297e 100644 --- a/common/src/main/java/io/apicurio/registry/rest/Headers.java +++ b/common/src/main/java/io/apicurio/registry/rest/Headers.java @@ -1,8 +1,8 @@ package io.apicurio.registry.rest; import io.apicurio.registry.types.ArtifactState; - import jakarta.ws.rs.core.Response; + import java.util.function.Supplier; public interface Headers { @@ -21,13 +21,8 @@ public interface Headers { String PRESERVE_GLOBAL_ID = "X-Registry-Preserve-GlobalId"; String PRESERVE_CONTENT_ID = "X-Registry-Preserve-ContentId"; - default void checkIfDeprecated( - Supplier stateSupplier, - String groupId, - String artifactId, - Number version, - Response.ResponseBuilder builder - ) { + default void checkIfDeprecated(Supplier stateSupplier, String groupId, String artifactId, + Number version, Response.ResponseBuilder builder) { if (stateSupplier.get() == ArtifactState.DEPRECATED) { builder.header(Headers.DEPRECATED, true); builder.header(Headers.GROUP_ID, groupId); diff --git a/common/src/main/java/io/apicurio/registry/rest/JacksonDateTimeCustomizer.java b/common/src/main/java/io/apicurio/registry/rest/JacksonDateTimeCustomizer.java index 2c3a362ad4..ea2995dccd 100644 --- a/common/src/main/java/io/apicurio/registry/rest/JacksonDateTimeCustomizer.java +++ b/common/src/main/java/io/apicurio/registry/rest/JacksonDateTimeCustomizer.java @@ -1,29 +1,26 @@ package io.apicurio.registry.rest; -import java.text.SimpleDateFormat; -import java.util.TimeZone; - +import com.fasterxml.jackson.databind.ObjectMapper; 
+import com.fasterxml.jackson.databind.SerializationFeature; +import io.apicurio.common.apps.config.Info; +import io.quarkus.jackson.ObjectMapperCustomizer; import jakarta.annotation.PostConstruct; import jakarta.inject.Singleton; - import org.eclipse.microprofile.config.inject.ConfigProperty; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; - -import io.apicurio.common.apps.config.Info; -import io.quarkus.jackson.ObjectMapperCustomizer; +import java.text.SimpleDateFormat; +import java.util.TimeZone; @Singleton public class JacksonDateTimeCustomizer implements ObjectMapperCustomizer { - + private static Logger log = LoggerFactory.getLogger(JacksonDateTimeCustomizer.class); - + private static final String DEFAULT_DATE_TIME_FORMAT = "yyyy-MM-dd'T'HH:mm:ss'Z'"; private static final String DEFAULT_DATE_TIME_FORMAT_TZ = "UTC"; - + @ConfigProperty(name = "apicurio.apis.v2.date-format", defaultValue = DEFAULT_DATE_TIME_FORMAT) @Info(category = "api", description = "API date format", availableSince = "2.4.3.Final") String dateFormat; @@ -57,5 +54,5 @@ protected static void configureDateFormat(ObjectMapper mapper, String format, St mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); mapper.setDateFormat(df); } - + } diff --git a/common/src/main/java/io/apicurio/registry/types/ArtifactMediaTypes.java b/common/src/main/java/io/apicurio/registry/types/ArtifactMediaTypes.java index 56efe29eb6..6e662c0846 100644 --- a/common/src/main/java/io/apicurio/registry/types/ArtifactMediaTypes.java +++ b/common/src/main/java/io/apicurio/registry/types/ArtifactMediaTypes.java @@ -10,5 +10,5 @@ public final class ArtifactMediaTypes { public static final MediaType PROTO = new MediaType("application", "x-protobuf"); public static final MediaType GRAPHQL = new MediaType("application", "graphql"); public static final MediaType BINARY = new 
MediaType("application", "octet-stream"); - + } diff --git a/common/src/main/java/io/apicurio/registry/types/ArtifactType.java b/common/src/main/java/io/apicurio/registry/types/ArtifactType.java index 96926e22c1..e7d6c90dc0 100644 --- a/common/src/main/java/io/apicurio/registry/types/ArtifactType.java +++ b/common/src/main/java/io/apicurio/registry/types/ArtifactType.java @@ -3,7 +3,8 @@ public class ArtifactType { - private ArtifactType() {} + private ArtifactType() { + } // TODO: Turn into enum, which can contain both a string value and a numeric identifier. // See io.apicurio.registry.storage.impl.kafkasql.serde.ArtifactTypeOrdUtil diff --git a/common/src/main/java/io/apicurio/registry/types/CheckedRegistryException.java b/common/src/main/java/io/apicurio/registry/types/CheckedRegistryException.java index fc668483aa..ad5ae2bb9f 100644 --- a/common/src/main/java/io/apicurio/registry/types/CheckedRegistryException.java +++ b/common/src/main/java/io/apicurio/registry/types/CheckedRegistryException.java @@ -3,11 +3,10 @@ /** * Generic checked project exception. *

- * Use this exception if you expect the caller would want to handle the exception, - * possibly making different decisions based on circumstances. + * Use this exception if you expect the caller would want to handle the exception, possibly making different + * decisions based on circumstances. *

* This class is intended for extension. Create a more specific exception. - * */ public abstract class CheckedRegistryException extends Exception { diff --git a/common/src/main/java/io/apicurio/registry/types/RegistryException.java b/common/src/main/java/io/apicurio/registry/types/RegistryException.java index 5ee01c9ee6..08120d00c1 100644 --- a/common/src/main/java/io/apicurio/registry/types/RegistryException.java +++ b/common/src/main/java/io/apicurio/registry/types/RegistryException.java @@ -3,10 +3,8 @@ /** * Generic project exception. *

- * Use this exception if you expect the caller would NOT want to handle the exception, - * possibly letting it bubble up and return a generic 500 error to the user, - * or there is a special mechanism to deal with it. - * + * Use this exception if you expect the caller would NOT want to handle the exception, possibly letting it + * bubble up and return a generic 500 error to the user, or there is a special mechanism to deal with it. */ // TODO Should be abstract and more specific exception should be used public class RegistryException extends RuntimeException { @@ -28,7 +26,8 @@ public RegistryException(Throwable cause) { super(cause); } - public RegistryException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + public RegistryException(String message, Throwable cause, boolean enableSuppression, + boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } } diff --git a/common/src/main/java/io/apicurio/registry/utils/ArtifactIdValidator.java b/common/src/main/java/io/apicurio/registry/utils/ArtifactIdValidator.java index d7094934a9..6028474a20 100644 --- a/common/src/main/java/io/apicurio/registry/utils/ArtifactIdValidator.java +++ b/common/src/main/java/io/apicurio/registry/utils/ArtifactIdValidator.java @@ -9,7 +9,7 @@ public class ArtifactIdValidator { public static final String ARTIFACT_ID_ERROR_MESSAGE = "Character % and non ASCII characters are not allowed in artifact IDs."; private ArtifactIdValidator() { - //utility class + // utility class } public static boolean isGroupIdAllowed(String groupId) { diff --git a/common/src/main/java/io/apicurio/registry/utils/Functional.java b/common/src/main/java/io/apicurio/registry/utils/Functional.java index 379778eb3a..0cc8260282 100644 --- a/common/src/main/java/io/apicurio/registry/utils/Functional.java +++ b/common/src/main/java/io/apicurio/registry/utils/Functional.java @@ -5,33 +5,29 @@ public final class Functional { private 
Functional() { } - @FunctionalInterface public interface RunnableEx { void run() throws X; } - @FunctionalInterface public interface Runnable1Ex { void run(T value) throws X; } - public static Runnable1Ex runnable1ExNoop() { - return x -> {}; + return x -> { + }; } - @FunctionalInterface public interface FunctionEx { R run() throws X; } - @FunctionalInterface public interface Function1Ex { diff --git a/common/src/main/java/io/apicurio/registry/utils/IoBufferedInputStream.java b/common/src/main/java/io/apicurio/registry/utils/IoBufferedInputStream.java index b45b54a21e..d6e94b4560 100644 --- a/common/src/main/java/io/apicurio/registry/utils/IoBufferedInputStream.java +++ b/common/src/main/java/io/apicurio/registry/utils/IoBufferedInputStream.java @@ -23,5 +23,4 @@ public void close() throws IOException { super.close(); } - } diff --git a/common/src/main/java/io/apicurio/registry/utils/IoUtil.java b/common/src/main/java/io/apicurio/registry/utils/IoUtil.java index 690de4911f..83d4695ea4 100644 --- a/common/src/main/java/io/apicurio/registry/utils/IoUtil.java +++ b/common/src/main/java/io/apicurio/registry/utils/IoUtil.java @@ -17,9 +17,8 @@ private static ByteArrayOutputStream toBaos(InputStream stream) throws IOExcepti } /** - * Close auto-closeable, - * unchecked IOException is thrown for any IO exception, - * IllegalStateException for all others. + * Close auto-closeable, unchecked IOException is thrown for any IO exception, IllegalStateException for + * all others. * * @param closeable the closeable */ @@ -48,8 +47,7 @@ public static void closeIgnore(AutoCloseable closeable) { } /** - * Get byte array from stream. - * Stream is closed at the end. + * Get byte array from stream. Stream is closed at the end. * * @param stream the stream * @return stream as a byte array @@ -83,8 +81,7 @@ public static byte[] toBytes(InputStream stream, boolean closeStream) { } /** - * Get string from stream. - * Stream is closed at the end. + * Get string from stream. 
Stream is closed at the end. * * @param stream the stream * @return stream as a string @@ -128,8 +125,8 @@ public static byte[] toBytes(String string) { public static InputStream toStream(String content) { return new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)); } - - public static InputStream toStream(byte [] content) { + + public static InputStream toStream(byte[] content) { return new ByteArrayInputStream(content); } diff --git a/common/src/main/java/io/apicurio/registry/utils/JAXRSClientUtil.java b/common/src/main/java/io/apicurio/registry/utils/JAXRSClientUtil.java index bc10c3322a..3680647e89 100644 --- a/common/src/main/java/io/apicurio/registry/utils/JAXRSClientUtil.java +++ b/common/src/main/java/io/apicurio/registry/utils/JAXRSClientUtil.java @@ -1,17 +1,19 @@ package io.apicurio.registry.utils; -import javax.net.ssl.HostnameVerifier; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLSession; -import javax.net.ssl.TrustManager; -import javax.net.ssl.X509TrustManager; import jakarta.ws.rs.client.Client; import jakarta.ws.rs.client.ClientBuilder; + import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.cert.X509Certificate; import java.util.concurrent.TimeUnit; +import javax.net.ssl.HostnameVerifier; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLSession; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; + public class JAXRSClientUtil { private static class NullHostnameVerifier implements HostnameVerifier { @@ -23,20 +25,23 @@ public boolean verify(String hostname, SSLSession session) { } } - private static TrustManager[] nullTrustManager = new TrustManager[]{new X509TrustManager() { + private static TrustManager[] nullTrustManager = new TrustManager[] { new X509TrustManager() { @Override public X509Certificate[] getAcceptedIssuers() { return new X509Certificate[0]; } @Override - public void checkClientTrusted(X509Certificate[] certs, 
String authType) {} + public void checkClientTrusted(X509Certificate[] certs, String authType) { + } @Override - public void checkServerTrusted(X509Certificate[] certs, String authType) {} - }}; + public void checkServerTrusted(X509Certificate[] certs, String authType) { + } + } }; - public static Client getJAXRSClient(boolean skipSSLValidation) throws KeyManagementException, NoSuchAlgorithmException { + public static Client getJAXRSClient(boolean skipSSLValidation) + throws KeyManagementException, NoSuchAlgorithmException { ClientBuilder cb = ClientBuilder.newBuilder(); cb.connectTimeout(10, TimeUnit.SECONDS); @@ -45,8 +50,7 @@ public static Client getJAXRSClient(boolean skipSSLValidation) throws KeyManagem if (skipSSLValidation) { SSLContext nullSSLContext = SSLContext.getInstance("TLSv1.2"); nullSSLContext.init(null, nullTrustManager, null); - cb.hostnameVerifier(NullHostnameVerifier.INSTANCE) - .sslContext(nullSSLContext); + cb.hostnameVerifier(NullHostnameVerifier.INSTANCE).sslContext(nullSSLContext); newClient = cb.build(); } else { diff --git a/common/src/main/java/io/apicurio/registry/utils/OptionalBean.java b/common/src/main/java/io/apicurio/registry/utils/OptionalBean.java index ab2f05046f..8ea2411d13 100644 --- a/common/src/main/java/io/apicurio/registry/utils/OptionalBean.java +++ b/common/src/main/java/io/apicurio/registry/utils/OptionalBean.java @@ -7,9 +7,7 @@ import java.util.stream.Stream; /** - * Proxyable alternative to java.util.Optional. - * Optional cannot be used with CDI because it is final. - * + * Proxyable alternative to java.util.Optional. Optional cannot be used with CDI because it is final. 
*/ public class OptionalBean { private static final OptionalBean EMPTY = new OptionalBean<>(); diff --git a/common/src/main/java/io/apicurio/registry/utils/PropertiesUtil.java b/common/src/main/java/io/apicurio/registry/utils/PropertiesUtil.java index 75dd589b8c..30c1f95237 100644 --- a/common/src/main/java/io/apicurio/registry/utils/PropertiesUtil.java +++ b/common/src/main/java/io/apicurio/registry/utils/PropertiesUtil.java @@ -1,11 +1,11 @@ package io.apicurio.registry.utils; +import jakarta.enterprise.inject.spi.InjectionPoint; import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.spi.ConfigProviderResolver; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import jakarta.enterprise.inject.spi.InjectionPoint; import java.util.Arrays; import java.util.HashMap; import java.util.Map; @@ -29,28 +29,21 @@ public class PropertiesUtil { public static Properties properties(InjectionPoint ip) { RegistryProperties cp = ip.getAnnotated().getAnnotation(RegistryProperties.class); if (cp == null) { - throw new IllegalArgumentException( - ip.getMember() + " is not annotated with @RegistryProperties" - ); + throw new IllegalArgumentException(ip.getMember() + " is not annotated with @RegistryProperties"); } String[] prefixes = Stream.of(cp.value()) - .map(pfx -> pfx.isEmpty() || pfx.endsWith(".") ? pfx : pfx + ".") - .distinct() + .map(pfx -> pfx.isEmpty() || pfx.endsWith(".") ? 
pfx : pfx + ".").distinct() .toArray(String[]::new); if (prefixes.length == 0) { - throw new IllegalArgumentException( - "Annotation @RegistryProperties on " + ip.getMember() + - " is missing non-empty 'value' attribute" - ); + throw new IllegalArgumentException("Annotation @RegistryProperties on " + ip.getMember() + + " is missing non-empty 'value' attribute"); } Properties properties = new Properties(); Config config = ConfigProviderResolver.instance().getConfig(); if (debug && log.isDebugEnabled()) { - String dump = StreamSupport - .stream(config.getPropertyNames().spliterator(), false) - .sorted() + String dump = StreamSupport.stream(config.getPropertyNames().spliterator(), false).sorted() .map(key -> key + "=" + config.getOptionalValue(key, String.class).orElse("")) .collect(Collectors.joining("\n ", " ", "\n")); log.debug("Injecting config properties with prefixes {} into {} from the following...\n{}", @@ -87,10 +80,7 @@ public static Properties properties(InjectionPoint ip) { } if (debug && log.isDebugEnabled()) { - String dump = properties - .stringPropertyNames() - .stream() - .sorted() + String dump = properties.stringPropertyNames().stream().sorted() .map(key -> key + "=" + properties.getProperty(key)) .collect(Collectors.joining("\n ", " ", "\n")); log.debug("... 
selected/prefix-stripped properties are:\n{}", dump); diff --git a/common/src/main/java/io/apicurio/registry/utils/RegistryProperties.java b/common/src/main/java/io/apicurio/registry/utils/RegistryProperties.java index 750d12d9bf..43b7fd5252 100644 --- a/common/src/main/java/io/apicurio/registry/utils/RegistryProperties.java +++ b/common/src/main/java/io/apicurio/registry/utils/RegistryProperties.java @@ -7,7 +7,7 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME; @Retention(RUNTIME) -@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER}) +@Target({ ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER }) public @interface RegistryProperties { String[] value(); diff --git a/common/src/main/java/io/apicurio/registry/utils/StringUtil.java b/common/src/main/java/io/apicurio/registry/utils/StringUtil.java index 211f8c9863..57ce9b2136 100644 --- a/common/src/main/java/io/apicurio/registry/utils/StringUtil.java +++ b/common/src/main/java/io/apicurio/registry/utils/StringUtil.java @@ -2,12 +2,10 @@ public class StringUtil { - public static boolean isEmpty(String string) { return string == null || string.isEmpty(); } - public static String limitStr(String value, int limit) { return limitStr(value, limit, false); } @@ -19,7 +17,6 @@ public static String asLowerCase(String value) { return value.toLowerCase(); } - public static String limitStr(String value, int limit, boolean withEllipsis) { if (StringUtil.isEmpty(value)) { return value; diff --git a/common/src/test/java/io/apicurio/registry/model/ModelTypesTest.java b/common/src/test/java/io/apicurio/registry/model/ModelTypesTest.java index ffe1515152..d0e52a7ae9 100644 --- a/common/src/test/java/io/apicurio/registry/model/ModelTypesTest.java +++ b/common/src/test/java/io/apicurio/registry/model/ModelTypesTest.java @@ -1,6 +1,5 @@ package io.apicurio.registry.model; - import jakarta.validation.ValidationException; import org.junit.jupiter.api.Test; @@ -8,7 +7,6 @@ public class 
ModelTypesTest { - @Test void testGroupId() { assertTrue(GroupId.DEFAULT.isDefaultGroup()); @@ -26,7 +24,6 @@ void testGroupId() { new GroupId("x".repeat(512)); } - @Test void testArtifactId() { assertThrows(ValidationException.class, () -> new ArtifactId(null)); @@ -38,7 +35,6 @@ void testArtifactId() { new ArtifactId("x".repeat(512)); } - @Test void testVersionId() { assertThrows(ValidationException.class, () -> new VersionId(null)); @@ -52,7 +48,6 @@ void testVersionId() { new VersionId("x".repeat(256)); } - @Test void testBranchId() { assertEquals(BranchId.LATEST, new BranchId("latest")); @@ -68,7 +63,6 @@ void testBranchId() { new BranchId("x".repeat(256)); } - @Test void testGAandGAV() { var ga1 = new GA(null, "artifact1"); diff --git a/common/src/test/java/io/apicurio/registry/model/VersionExpressionParserTest.java b/common/src/test/java/io/apicurio/registry/model/VersionExpressionParserTest.java index 9eb02b85db..99c126a5b0 100644 --- a/common/src/test/java/io/apicurio/registry/model/VersionExpressionParserTest.java +++ b/common/src/test/java/io/apicurio/registry/model/VersionExpressionParserTest.java @@ -1,6 +1,5 @@ package io.apicurio.registry.model; - import jakarta.validation.ValidationException; import org.junit.jupiter.api.Test; @@ -9,24 +8,30 @@ public class VersionExpressionParserTest { - @Test void testVersionExpressionParser() { var ga1 = new GA(null, "artifact1"); - assertEquals(new GAV(ga1, new VersionId("version1")), VersionExpressionParser.parse(ga1, "branch=latest", this::getBranchTip)); - assertEquals(new GAV(ga1, new VersionId("version2")), VersionExpressionParser.parse(ga1, "branch=1.0.x", this::getBranchTip)); - - assertEquals(new GAV(ga1, new VersionId("version3")), VersionExpressionParser.parse(ga1, "version3", this::getBranchTip)); - - assertThrows(ValidationException.class, () -> VersionExpressionParser.parse(ga1, "branch =1.0.x", this::getBranchTip)); - assertThrows(ValidationException.class, () -> VersionExpressionParser.parse(ga1, 
"branch 1.0.x", this::getBranchTip)); - assertThrows(ValidationException.class, () -> VersionExpressionParser.parse(ga1, "ranch=1.0.x", this::getBranchTip)); - assertThrows(ValidationException.class, () -> VersionExpressionParser.parse(ga1, "branch=1.0.@", this::getBranchTip)); - assertThrows(ValidationException.class, () -> VersionExpressionParser.parse(ga1, "branch=", this::getBranchTip)); + assertEquals(new GAV(ga1, new VersionId("version1")), + VersionExpressionParser.parse(ga1, "branch=latest", this::getBranchTip)); + assertEquals(new GAV(ga1, new VersionId("version2")), + VersionExpressionParser.parse(ga1, "branch=1.0.x", this::getBranchTip)); + + assertEquals(new GAV(ga1, new VersionId("version3")), + VersionExpressionParser.parse(ga1, "version3", this::getBranchTip)); + + assertThrows(ValidationException.class, + () -> VersionExpressionParser.parse(ga1, "branch =1.0.x", this::getBranchTip)); + assertThrows(ValidationException.class, + () -> VersionExpressionParser.parse(ga1, "branch 1.0.x", this::getBranchTip)); + assertThrows(ValidationException.class, + () -> VersionExpressionParser.parse(ga1, "ranch=1.0.x", this::getBranchTip)); + assertThrows(ValidationException.class, + () -> VersionExpressionParser.parse(ga1, "branch=1.0.@", this::getBranchTip)); + assertThrows(ValidationException.class, + () -> VersionExpressionParser.parse(ga1, "branch=", this::getBranchTip)); } - private GAV getBranchTip(GA ga, BranchId branchId) { if (BranchId.LATEST.equals(branchId)) { return new GAV(ga, new VersionId("version1")); diff --git a/distro/connect-converter/pom.xml b/distro/connect-converter/pom.xml index b5cd4cfe1c..b3fb3ca91c 100644 --- a/distro/connect-converter/pom.xml +++ b/distro/connect-converter/pom.xml @@ -1,105 +1,106 @@ - - 4.0.0 + + + 4.0.0 - - io.apicurio - apicurio-registry-distro - 3.0.0-SNAPSHOT - ../pom.xml - + + io.apicurio + apicurio-registry-distro + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-distro-connect-converter - 
apicurio-registry-distro-connect-converter + apicurio-registry-distro-connect-converter + apicurio-registry-distro-connect-converter - - converter-distribution - 2.31 - 1.2.2 - 2.4.0 - - - - - - org.apache.kafka - connect-json - ${distro.version.kafka} - - - + + converter-distribution + 2.31 + 1.2.2 + 2.4.0 + + - - io.apicurio - apicurio-registry-utils-converter - - - io.apicurio - apicurio-registry-java-sdk - + + org.apache.kafka + connect-json + ${distro.version.kafka} + + + + + + io.apicurio + apicurio-registry-utils-converter + + + io.apicurio + apicurio-registry-java-sdk + + - - - - org.apache.maven.plugins - maven-assembly-plugin - - - default - package - - single - - - apicurio-kafka-connect-converter-${project.version} - false - true - - src/assembly/${assembly.descriptor}.xml - - ${tar.long.file.mode} - - - - - - - org.apache.maven.plugins - maven-source-plugin - - - attach-sources - non-existant - - - attach-test-sources - non-existant - - - - - org.apache.maven.plugins - maven-surefire-plugin - - true - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - non-existant - - test-jar - - - - - - + + + + org.apache.maven.plugins + maven-assembly-plugin + + + default + + single + + package + + apicurio-kafka-connect-converter-${project.version} + false + true + + + src/assembly/${assembly.descriptor}.xml + + ${tar.long.file.mode} + + + + + + + org.apache.maven.plugins + maven-source-plugin + + + attach-sources + non-existant + + + attach-test-sources + non-existant + + + + + org.apache.maven.plugins + maven-surefire-plugin + + true + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + test-jar + + non-existant + + + + + diff --git a/distro/docker/pom.xml b/distro/docker/pom.xml index 815a3b8699..7f9371a6cf 100644 --- a/distro/docker/pom.xml +++ b/distro/docker/pom.xml @@ -1,85 +1,85 @@ - - 4.0.0 + + + 4.0.0 - - io.apicurio - apicurio-registry-distro - 3.0.0-SNAPSHOT - ../pom.xml - + + io.apicurio + apicurio-registry-distro + 
3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-distro-docker - pom - apicurio-registry-distro-docker + apicurio-registry-distro-docker + pom + apicurio-registry-distro-docker - - app-files/apicurio-registry-app-${project.version}-all.tar.gz - + + app-files/apicurio-registry-app-${project.version}-all.tar.gz + - - - io.apicurio - apicurio-registry-app - provided - - + + + io.apicurio + apicurio-registry-app + provided + + - + - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-target-app - process-resources - - copy-resources - - - ${basedir}/target/docker/app-files - - - ${basedir}/../../app/target - false - - apicurio-*.tar.gz - apicurio-*runner - - - - - - - copy-docker - process-resources - - copy-resources - - - ${basedir}/target/docker - - - src/main/docker - true - - - - - - + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-target-app + + copy-resources + + process-resources + + ${basedir}/target/docker/app-files + + + ${basedir}/../../app/target + false + + apicurio-*.tar.gz + apicurio-*runner + + + + + + + copy-docker + + copy-resources + + process-resources + + ${basedir}/target/docker + + + src/main/docker + true + + + + + + - - org.apache.maven.plugins - maven-deploy-plugin - - true - - + + org.apache.maven.plugins + maven-deploy-plugin + + true + + - - + + diff --git a/distro/pom.xml b/distro/pom.xml index 00a061fd0f..3cabc5f318 100644 --- a/distro/pom.xml +++ b/distro/pom.xml @@ -1,17 +1,17 @@ - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../pom.xml - - apicurio-registry-distro - pom - apicurio-registry-distro - - connect-converter - docker - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../pom.xml + + apicurio-registry-distro + pom + apicurio-registry-distro + + connect-converter + docker + diff --git a/docs/pom.xml b/docs/pom.xml index 69b15b173f..31ab03ffac 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -1,131 +1,128 @@ - - - 4.0.0 + + + 4.0.0 - - io.apicurio - 
apicurio-registry - 3.0.0-SNAPSHOT - ../pom.xml - + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-docs - pom - apicurio-registry-docs + apicurio-registry-docs + pom + apicurio-registry-docs + Open Source API & Schema Registry - https://www.apicur.io/ - Open Source API & Schema Registry + https://www.apicur.io/ - - Red Hat - https://www.redhat.com - + + Red Hat + https://www.redhat.com + - - - Apache License Version 2.0 - https://repository.jboss.org/licenses/apache-2.0.txt - repo - - + + + Apache License Version 2.0 + https://repository.jboss.org/licenses/apache-2.0.txt + repo + + - - GitHub - https://github.com/apicurio/apicurio-registry/issues - + + + EricWittmann + Eric Wittmann + eric.wittmann@redhat.com + Red Hat + + Project Lead + Developer + + -5 + + + jsenko + Jakub Senko + m@jsenko.net + Red Hat + + Developer + + +1 + + + alesj + Aleš Justin + ajustin@redhat.com + Red Hat + + Developer + + +1 + + - - scm:git:git@github.com:apicurio/apicurio-registry.git - scm:git:git@github.com:apicurio/apicurio-registry.git - scm:git:git@github.com:apicurio/apicurio-registry.git - + + rest-api + - - - Eric Wittmann - EricWittmann - eric.wittmann@redhat.com - Red Hat - - Project Lead - Developer - - -5 - - - Jakub Senko - jsenko - m@jsenko.net - Red Hat - - Developer - - +1 - - - Aleš Justin - alesj - ajustin@redhat.com - Red Hat - - Developer - - +1 - - + + scm:git:git@github.com:apicurio/apicurio-registry.git + scm:git:git@github.com:apicurio/apicurio-registry.git + scm:git:git@github.com:apicurio/apicurio-registry.git + - - rest-api - + + GitHub + https://github.com/apicurio/apicurio-registry/issues + - - - - io.apicurio - apicurio-registry-app - ${project.version} - - + + + + io.apicurio + apicurio-registry-app + ${project.version} + + - - - docs-generation - - - !skipDocsGen - - - - - - dev.jbang - jbang-maven-plugin - 0.0.8 - false - - - run - process-resources - - run - - - - - ${project.version} - ${project.basedir} - 
${apicurio-common-app-components.version} - - - - - - - - - + + + docs-generation + + + !skipDocsGen + + + + + + dev.jbang + jbang-maven-plugin + 0.0.8 + false + + + run + + run + + process-resources + + + + ${project.version} + ${project.basedir} + ${apicurio-common-app-components.version} + + + + + + + + + diff --git a/docs/rest-api/pom.xml b/docs/rest-api/pom.xml index 175e6f9cc9..6f0fec0880 100644 --- a/docs/rest-api/pom.xml +++ b/docs/rest-api/pom.xml @@ -1,111 +1,108 @@ - - - 4.0.0 + + + 4.0.0 - - io.apicurio - apicurio-registry-docs - 3.0.0-SNAPSHOT - ../pom.xml - + + io.apicurio + apicurio-registry-docs + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-docs-rest-api - pom - apicurio-registry-docs-rest-api + apicurio-registry-docs-rest-api + pom + apicurio-registry-docs-rest-api + Open Source API & Schema Registry - https://www.apicur.io/ - Open Source API & Schema Registry + https://www.apicur.io/ - - Red Hat - https://www.redhat.com - + + Red Hat + https://www.redhat.com + - - - Apache License Version 2.0 - https://repository.jboss.org/licenses/apache-2.0.txt - repo - - + + + Apache License Version 2.0 + https://repository.jboss.org/licenses/apache-2.0.txt + repo + + - - GitHub - https://github.com/apicurio/apicurio-registry/issues - + + + EricWittmann + Eric Wittmann + eric.wittmann@redhat.com + Red Hat + + Project Lead + Developer + + -5 + + + jsenko + Jakub Senko + m@jsenko.net + Red Hat + + Developer + + +1 + + + alesj + Aleš Justin + ajustin@redhat.com + Red Hat + + Developer + + +1 + + - - scm:git:git@github.com:apicurio/apicurio-registry.git - scm:git:git@github.com:apicurio/apicurio-registry.git - scm:git:git@github.com:apicurio/apicurio-registry.git - + + scm:git:git@github.com:apicurio/apicurio-registry.git + scm:git:git@github.com:apicurio/apicurio-registry.git + scm:git:git@github.com:apicurio/apicurio-registry.git + - - - Eric Wittmann - EricWittmann - eric.wittmann@redhat.com - Red Hat - - Project Lead - Developer - - -5 - - - Jakub Senko - 
jsenko - m@jsenko.net - Red Hat - - Developer - - +1 - - - Aleš Justin - alesj - ajustin@redhat.com - Red Hat - - Developer - - +1 - - + + GitHub + https://github.com/apicurio/apicurio-registry/issues + - - + + - - org.apache.maven.plugins - maven-assembly-plugin - - - assembly - package - - single - - - ${project.artifactId}-${project.version} - true - - src/main/assembly/static-assembly.xml - - - 0755 - - ${tar.long.file.mode} - - - - + + org.apache.maven.plugins + maven-assembly-plugin + + + assembly + + single + + package + + ${project.artifactId}-${project.version} + true + + src/main/assembly/static-assembly.xml + + + 0755 + + ${tar.long.file.mode} + + + + - - + + diff --git a/examples/README.md b/examples/README.md index c858bf5a93..c3e45e4076 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,8 +1,8 @@ # Apicurio Registry Example Applications This repository contains a set of example applications (mostly Kafka applications) that use the -Apicurio Registry as part of their workflow. The registry is typically used to store schemas +Apicurio Registry as part of their workflow. The registry is typically used to store schemas used by Kafka serializer and deserializer classes. These serdes classes will fetch the schema -from the registry for use during producing or consuming operations (to serializer, deserializer, +from the registry for use during producing or consuming operations (to serializer, deserializer, or validate the Kafka message payload). Each example in this repository attempts to demonstrate some specific use-case or configuration. @@ -23,13 +23,13 @@ Simply run ``docker-compose -f examples/tools/kafka-compose/kafka-compose.yaml A list of examples is included below, with descriptions and explanations of each covered use-case. 
## Simple Avro Example -This example application demonstrates the basics of using the registry in a very simple Kafka -publish/subscribe application using Apache Avro as the schema technology used to serialize -and deserialize message payloads. +This example application demonstrates the basics of using the registry in a very simple Kafka +publish/subscribe application using Apache Avro as the schema technology used to serialize +and deserialize message payloads. ## Simple JSON Schema Example -This example application demonstrates the basics of using the registry in a very simple Kafka -publish/subscribe application using JSON Schema to validate message payloads when both producing +This example application demonstrates the basics of using the registry in a very simple Kafka +publish/subscribe application using JSON Schema to validate message payloads when both producing and consuming them. JSON Schema is not a serialization technology, but rather is only used for validation. Therefore, it can be enabled or disabled in the serializer and deserializer. @@ -42,7 +42,7 @@ is essentially the same as the Simple Avro Example, but using a Confluent serial Apicurio Registry deserializer. ## Avro Bean Example -This example demonstrates how to use Avro as the schema and serialization technology while +This example demonstrates how to use Avro as the schema and serialization technology while using a Java Bean as the Kafka message payload. This is essentially the same as the Simple Avro Example, but using a java bean instead of a `GenericRecord` as the message payload. @@ -55,18 +55,18 @@ the same as the Simple Avro Example, except instead of using one of the default Registry Global ID strategies, it uses a custom one. 
## Simple Avro Maven Example -This example application demonstrates how to use the Apicurio Registry maven plugin to +This example application demonstrates how to use the Apicurio Registry maven plugin to pre-register an Avro schema so that it does not need to be embedded within the producer application. Note that this example will fail unless the maven plugin is executed before the Java application. See the javadoc in the example for details. ## Rest Client example -This example application demonstrates how to use the Apicurio Registry rest client to create, -delete, or fetch schemas. This example contains two basic java application, one showing how to -improve the logs by logging all the request and response headers and a basic example on how to +This example application demonstrates how to use the Apicurio Registry rest client to create, +delete, or fetch schemas. This example contains two basic java application, one showing how to +improve the logs by logging all the request and response headers and a basic example on how to use the client. ## Mix Avro Schemas Example This example application showcases an scenario where Apache Avro messages are published to the same Kafka topic using different Avro schemas. This example uses the Apicurio Registry serdes classes to serialize -and deserialize Apache Avro messages using different schemas, even if received in the same Kafka topic. \ No newline at end of file +and deserialize Apache Avro messages using different schemas, even if received in the same Kafka topic. 
diff --git a/examples/avro-bean/pom.xml b/examples/avro-bean/pom.xml index b0c12a01d7..fac71a4dd6 100644 --- a/examples/avro-bean/pom.xml +++ b/examples/avro-bean/pom.xml @@ -1,43 +1,42 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-avro-bean - jar + apicurio-registry-examples-avro-bean + jar - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - - org.slf4j - slf4j-api - ${slf4j.version} - - - org.slf4j - slf4j-simple - ${slf4j.version} - - + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + diff --git a/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/AvroBeanExample.java b/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/AvroBeanExample.java index 2984e18a36..ad5abb2a7c 100644 --- a/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/AvroBeanExample.java +++ b/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/AvroBeanExample.java @@ -16,11 +16,11 @@ package io.apicurio.registry.examples.avro.bean; -import java.time.Duration; -import java.util.Collections; -import java.util.Date; -import java.util.Properties; - +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import io.apicurio.registry.serde.avro.ReflectAvroDatumProvider; import 
org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; @@ -32,28 +32,24 @@ import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; -import io.apicurio.registry.serde.SerdeConfig; -import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; -import io.apicurio.registry.serde.avro.AvroKafkaSerdeConfig; -import io.apicurio.registry.serde.avro.AvroKafkaSerializer; -import io.apicurio.registry.serde.avro.ReflectAvroDatumProvider; +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe - * scenario with Avro as the serialization type. The following aspects are demonstrated: - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * Avro as the serialization type. The following aspects are demonstrated: *

    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Auto-register the Avro schema in the registry (registered by the producer)
  6. - *
  7. Data sent as a {@link GreetingBean}
  8. + *
  9. Configuring a Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Auto-register the Avro schema in the registry (registered by the producer)
  14. + *
  15. Data sent as a {@link GreetingBean}
  16. *
- * * Pre-requisites: - * *
    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author eric.wittmann@gmail.com @@ -66,8 +62,7 @@ public class AvroBeanExample { private static final String TOPIC_NAME = AvroBeanExample.class.getSimpleName(); private static final String SUBJECT_NAME = "Greeting"; - - public static final void main(String [] args) throws Exception { + public static final void main(String[] args) throws Exception { System.out.println("Starting example " + AvroBeanExample.class.getSimpleName()); String topicName = TOPIC_NAME; String subjectName = SUBJECT_NAME; @@ -83,9 +78,9 @@ public static final void main(String [] args) throws Exception { greeting.setMessage("Hello (" + producedMessages++ + ")!"); greeting.setTime(System.currentTimeMillis()); - // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, greeting); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, + greeting); producer.send(producedRecord); Thread.sleep(100); @@ -115,10 +110,12 @@ public static final void main(String [] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. 
System.out.println("No messages waiting..."); - } else records.forEach(record -> { - GreetingBean greeting = record.value(); - System.out.println("Consumed a message: " + greeting.getMessage() + " @ " + new Date(greeting.getTime())); - }); + } else + records.forEach(record -> { + GreetingBean greeting = record.value(); + System.out.println("Consumed a message: " + greeting.getMessage() + " @ " + + new Date(greeting.getTime())); + }); } } finally { consumer.close(); @@ -144,10 +141,11 @@ private static Producer createKafkaProducer() { // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); - // Use Java reflection as the Avro Datum Provider - this also generates an Avro schema from the java bean + // Use Java reflection as the Avro Datum Provider - this also generates an Avro schema from the java + // bean props.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka producer @@ -170,17 +168,18 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Avro - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // Use Java reflection as the Avro Datum Provider props.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -199,13 +198,16 @@ public static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" "+ - " oauth.client.secret=\"%s\" "+ - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } diff --git a/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/GreetingBean.java b/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/GreetingBean.java index b8c64dd589..be5090f667 100644 --- a/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/GreetingBean.java +++ b/examples/avro-bean/src/main/java/io/apicurio/registry/examples/avro/bean/GreetingBean.java @@ -20,10 +20,10 @@ * @author eric.wittmann@gmail.com */ public class GreetingBean { - + private String message; private long time; - + /** * Constructor. 
*/ diff --git a/examples/avro-maven-with-references-auto/pom.xml b/examples/avro-maven-with-references-auto/pom.xml index 69aaa07abe..46e1017de1 100644 --- a/examples/avro-maven-with-references-auto/pom.xml +++ b/examples/avro-maven-with-references-auto/pom.xml @@ -1,51 +1,47 @@ - - - apicurio-registry-examples - io.apicurio - 3.0.0-SNAPSHOT - - 4.0.0 + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + - apicurio-registry-examples-avro-maven-with-references-auto - jar + apicurio-registry-examples-avro-maven-with-references-auto + jar - - - - io.apicurio - apicurio-registry-maven-plugin - ${project.version} - - - register-artifact - - register - - process-test-resources - - http://localhost:8080/apis/registry/v3 - - - avro-maven-with-references-auto - TradeRaw - 2.0 - AVRO - - ${project.basedir}/src/main/resources/schemas/TradeRaw.avsc - - FIND_OR_CREATE_VERSION - true - true - - - - - - - - - + + + + io.apicurio + apicurio-registry-maven-plugin + ${project.version} + + + register-artifact + + register + + process-test-resources + + http://localhost:8080/apis/registry/v3 + + + avro-maven-with-references-auto + TradeRaw + 2.0 + AVRO + ${project.basedir}/src/main/resources/schemas/TradeRaw.avsc + FIND_OR_CREATE_VERSION + true + true + + + + + + + + + diff --git a/examples/avro-maven-with-references/pom.xml b/examples/avro-maven-with-references/pom.xml index fcdd297fac..f9b4aab70b 100644 --- a/examples/avro-maven-with-references/pom.xml +++ b/examples/avro-maven-with-references/pom.xml @@ -1,64 +1,58 @@ - - - apicurio-registry-examples - io.apicurio - 3.0.0-SNAPSHOT - - 4.0.0 + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + - apicurio-registry-examples-avro-maven-with-references - jar + apicurio-registry-examples-avro-maven-with-references + jar - - - - io.apicurio - apicurio-registry-maven-plugin - ${project.version} - - - register-artifact - - register - - process-test-resources - - http://localhost:8080/apis/registry/v3 
- - - avro-maven-with-references - TradeKey - 2.0 - AVRO - - ${project.basedir}/src/main/resources/schemas/TradeKey.avsc - - FIND_OR_CREATE_VERSION - true - - - com.kubetrade.schema.common.Exchange - test-group - Exchange - 2.0 - AVRO - - ${project.basedir}/src/main/resources/schemas/Exchange.avsc - - FIND_OR_CREATE_VERSION - true - - - - - - - - - - - + + + + io.apicurio + apicurio-registry-maven-plugin + ${project.version} + + + register-artifact + + register + + process-test-resources + + http://localhost:8080/apis/registry/v3 + + + avro-maven-with-references + TradeKey + 2.0 + AVRO + ${project.basedir}/src/main/resources/schemas/TradeKey.avsc + FIND_OR_CREATE_VERSION + true + + + com.kubetrade.schema.common.Exchange + test-group + Exchange + 2.0 + AVRO + ${project.basedir}/src/main/resources/schemas/Exchange.avsc + FIND_OR_CREATE_VERSION + true + + + + + + + + + + + diff --git a/examples/confluent-serdes/pom.xml b/examples/confluent-serdes/pom.xml index e96dace9f0..4ba67507c5 100644 --- a/examples/confluent-serdes/pom.xml +++ b/examples/confluent-serdes/pom.xml @@ -1,65 +1,64 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-confluent-serdes - jar + apicurio-registry-examples-confluent-serdes + jar - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - - - io.confluent - kafka-schema-registry-client - compile - - - io.confluent - kafka-avro-serializer - compile - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - - org.slf4j - slf4j-api - ${slf4j.version} - - - org.slf4j - slf4j-simple - ${slf4j.version} - - + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${project.version} + + + io.confluent + kafka-schema-registry-client + compile + + + io.confluent + kafka-avro-serializer + compile + + + org.apache.kafka + 
kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + - - - confluent - Confluent - https://packages.confluent.io/maven/ - - - jitpack.io - https://jitpack.io - - + + + confluent + Confluent + https://packages.confluent.io/maven/ + + + jitpack.io + https://jitpack.io + + diff --git a/examples/confluent-serdes/src/main/java/io/apicurio/registry/examples/confluent/serdes/ConfluentSerdesExample.java b/examples/confluent-serdes/src/main/java/io/apicurio/registry/examples/confluent/serdes/ConfluentSerdesExample.java index f7cc70193c..934ba7d963 100644 --- a/examples/confluent-serdes/src/main/java/io/apicurio/registry/examples/confluent/serdes/ConfluentSerdesExample.java +++ b/examples/confluent-serdes/src/main/java/io/apicurio/registry/examples/confluent/serdes/ConfluentSerdesExample.java @@ -16,13 +16,8 @@ package io.apicurio.registry.examples.confluent.serdes; -import java.time.Duration; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.RestService; import io.confluent.kafka.serializers.KafkaAvroSerializer; @@ -40,27 +35,27 @@ import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; -import io.apicurio.registry.serde.SerdeConfig; -import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe - 
* scenario where applications use a mix of Confluent and Apicurio Registry serdes classes. This - * example uses the Confluent serializer for the producer and the Apicurio Registry deserializer - * class for the consumer. - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario + * where applications use a mix of Confluent and Apicurio Registry serdes classes. This example uses the + * Confluent serializer for the producer and the Apicurio Registry deserializer class for the consumer. *
    - *
  1. Configuring a Confluent Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Auto-register the Avro schema in the registry (registered by the producer)
  6. - *
  7. Data sent as a simple GenericRecord, no java beans needed
  8. + *
  9. Configuring a Confluent Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Auto-register the Avro schema in the registry (registered by the producer)
  14. + *
  15. Data sent as a simple GenericRecord, no java beans needed
  16. *
- * * Pre-requisites: - * *
    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author eric.wittmann@gmail.com @@ -75,8 +70,7 @@ public class ConfluentSerdesExample { private static final String SUBJECT_NAME = "Greeting"; private static final String SCHEMA = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Timestamp\",\"type\":\"long\"}]}"; - - public static final void main(String [] args) throws Exception { + public static final void main(String[] args) throws Exception { System.out.println("Starting example " + ConfluentSerdesExample.class.getSimpleName()); String topicName = TOPIC_NAME; String subjectName = SUBJECT_NAME; @@ -97,7 +91,8 @@ public static final void main(String [] args) throws Exception { record.put("Timestamp", now.getTime()); // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, + record); producer.send(producedRecord); Thread.sleep(100); @@ -127,10 +122,12 @@ public static final void main(String [] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. 
System.out.println("No messages waiting..."); - } else records.forEach(record -> { - GenericRecord value = record.value(); - System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Timestamp"))); - }); + } else + records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + + new Date((long) value.get("Timestamp"))); + }); } } finally { consumer.close(); @@ -155,13 +152,16 @@ private static Producer createKafkaProducer() { RestService restService = new RestService(CCOMPAT_API_URL); final Map restServiceProperties = new HashMap<>(); - //If auth is enabled using the env var, we try to configure it + // If auth is enabled using the env var, we try to configure it if (Boolean.parseBoolean(System.getenv("CONFIGURE_AUTH"))) { restServiceProperties.put("basic.auth.credentials.source", "USER_INFO"); - restServiceProperties.put("schema.registry.basic.auth.user.info", String.format("%s:%s", System.getenv(SerdeConfig.AUTH_CLIENT_ID), System.getenv(SerdeConfig.AUTH_CLIENT_SECRET))); + restServiceProperties.put("schema.registry.basic.auth.user.info", + String.format("%s:%s", System.getenv(SerdeConfig.AUTH_CLIENT_ID), + System.getenv(SerdeConfig.AUTH_CLIENT_SECRET))); } - CachedSchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(restService, 100, restServiceProperties); + CachedSchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(restService, 100, + restServiceProperties); Map properties = new HashMap<>(); @@ -169,14 +169,16 @@ private static Producer createKafkaProducer() { properties.put("schema.registry.url", CCOMPAT_API_URL); properties.put("auto.register.schemas", "true"); // Map the topic name to the artifactId in the registry - properties.put("value.subject.name.strategy", "io.confluent.kafka.serializers.subject.TopicRecordNameStrategy"); + properties.put("value.subject.name.strategy", + 
"io.confluent.kafka.serializers.subject.TopicRecordNameStrategy"); // Use the Confluent provided Kafka Serializer for Avro KafkaAvroSerializer valueSerializer = new KafkaAvroSerializer(schemaRegistryClient, properties); StringSerializer keySerializer = new StringSerializer(); // Create the Kafka producer - Producer producer = new KafkaProducer(props, keySerializer, valueSerializer); + Producer producer = new KafkaProducer(props, keySerializer, + valueSerializer); return producer; } @@ -194,17 +196,18 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Avro - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // Enable "Confluent Compatible API" mode in the Apicurio Registry deserializer props.putIfAbsent(SerdeConfig.ENABLE_CONFLUENT_ID_HANDLER, Boolean.TRUE); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -223,13 +226,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" "+ - " oauth.client.secret=\"%s\" "+ - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } diff --git a/examples/custom-resolver/pom.xml b/examples/custom-resolver/pom.xml index 3b9e4f6554..0092cac4d2 100644 --- a/examples/custom-resolver/pom.xml +++ b/examples/custom-resolver/pom.xml @@ -1,33 +1,32 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-custom-resolver - jar + apicurio-registry-examples-custom-resolver + jar - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - + + + io.apicurio + apicurio-registry-serdes-avro-serde + 
${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + diff --git a/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolver.java b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolver.java index f3c1e49ae4..32c9eda378 100644 --- a/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolver.java +++ b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolver.java @@ -37,9 +37,9 @@ import java.util.concurrent.ConcurrentHashMap; /** - * A custom schema resolver that simply uses the Avro schema found in the {@link Config} - * class - and ensures that the schema exists in the registry (so that the deserializer - * is guaranteed to be able to retrieve the exact schema used). + * A custom schema resolver that simply uses the Avro schema found in the {@link Config} class - and ensures + * that the schema exists in the registry (so that the deserializer is guaranteed to be able to retrieve the + * exact schema used). 
* * @author eric.wittmann@gmail.com */ @@ -48,7 +48,8 @@ public class CustomSchemaResolver extends AbstractSchemaResolver { protected final Map> schemaLookupCacheByContent = new ConcurrentHashMap<>(); /** - * @see io.apicurio.registry.serde.SchemaResolver#configure(java.util.Map, boolean, io.apicurio.registry.serde.SchemaParser) + * @see io.apicurio.registry.serde.SchemaResolver#configure(java.util.Map, boolean, + * io.apicurio.registry.serde.SchemaParser) */ @Override public void configure(Map configs, boolean isKey, SchemaParser schemaMapper) { @@ -56,11 +57,13 @@ public void configure(Map configs, boolean isKey, SchemaParser resolveSchema(String topic, Headers headers, D data, ParsedSchema parsedSchema) { + public SchemaLookupResult resolveSchema(String topic, Headers headers, D data, + ParsedSchema parsedSchema) { System.out.println("[CustomSchemaResolver] Resolving a schema for topic: " + topic); String schema = Config.SCHEMA; @@ -69,7 +72,8 @@ public SchemaLookupResult resolveSchema(String topic, Headers headers, D String artifactId = topic + "-value"; Schema schemaObj = AvroSchemaUtils.parse(schema); - ByteArrayInputStream schemaContent = new ByteArrayInputStream(schema.getBytes(StandardCharsets.UTF_8)); + ByteArrayInputStream schemaContent = new ByteArrayInputStream( + schema.getBytes(StandardCharsets.UTF_8)); // Ensure the schema exists in the schema registry. 
CreateArtifact createArtifact = new CreateArtifact(); @@ -80,18 +84,14 @@ public SchemaLookupResult resolveSchema(String topic, Headers headers, D createArtifact.getFirstVersion().getContent().setContent(IoUtil.toString(schemaContent)); createArtifact.getFirstVersion().getContent().setContentType("application/json"); - final io.apicurio.registry.rest.client.models.VersionMetaData metaData = client.groups().byGroupId("default").artifacts().post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.FIND_OR_CREATE_VERSION; - }).getVersion(); + final io.apicurio.registry.rest.client.models.VersionMetaData metaData = client.groups() + .byGroupId("default").artifacts().post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.FIND_OR_CREATE_VERSION; + }).getVersion(); - SchemaLookupResult result = SchemaLookupResult.builder() - .groupId(groupId) - .artifactId(artifactId) - .version(String.valueOf(metaData.getVersion())) - .globalId(metaData.getGlobalId()) - .schema(schemaObj) - .rawSchema(schema.getBytes(StandardCharsets.UTF_8)) - .build(); + SchemaLookupResult result = SchemaLookupResult.builder().groupId(groupId).artifactId(artifactId) + .version(String.valueOf(metaData.getVersion())).globalId(metaData.getGlobalId()) + .schema(schemaObj).rawSchema(schema.getBytes(StandardCharsets.UTF_8)).build(); // Also update the schemaCacheByGlobalId - useful if this resolver is used by both // the serializer and deserializer in the same Java application. 
@@ -104,7 +104,8 @@ public SchemaLookupResult resolveSchema(String topic, Headers headers, D */ @Override public SchemaLookupResult resolveSchemaByArtifactReference(ArtifactReference reference) { - throw new UnsupportedOperationException("resolveSchemaByArtifactReference() is not supported by this implementation."); + throw new UnsupportedOperationException( + "resolveSchemaByArtifactReference() is not supported by this implementation."); } @Override diff --git a/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolverExample.java b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolverExample.java index 59bd8f6102..69c0228e6c 100644 --- a/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolverExample.java +++ b/examples/custom-resolver/src/main/java/io/apicurio/registry/examples/custom/resolver/CustomSchemaResolverExample.java @@ -16,11 +16,9 @@ package io.apicurio.registry.examples.custom.resolver; -import java.time.Duration; -import java.util.Collections; -import java.util.Date; -import java.util.Properties; - +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; import org.apache.avro.Schema; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericRecord; @@ -35,26 +33,24 @@ import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; -import io.apicurio.registry.serde.SerdeConfig; -import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; -import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple 
publish/subscribe - * scenario with Avro as the serialization type. The following aspects are demonstrated: - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * Avro as the serialization type. The following aspects are demonstrated: *
    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Register the Avro schema in the registry using a custom Global Id Strategy
  6. - *
  7. Data sent as a simple GenericRecord, no java beans needed
  8. + *
  9. Configuring a Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Register the Avro schema in the registry using a custom Global Id Strategy
  14. + *
  15. Data sent as a simple GenericRecord, no java beans needed
  16. *
- * * Pre-requisites: - * *
    - *
  • Kafka must be running on localhost:9092 or the value must be changed accordingly.
  • - *
  • Apicurio Registry must be running on localhost:8080 or the value must be changed accordingly.
  • + *
  • Kafka must be running on localhost:9092 or the value must be changed accordingly.
  • + *
  • Apicurio Registry must be running on localhost:8080 or the value must be changed accordingly.
  • *
* * @author eric.wittmann@gmail.com @@ -62,8 +58,7 @@ */ public class CustomSchemaResolverExample { - - public static final void main(String [] args) throws Exception { + public static final void main(String[] args) throws Exception { System.out.println("Starting example " + CustomSchemaResolverExample.class.getSimpleName()); String topicName = Config.TOPIC_NAME; String subjectName = Config.SUBJECT_NAME; @@ -84,7 +79,8 @@ public static final void main(String [] args) throws Exception { record.put("Time", now.getTime()); // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, + record); producer.send(producedRecord); Thread.sleep(100); @@ -114,10 +110,12 @@ public static final void main(String [] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. System.out.println("No messages waiting..."); - } else records.forEach(record -> { - GenericRecord value = record.value(); - System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); - }); + } else + records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + + new Date((long) value.get("Time"))); + }); } } finally { consumer.close(); @@ -145,7 +143,7 @@ private static Producer createKafkaProducer() { // Use our custom resolver here. props.putIfAbsent(SerdeConfig.SCHEMA_RESOLVER, CustomSchemaResolver.class.getName()); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka producer @@ -167,15 +165,16 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Avro - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, Config.REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -194,13 +193,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" "+ - " oauth.client.secret=\"%s\" "+ - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } diff --git a/examples/custom-strategy/pom.xml b/examples/custom-strategy/pom.xml index ea5b52ae6e..2dde58e892 100644 --- a/examples/custom-strategy/pom.xml +++ b/examples/custom-strategy/pom.xml @@ -1,33 +1,32 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-custom-strategy - jar + apicurio-registry-examples-custom-strategy + jar - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - + + + io.apicurio + apicurio-registry-serdes-avro-serde 
+ ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + diff --git a/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomArtifactResolverStrategy.java b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomArtifactResolverStrategy.java index 52ab1a95ea..0be3057fca 100644 --- a/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomArtifactResolverStrategy.java +++ b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomArtifactResolverStrategy.java @@ -7,14 +7,13 @@ public class CustomArtifactResolverStrategy implements ArtifactResolverStrategy< @Override public ArtifactReference artifactReference(String topic, boolean isKey, Object schema) { - return ArtifactReference.builder() - .artifactId("my-artifact-" + topic + (isKey ? "-key" : "-value")) - .build(); + return ArtifactReference.builder().artifactId("my-artifact-" + topic + (isKey ? "-key" : "-value")) + .build(); } @Override public boolean loadSchema() { return false; } - + } diff --git a/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomStrategyExample.java b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomStrategyExample.java index 97bd1ecd29..297a4830ed 100644 --- a/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomStrategyExample.java +++ b/examples/custom-strategy/src/main/java/io/apicurio/registry/examples/custom/strategy/CustomStrategyExample.java @@ -47,21 +47,19 @@ import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe - * scenario with Avro as the serialization type. 
The following aspects are demonstrated: - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * Avro as the serialization type. The following aspects are demonstrated: *
    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Register the Avro schema in the registry using a custom Global Id Strategy
  6. - *
  7. Data sent as a simple GenericRecord, no java beans needed
  8. + *
  9. Configuring a Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Register the Avro schema in the registry using a custom Global Id Strategy
  14. + *
  15. Data sent as a simple GenericRecord, no java beans needed
  16. *
*

* Pre-requisites: - * *

    - *
  • Kafka must be running on localhost:9092 or the value must be changed accordingly.
  • - *
  • Apicurio Registry must be running on localhost:8080 or the value must be changed accordingly.
  • + *
  • Kafka must be running on localhost:9092 or the value must be changed accordingly.
  • + *
  • Apicurio Registry must be running on localhost:8080 or the value must be changed accordingly.
  • *
* * @author eric.wittmann@gmail.com @@ -69,7 +67,6 @@ */ public class CustomStrategyExample { - public static final void main(String[] args) throws Exception { System.out.println("Starting example " + CustomStrategyExample.class.getSimpleName()); String topicName = Config.TOPIC_NAME; @@ -111,7 +108,8 @@ public static final void main(String[] args) throws Exception { record.put("Time", now.getTime()); // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, + record); producer.send(producedRecord); Thread.sleep(100); @@ -141,10 +139,12 @@ public static final void main(String[] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. System.out.println("No messages waiting..."); - } else records.forEach(record -> { - GenericRecord value = record.value(); - System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); - }); + } else + records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + + new Date((long) value.get("Time"))); + }); } } finally { consumer.close(); @@ -172,8 +172,9 @@ private static Producer createKafkaProducer() { props.putIfAbsent(SerdeConfig.REGISTRY_URL, Config.REGISTRY_URL); props.putIfAbsent(SerdeConfig.FIND_LATEST_ARTIFACT, true); // Use our custom artifact strategy here. - props.putIfAbsent(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, CustomArtifactResolverStrategy.class.getName()); - //Just if security values are present, then we configure them. + props.putIfAbsent(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, + CustomArtifactResolverStrategy.class.getName()); + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka producer @@ -195,15 +196,16 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Avro - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, Config.REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -222,13 +224,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" " + - " oauth.client.secret=\"%s\" " + - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } diff --git a/examples/debezium-openshift/pom.xml b/examples/debezium-openshift/pom.xml index ca2143e15e..99f158f7df 100644 --- a/examples/debezium-openshift/pom.xml +++ b/examples/debezium-openshift/pom.xml @@ -1,316 +1,297 @@ - - 4.0.0 + + 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-debezium-openshift + apicurio-registry-examples-debezium-openshift - - 1.18.28 - + + 1.18.28 + - + - - io.quarkus - quarkus-resteasy-jackson - - - com.github.java-json-tools - jackson-coreutils - - - + + io.quarkus + quarkus-resteasy-jackson + + + com.github.java-json-tools + jackson-coreutils + + + - 
- io.quarkus - quarkus-jdbc-mysql - + + io.quarkus + quarkus-jdbc-mysql + - - io.quarkus - quarkus-agroal - + + io.quarkus + quarkus-agroal + - - io.quarkus - quarkus-smallrye-health - + + io.quarkus + quarkus-smallrye-health + - - io.quarkus - quarkus-scheduler - + + io.quarkus + quarkus-scheduler + - - io.quarkus - quarkus-container-image-jib - + + io.quarkus + quarkus-container-image-jib + - - io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - + + io.apicurio + apicurio-registry-serdes-avro-serde + ${project.version} + - - org.apache.kafka - kafka-clients - ${kafka.version} - + + org.apache.kafka + kafka-clients + ${kafka.version} + - - org.projectlombok - lombok - ${lombok.version} - + + org.projectlombok + lombok + ${lombok.version} + - + - - - - io.quarkus - quarkus-maven-plugin - true - - - - build - generate-code - generate-code-tests - - - - - - maven-compiler-plugin - - - maven-surefire-plugin - - - org.jboss.logmanager.LogManager - ${maven.home} - - - + + + + io.quarkus + quarkus-maven-plugin + true + + + + build + generate-code + generate-code-tests + + + + + + maven-compiler-plugin + + + maven-surefire-plugin + + + org.jboss.logmanager.LogManager + ${maven.home} + + + - - io.apicurio - apicurio-registry-maven-plugin - ${project.version} - - - generate-sources - - download - - - - ${registry.url}/apis/registry/v2 - + + io.apicurio + apicurio-registry-maven-plugin + ${project.version} + + + + download + + generate-sources + + ${registry.url}/apis/registry/v2 - - - default - event.block - ${project.build.directory}/resources/block.avsc - true - - - default - io.debezium.connector.mysql.Source - ${project.build.directory}/resources/Source.avsc - true - - - default - io.debezium.connector.schema.Change - ${project.build.directory}/resources/Change.avsc - true - - - default - io.debezium.connector.schema.Column - ${project.build.directory}/resources/Column.avsc - true - - - default - io.debezium.connector.schema.Table - 
${project.build.directory}/resources/Table.avsc - true - + + + default + event.block + ${project.build.directory}/resources/block.avsc + true + + + default + io.debezium.connector.mysql.Source + ${project.build.directory}/resources/Source.avsc + true + + + default + io.debezium.connector.schema.Change + ${project.build.directory}/resources/Change.avsc + true + + + default + io.debezium.connector.schema.Column + ${project.build.directory}/resources/Column.avsc + true + + + default + io.debezium.connector.schema.Table + ${project.build.directory}/resources/Table.avsc + true + - - default - example-key - ${project.build.directory}/resources/SchemaChangeKey.avsc - true - - - default - example-value - ${project.build.directory}/resources/SchemaChangeValue.avsc - true - + + default + example-key + ${project.build.directory}/resources/SchemaChangeKey.avsc + true + + + default + example-value + ${project.build.directory}/resources/SchemaChangeValue.avsc + true + - - default - example.inventory.addresses-key - - ${project.build.directory}/resources/addresses/Key.avsc - true - - - default - example.inventory.addresses-value - - ${project.build.directory}/resources/addresses/Envelope.avsc - true - - - default - example.inventory.addresses.Value - - ${project.build.directory}/resources/addresses/Value.avsc - true - + + default + example.inventory.addresses-key + ${project.build.directory}/resources/addresses/Key.avsc + true + + + default + example.inventory.addresses-value + ${project.build.directory}/resources/addresses/Envelope.avsc + true + + + default + example.inventory.addresses.Value + ${project.build.directory}/resources/addresses/Value.avsc + true + - - default - example.inventory.customers-key - - ${project.build.directory}/resources/customers/Key.avsc - true - - - default - example.inventory.customers-value - - ${project.build.directory}/resources/customers/Envelope.avsc - true - - - default - example.inventory.customers.Value - - 
${project.build.directory}/resources/customers/Value.avsc - true - + + default + example.inventory.customers-key + ${project.build.directory}/resources/customers/Key.avsc + true + + + default + example.inventory.customers-value + ${project.build.directory}/resources/customers/Envelope.avsc + true + + + default + example.inventory.customers.Value + ${project.build.directory}/resources/customers/Value.avsc + true + - - default - example.inventory.orders-key - - ${project.build.directory}/resources/orders/Key.avsc - true - - - default - example.inventory.orders-value - - ${project.build.directory}/resources/orders/Envelope.avsc - true - - - default - example.inventory.orders.Value - - ${project.build.directory}/resources/orders/Value.avsc - true - + + default + example.inventory.orders-key + ${project.build.directory}/resources/orders/Key.avsc + true + + + default + example.inventory.orders-value + ${project.build.directory}/resources/orders/Envelope.avsc + true + + + default + example.inventory.orders.Value + ${project.build.directory}/resources/orders/Value.avsc + true + - - default - example.inventory.products-key - - ${project.build.directory}/resources/products/Key.avsc - true - - - default - example.inventory.products-value - - ${project.build.directory}/resources/products/Envelope.avsc - true - - - default - example.inventory.products.Value - - ${project.build.directory}/resources/products/Value.avsc - true - + + default + example.inventory.products-key + ${project.build.directory}/resources/products/Key.avsc + true + + + default + example.inventory.products-value + ${project.build.directory}/resources/products/Envelope.avsc + true + + + default + example.inventory.products.Value + ${project.build.directory}/resources/products/Value.avsc + true + - - default - example.inventory.products_on_hand-key - - ${project.build.directory}/resources/products_on_hand/Key.avsc - true - - - default - example.inventory.products_on_hand-value - - 
${project.build.directory}/resources/products_on_hand/Envelope.avsc - true - - - default - example.inventory.products_on_hand.Value - - ${project.build.directory}/resources/products_on_hand/Value.avsc - true - + + default + example.inventory.products_on_hand-key + ${project.build.directory}/resources/products_on_hand/Key.avsc + true + + + default + example.inventory.products_on_hand-value + ${project.build.directory}/resources/products_on_hand/Envelope.avsc + true + + + default + example.inventory.products_on_hand.Value + ${project.build.directory}/resources/products_on_hand/Value.avsc + true + - - - - - + + + + + - - org.apache.avro - avro-maven-plugin - ${version.avro} - - - generate-sources - - schema - - - String - - ${project.basedir}/target/resources/Column.avsc - ${project.basedir}/target/resources/Table.avsc + + org.apache.avro + avro-maven-plugin + ${version.avro} + + + + schema + + generate-sources + + String + + ${project.basedir}/target/resources/Column.avsc + ${project.basedir}/target/resources/Table.avsc - ${project.basedir}/target/resources/addresses/Value.avsc - ${project.basedir}/target/resources/customers/Value.avsc - ${project.basedir}/target/resources/orders/Value.avsc - ${project.basedir}/target/resources/products/Value.avsc - ${project.basedir}/target/resources/products_on_hand/Value.avsc - - ${project.basedir}/target/resources/ - ${project.basedir}/target/generated-sources/avro - - - - + ${project.basedir}/target/resources/addresses/Value.avsc + ${project.basedir}/target/resources/customers/Value.avsc + ${project.basedir}/target/resources/orders/Value.avsc + ${project.basedir}/target/resources/products/Value.avsc + ${project.basedir}/target/resources/products_on_hand/Value.avsc + + ${project.basedir}/target/resources/ + ${project.basedir}/target/generated-sources/avro + + + + - - + + - - + diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/Operation.java 
b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/Operation.java index d3393a227f..e178e0efb6 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/Operation.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/Operation.java @@ -1,6 +1,5 @@ package io.apicurio.example.debezium; - import lombok.Getter; import java.util.HashMap; @@ -11,11 +10,9 @@ */ public enum Operation { - CREATE("c"), - READ("r"), // Used for snapshots, i.e. writes the initial (or incremental) state of database tables to each topic - UPDATE("u"), - DELETE("d"), - TRUNCATE("t"); + CREATE("c"), READ("r"), // Used for snapshots, i.e. writes the initial (or incremental) state of database + // tables to each topic + UPDATE("u"), DELETE("d"), TRUNCATE("t"); @Getter private String op; diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/ExampleKafkaConsumer.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/ExampleKafkaConsumer.java index 4c515acd0e..7cf49999b7 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/ExampleKafkaConsumer.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/ExampleKafkaConsumer.java @@ -8,13 +8,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.enterprise.context.ApplicationScoped; -import javax.enterprise.event.Observes; -import javax.inject.Inject; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.List; +import javax.enterprise.context.ApplicationScoped; +import javax.enterprise.event.Observes; +import javax.inject.Inject; + /** * @author Jakub Senko m@jsenko.net */ @@ -26,23 +27,18 @@ public class ExampleKafkaConsumer { @Inject KafkaFactory kafkaFactory; - void onStart(@Observes StartupEvent event) { Runnable runner = () -> { try (KafkaConsumer consumer = kafkaFactory.createKafkaConsumer()) { - 
var topics = List.of( - "example.inventory.addresses", - "example.inventory.customers", - "example.inventory.orders", - "example.inventory.products", - "example.inventory.products_on_hand" - ); + var topics = List.of("example.inventory.addresses", "example.inventory.customers", + "example.inventory.orders", "example.inventory.products", + "example.inventory.products_on_hand"); var existingTopic = consumer.listTopics().keySet(); if (!existingTopic.containsAll(topics)) { - throw new IllegalStateException("Some topics are not available. " + - "Expected: " + topics + ", actual: " + existingTopic); + throw new IllegalStateException("Some topics are not available. " + "Expected: " + topics + + ", actual: " + existingTopic); } consumer.subscribe(topics); @@ -66,7 +62,8 @@ void onStart(@Observes StartupEvent event) { log.info("Raw key: {}", record.key()); log.info("Raw key schema: {}", ((SpecificRecord) record.key()).getSchema()); log.info("Raw value: {}", record.value()); - log.info("Raw value schema: {}", ((SpecificRecord) record.value()).getSchema()); + log.info("Raw value schema: {}", + ((SpecificRecord) record.value()).getSchema()); switch (record.topic()) { case "example.inventory.addresses": { @@ -107,15 +104,19 @@ void onStart(@Observes StartupEvent event) { } case "example.inventory.products_on_hand": { var key = (example.inventory.products_on_hand.Key) record.key(); - var value = (example.inventory.products_on_hand.Envelope) record.value(); - log.info("Operation {} on ProductOnHand", Operation.from(value.getOp())); + var value = (example.inventory.products_on_hand.Envelope) record + .value(); + log.info("Operation {} on ProductOnHand", + Operation.from(value.getOp())); log.info("Product ID: {}", key.getProductId()); log.info("Before: {}", ProductOnHand.from(value.getBefore())); log.info("After: {}", ProductOnHand.from(value.getAfter())); break; } default: - throw new IllegalStateException("Received a message from unexpected topic: " + record.topic()); + throw new 
IllegalStateException( + "Received a message from unexpected topic: " + + record.topic()); } }); } diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/KafkaFactory.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/KafkaFactory.java index febb0e6bc5..b619f1c979 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/KafkaFactory.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/kafka/KafkaFactory.java @@ -9,9 +9,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.enterprise.context.ApplicationScoped; import java.util.Properties; +import javax.enterprise.context.ApplicationScoped; + /** * @author Jakub Senko m@jsenko.net */ @@ -35,8 +36,10 @@ public KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true); props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000); props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); log.debug("Registry URL: {}", registryUrl); props.putIfAbsent(SerdeConfig.REGISTRY_URL, registryUrl); diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Address.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Address.java index 96a100d898..d0a085b16b 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Address.java +++ 
b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Address.java @@ -31,14 +31,8 @@ public static Address from(Value value) { if (value == null) { return null; } - return Address.builder() - .id(value.getId()) - .customerId(value.getCustomerId()) - .street(value.getStreet()) - .city(value.getCity()) - .state(value.getState()) - .zip(value.getZip()) - .type(value.getType()) + return Address.builder().id(value.getId()).customerId(value.getCustomerId()).street(value.getStreet()) + .city(value.getCity()).state(value.getState()).zip(value.getZip()).type(value.getType()) .build(); } } diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Customer.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Customer.java index 15590bd35d..6ca5a62646 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Customer.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Customer.java @@ -25,11 +25,7 @@ public static Customer from(Value value) { if (value == null) { return null; } - return Customer.builder() - .id(value.getId()) - .firstName(value.getFirstName()) - .lastName(value.getLastName()) - .email(value.getEmail()) - .build(); + return Customer.builder().id(value.getId()).firstName(value.getFirstName()) + .lastName(value.getLastName()).email(value.getEmail()).build(); } } diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Order.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Order.java index b0d9b1f166..2c51c1f257 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Order.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Order.java @@ -30,12 +30,9 @@ public static Order from(Value value) { if (value == null) { return null; } - return Order.builder() - 
.orderNumber(value.getOrderNumber()) + return Order.builder().orderNumber(value.getOrderNumber()) .orderDate(Instant.EPOCH.plus(Duration.ofDays(value.getOrderDate()))) - .purchaser(value.getPurchaser()) - .quantity(value.getQuantity()) - .productId(value.getProductId()) + .purchaser(value.getPurchaser()).quantity(value.getQuantity()).productId(value.getProductId()) .build(); } } diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Product.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Product.java index a48818006b..7566714c14 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Product.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/Product.java @@ -25,11 +25,7 @@ public static Product from(Value value) { if (value == null) { return null; } - return Product.builder() - .id(value.getId()) - .name(value.getName()) - .description(value.getDescription()) - .weight(value.getWeight()) - .build(); + return Product.builder().id(value.getId()).name(value.getName()).description(value.getDescription()) + .weight(value.getWeight()).build(); } } diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/ProductOnHand.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/ProductOnHand.java index 04429a0828..b6516024c2 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/ProductOnHand.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/model/ProductOnHand.java @@ -21,9 +21,6 @@ public static ProductOnHand from(Value value) { if (value == null) { return null; } - return ProductOnHand.builder() - .productId(value.getProductId()) - .quantity(value.getQuantity()) - .build(); + return ProductOnHand.builder().productId(value.getProductId()).quantity(value.getQuantity()).build(); } } diff --git 
a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/Api.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/Api.java index 2dfeb81494..7a073df0e3 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/Api.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/Api.java @@ -18,7 +18,6 @@ public class Api { @Inject ExampleRunner runner; - @POST @Path("/command") public String command(String command) { diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/ExampleRunner.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/ExampleRunner.java index 1d9ad49f5f..6e75f95f77 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/ExampleRunner.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/rest/ExampleRunner.java @@ -8,11 +8,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; import java.util.Random; import java.util.UUID; +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; + import static io.quarkus.scheduler.Scheduled.ConcurrentExecution.SKIP; @ApplicationScoped @@ -22,7 +23,6 @@ public class ExampleRunner { private static final Logger log = LoggerFactory.getLogger(ExampleRunner.class); - @Getter @Setter private boolean isEnabled; @@ -30,14 +30,11 @@ public class ExampleRunner { @Inject Database database; - @Scheduled(every = "5s", concurrentExecution = SKIP) public void run() { if (isEnabled) { - var product = Product.builder() - .name("name-" + UUID.randomUUID()) - .description("description-" + UUID.randomUUID()) - .weight(RANDOM.nextFloat() * 100 + 1) + var product = Product.builder().name("name-" + UUID.randomUUID()) + .description("description-" + UUID.randomUUID()).weight(RANDOM.nextFloat() * 100 + 
1) .build(); log.info("Inserting: {}", product); product.setId(database.insertProduct(product)); diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/Database.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/Database.java index ffff218169..0e154cc874 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/Database.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/Database.java @@ -3,11 +3,12 @@ import io.agroal.api.AgroalDataSource; import io.apicurio.example.debezium.model.Product; -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; import java.sql.*; import java.util.List; +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; + /** * @author Jakub Senko m@jsenko.net */ @@ -17,36 +18,30 @@ public class Database { @Inject AgroalDataSource dataSource; - public int insertProduct(Product product) { - return executeUpdate("INSERT INTO products VALUES (default,?,?,?)", List.of( - new SqlParam(0, product.getName(), SqlParamType.STRING), - new SqlParam(1, product.getDescription(), SqlParamType.STRING), - new SqlParam(2, product.getWeight(), SqlParamType.FLOAT) - )); + return executeUpdate("INSERT INTO products VALUES (default,?,?,?)", + List.of(new SqlParam(0, product.getName(), SqlParamType.STRING), + new SqlParam(1, product.getDescription(), SqlParamType.STRING), + new SqlParam(2, product.getWeight(), SqlParamType.FLOAT))); } - public void updateProduct(Product product) { - executeUpdate("UPDATE products SET name = ?, description = ?, weight = ? 
WHERE id = ?", List.of( - new SqlParam(0, product.getName(), SqlParamType.STRING), - new SqlParam(1, product.getDescription(), SqlParamType.STRING), - new SqlParam(2, product.getWeight(), SqlParamType.FLOAT), - new SqlParam(3, product.getId(), SqlParamType.INTEGER) - )); + executeUpdate("UPDATE products SET name = ?, description = ?, weight = ? WHERE id = ?", + List.of(new SqlParam(0, product.getName(), SqlParamType.STRING), + new SqlParam(1, product.getDescription(), SqlParamType.STRING), + new SqlParam(2, product.getWeight(), SqlParamType.FLOAT), + new SqlParam(3, product.getId(), SqlParamType.INTEGER))); } - public void deleteProduct(Product product) { - executeUpdate("DELETE FROM products WHERE id = ?", List.of( - new SqlParam(0, product.getId(), SqlParamType.INTEGER) - )); + executeUpdate("DELETE FROM products WHERE id = ?", + List.of(new SqlParam(0, product.getId(), SqlParamType.INTEGER))); } - private int executeUpdate(String sql, List parameters) { try (Connection connection = dataSource.getConnection()) { - try (PreparedStatement statement = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) { + try (PreparedStatement statement = connection.prepareStatement(sql, + Statement.RETURN_GENERATED_KEYS)) { parameters.forEach(p -> { p.bindTo(statement); }); diff --git a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParam.java b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParam.java index 03d0302012..9cbe01c1b5 100644 --- a/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParam.java +++ b/examples/debezium-openshift/src/main/java/io/apicurio/example/debezium/sql/SqlParam.java @@ -34,16 +34,15 @@ public class SqlParam { private final SqlParamType type; - public SqlParam(int position, Object value, SqlParamType type) { this.position = position; this.value = value; this.type = type; } - public void bindTo(PreparedStatement statement) { - int position = 
this.position + 1; // Convert from sensible position (starts at 0) to JDBC position index (starts at 1) + int position = this.position + 1; // Convert from sensible position (starts at 0) to JDBC position + // index (starts at 1) try { switch (type) { case BYTES: diff --git a/examples/docker-compose/pom.xml b/examples/docker-compose/pom.xml index a3d989f400..dc96c9a647 100644 --- a/examples/docker-compose/pom.xml +++ b/examples/docker-compose/pom.xml @@ -1,53 +1,53 @@ - - 4.0.0 + + + 4.0.0 - - io.apicurio - apicurio-registry-distro - 3.0.0-SNAPSHOT - ../../distro/pom.xml - + + io.apicurio + apicurio-registry-distro + 3.0.0-SNAPSHOT + ../../distro/pom.xml + - apicurio-registry-distro-docker-compose - pom - apicurio-registry-distro-docker-compose + apicurio-registry-distro-docker-compose + pom + apicurio-registry-distro-docker-compose - - - - org.apache.maven.plugins - maven-assembly-plugin - - - assembly - package - - single - - - ${project.artifactId}-${project.version} - true - - src/main/assembly/assembly.xml - - - 0755 - - ${tar.long.file.mode} - - - - + + + + org.apache.maven.plugins + maven-assembly-plugin + + + assembly + + single + + package + + ${project.artifactId}-${project.version} + true + + src/main/assembly/assembly.xml + + + 0755 + + ${tar.long.file.mode} + + + + - - org.apache.maven.plugins - maven-deploy-plugin - - true - - - - + + org.apache.maven.plugins + maven-deploy-plugin + + true + + + + diff --git a/examples/jsonschema-validation/pom.xml b/examples/jsonschema-validation/pom.xml index 37010b992c..65bdc3855f 100644 --- a/examples/jsonschema-validation/pom.xml +++ b/examples/jsonschema-validation/pom.xml @@ -1,23 +1,22 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-jsonschema-validation - jar + apicurio-registry-examples-jsonschema-validation + jar - - - io.apicurio - 
apicurio-registry-schema-validation-jsonschema - ${apicurio-registry-schema-validation.version} - - + + + io.apicurio + apicurio-registry-schema-validation-jsonschema + ${apicurio-registry-schema-validation.version} + + diff --git a/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/InvalidMessageBean.java b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/InvalidMessageBean.java index ea1a4d5f05..b6b1f198a1 100644 --- a/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/InvalidMessageBean.java +++ b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/InvalidMessageBean.java @@ -22,10 +22,10 @@ * @author famartin@redhat.com */ public class InvalidMessageBean { - + private String message; private Date time; - + /** * Constructor. */ diff --git a/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/JsonSchemaValidationExample.java b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/JsonSchemaValidationExample.java index 6b5dfbc177..f99ce4cd14 100644 --- a/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/JsonSchemaValidationExample.java +++ b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/JsonSchemaValidationExample.java @@ -44,18 +44,19 @@ * This example demonstrates how to use Apicurio Registry Schema Validation library for JSON and JSON Schema. *

* The following aspects are demonstrated: - * *

    - *
  1. Register the JSON Schema in the registry
  2. - *
  3. Configuring a JsonValidator that will use Apicurio Registry to fetch and cache the schema to use for validation
  4. - *
  5. Successfully validate Java objects using static configuration to always use the same schema for validation
  6. - *
  7. Successfully validate Java objects using dynamic configuration to dynamically choose the schema to use for validation
  8. + *
  9. Register the JSON Schema in the registry
  10. + *
  11. Configuring a JsonValidator that will use Apicurio Registry to fetch and cache the schema to use for + * validation
  12. + *
  13. Successfully validate Java objects using static configuration to always use the same schema for + * validation
  14. + *
  15. Successfully validate Java objects using dynamic configuration to dynamically choose the schema to use + * for validation
  16. *
*

* Pre-requisites: - * *

    - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author eric.wittmann@gmail.com @@ -64,31 +65,17 @@ public class JsonSchemaValidationExample { private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; - public static final String SCHEMA = "{" + - " \"$id\": \"https://example.com/message.schema.json\"," + - " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + - " \"required\": [" + - " \"message\"," + - " \"time\"" + - " ]," + - " \"type\": \"object\"," + - " \"properties\": {" + - " \"message\": {" + - " \"description\": \"\"," + - " \"type\": \"string\"" + - " }," + - " \"time\": {" + - " \"description\": \"\"," + - " \"type\": \"number\"" + - " }" + - " }" + - "}"; - + public static final String SCHEMA = "{" + " \"$id\": \"https://example.com/message.schema.json\"," + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + " \"required\": [" + + " \"message\"," + " \"time\"" + " ]," + " \"type\": \"object\"," + + " \"properties\": {" + " \"message\": {" + " \"description\": \"\"," + + " \"type\": \"string\"" + " }," + " \"time\": {" + + " \"description\": \"\"," + " \"type\": \"number\"" + " }" + + " }" + "}"; public static final void main(String[] args) throws Exception { System.out.println("Starting example " + JsonSchemaValidationExample.class.getSimpleName()); - // Register the schema with the registry (only if it is not already registered) String artifactId = JsonSchemaValidationExample.class.getSimpleName(); RegistryClient client = createRegistryClient(REGISTRY_URL); @@ -98,9 +85,8 @@ public static final void main(String[] args) throws Exception { createArtifact.setArtifactType(ArtifactType.JSON); createArtifact.setFirstVersion(new CreateVersion()); createArtifact.getFirstVersion().setContent(new VersionContent()); - createArtifact.getFirstVersion().getContent().setContent( - IoUtil.toString(SCHEMA.getBytes(StandardCharsets.UTF_8)) - ); + createArtifact.getFirstVersion().getContent() + .setContent(IoUtil.toString(SCHEMA.getBytes(StandardCharsets.UTF_8))); 
createArtifact.getFirstVersion().getContent().setContentType("application/json"); client.groups().byGroupId("default").artifacts().post(createArtifact, config -> { @@ -109,13 +95,12 @@ public static final void main(String[] args) throws Exception { // Create an artifact reference pointing to the artifact we just created // and pass it to the JsonValidator - ArtifactReference artifactReference = ArtifactReference.builder() - .groupId("default") - .artifactId(artifactId) - .build(); + ArtifactReference artifactReference = ArtifactReference.builder().groupId("default") + .artifactId(artifactId).build(); // Create the JsonValidator providing an ArtifactReference - // this ArtifactReference will allways be used to lookup the schema in the registry when using "validateByArtifactReference" + // this ArtifactReference will allways be used to lookup the schema in the registry when using + // "validateByArtifactReference" JsonValidator validator = createJsonValidator(artifactReference); // Test successfull validation @@ -141,7 +126,8 @@ public static final void main(String[] args) throws Exception { System.out.println("Validation result: " + invalidBeanResult); System.out.println(); - // Test validate method providing a record to dynamically resolve the artifact to fetch from the registry + // Test validate method providing a record to dynamically resolve the artifact to fetch from the + // registry JsonRecord record = new JsonRecord(bean, new JsonMetadata(artifactReference)); @@ -158,7 +144,7 @@ public static final void main(String[] args) throws Exception { private static RegistryClient createRegistryClient(String registryUrl) { final String tokenEndpoint = System.getenv(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
if (tokenEndpoint != null) { final String authClient = System.getenv(SchemaResolverConfig.AUTH_CLIENT_ID); final String authSecret = System.getenv(SchemaResolverConfig.AUTH_CLIENT_SECRET); @@ -181,7 +167,7 @@ private static JsonValidator createJsonValidator(ArtifactReference artifactRefer // Configure Service Registry location props.putIfAbsent(SchemaResolverConfig.REGISTRY_URL, REGISTRY_URL); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. configureSecurityIfPresent(props); // Create the json validator diff --git a/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/MessageBean.java b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/MessageBean.java index 34599952ef..af5af0eb18 100644 --- a/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/MessageBean.java +++ b/examples/jsonschema-validation/src/main/java/io/apicurio/registry/examples/validation/json/MessageBean.java @@ -20,10 +20,10 @@ * @author eric.wittmann@gmail.com */ public class MessageBean { - + private String message; private long time; - + /** * Constructor. 
*/ diff --git a/examples/mix-avro/pom.xml b/examples/mix-avro/pom.xml index 0740fa1769..837b598822 100644 --- a/examples/mix-avro/pom.xml +++ b/examples/mix-avro/pom.xml @@ -1,43 +1,42 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-mix-avro - jar + apicurio-registry-examples-mix-avro + jar - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - - org.slf4j - slf4j-api - ${slf4j.version} - - - org.slf4j - slf4j-simple - ${slf4j.version} - - + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + diff --git a/examples/mix-avro/src/main/java/io/apicurio/registry/examples/mix/avro/MixAvroExample.java b/examples/mix-avro/src/main/java/io/apicurio/registry/examples/mix/avro/MixAvroExample.java index 40550fe670..c1346455f6 100644 --- a/examples/mix-avro/src/main/java/io/apicurio/registry/examples/mix/avro/MixAvroExample.java +++ b/examples/mix-avro/src/main/java/io/apicurio/registry/examples/mix/avro/MixAvroExample.java @@ -16,12 +16,10 @@ package io.apicurio.registry.examples.mix.avro; -import java.time.Duration; -import java.util.Collections; -import java.util.Date; -import java.util.Properties; -import java.util.UUID; - +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; +import io.apicurio.registry.serde.avro.AvroKafkaSerializer; +import io.apicurio.registry.serde.avro.strategy.RecordIdStrategy; import org.apache.avro.Schema; import org.apache.avro.generic.GenericData; import 
org.apache.avro.generic.GenericRecord; @@ -36,30 +34,30 @@ import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; -import io.apicurio.registry.serde.SerdeConfig; -import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; -import io.apicurio.registry.serde.avro.AvroKafkaSerializer; -import io.apicurio.registry.serde.avro.strategy.RecordIdStrategy; +import java.time.Duration; +import java.util.Collections; +import java.util.Date; +import java.util.Properties; +import java.util.UUID; /** - * This example application showcases a scenario where Apache Avro messages are published to the same - * Kafka topic using different Avro schemas. This example uses the Apicurio Registry serdes classes to serialize - * and deserialize Apache Avro messages using different schemas, even if received in the same Kafka topic. - * The following aspects are demonstrated: - * + * This example application showcases a scenario where Apache Avro messages are published to the same Kafka + * topic using different Avro schemas. This example uses the Apicurio Registry serdes classes to serialize and + * deserialize Apache Avro messages using different schemas, even if received in the same Kafka topic. The + * following aspects are demonstrated: *
    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Auto-register the Avro schema in the registry (registered by the producer)
  6. - *
  7. Data sent as a simple GenericRecord, no java beans needed
  8. - *
  9. Producing and consuming Avro messages using different schemas mapped to different Apicurio Registry Artifacts
  10. + *
  11. Configuring a Kafka Serializer for use with Apicurio Registry
  12. + *
  13. Configuring a Kafka Deserializer for use with Apicurio Registry
  14. + *
  15. Auto-register the Avro schema in the registry (registered by the producer)
  16. + *
  17. Data sent as a simple GenericRecord, no java beans needed
  18. + *
  19. Producing and consuming Avro messages using different schemas mapped to different Apicurio Registry + * Artifacts
  20. *
*

* Pre-requisites: - * *

    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author Fabian Martinez @@ -92,7 +90,6 @@ public static void main(String[] args) throws Exception { producedMessages += produceMessages(producer, topicName, FAREWELLSCHEMAV2, "extra farewell"); - } finally { System.out.println("Closing the producer."); producer.flush(); @@ -117,22 +114,27 @@ public static void main(String[] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. System.out.println("No messages waiting..."); - } else records.forEach(record -> { - GenericRecord value = record.value(); - value.getSchema().getFullName(); - if (value.hasField("Extra")) { - System.out.println("Consumed " + value.getSchema().getFullName() + ": " + value.get("Message") + " @ " + new Date((long) value.get("Time")) + " @ " + value.get("Extra")); - } else { - System.out.println("Consumed " + value.getSchema().getFullName() + ": " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); - } - }); + } else + records.forEach(record -> { + GenericRecord value = record.value(); + value.getSchema().getFullName(); + if (value.hasField("Extra")) { + System.out.println("Consumed " + value.getSchema().getFullName() + ": " + + value.get("Message") + " @ " + new Date((long) value.get("Time")) + + " @ " + value.get("Extra")); + } else { + System.out.println("Consumed " + value.getSchema().getFullName() + ": " + + value.get("Message") + " @ " + new Date((long) value.get("Time"))); + } + }); } } System.out.println("Done (success)."); } - private static int produceMessages(Producer producer, String topicName, String schemaContent, String extra) throws InterruptedException { + private static int produceMessages(Producer producer, String topicName, + String schemaContent, String extra) throws InterruptedException { int producedMessages = 0; Schema schema = new Schema.Parser().parse(schemaContent); System.out.println("Producing (5) messages."); @@ -148,7 +150,8 @@ private static int produceMessages(Producer producer, String top } // 
Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, UUID.randomUUID().toString(), record); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, + UUID.randomUUID().toString(), record); producer.send(producedRecord); Thread.sleep(100); @@ -179,7 +182,7 @@ private static Producer createKafkaProducer() { // Get an existing schema or auto-register if not found props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. configureSecurityIfPresent(props); // Create the Kafka producer @@ -201,15 +204,16 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Avro - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -228,13 +232,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" "+ - " oauth.client.secret=\"%s\" "+ - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } diff --git a/examples/openshift-template/pom.xml b/examples/openshift-template/pom.xml index df9fc5cceb..239e9f6a1f 100644 --- a/examples/openshift-template/pom.xml +++ b/examples/openshift-template/pom.xml @@ -1,28 +1,28 @@ - - 4.0.0 + + + 4.0.0 - - io.apicurio - apicurio-registry-distro - 3.0.0-SNAPSHOT - ../../distro/pom.xml - + + io.apicurio + apicurio-registry-distro + 3.0.0-SNAPSHOT + ../../distro/pom.xml + - apicurio-registry-distro-openshift-template - pom - apicurio-registry-distro-openshift-template + apicurio-registry-distro-openshift-template + pom + apicurio-registry-distro-openshift-template - - - - org.apache.maven.plugins - maven-deploy-plugin - - true - - - - + + + + org.apache.maven.plugins + 
maven-deploy-plugin + + true + + + + diff --git a/examples/pom.xml b/examples/pom.xml index 58f6e46a11..7b6bc2e6d9 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -1,244 +1,244 @@ - 4.0.0 + 4.0.0 - apicurio-registry-examples + + io.apicurio + apicurio-registry 3.0.0-SNAPSHOT - pom - - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../pom.xml - - - https://www.apicur.io/ - Open Source API & Schema Registry - - - Red Hat - https://www.redhat.com - - - - - Apache License Version 2.0 - https://repository.jboss.org/licenses/apache-2.0.txt - repo - - - - - GitHub - https://github.com/apicurio/apicurio-registry-examples/issues - - - - scm:git:git@github.com:apicurio/apicurio-registry-examples.git - scm:git:git@github.com:apicurio/apicurio-registry-examples.git - scm:git:git@github.com:apicurio/apicurio-registry-examples.git - - - - simple-avro - simple-json - confluent-serdes - avro-bean - custom-resolver - custom-strategy - simple-avro-maven - rest-client - mix-avro - jsonschema-validation - simple-validation - serdes-with-references - avro-maven-with-references - avro-maven-with-references-auto - protobuf-validation - simple-protobuf - protobuf-find-latest - protobuf-bean - - - - UTF-8 - UTF-8 - yyyy-MM-dd HH:mm:ss - ${maven.build.timestamp} - - 17 - 17 - - - 3.5.0 - - - 4.5.8.Final - - - 2.0.13 - - - 3.9.5 - - - 0.1.18.Final - - - 0.0.7 - - - 3.13.0 - 3.1.2 - 3.3.0 - 3.7.0 - 3.3.1 - 3.3.0 - 3.4.1 - 1.2.1 - 3.7.0 - 3.7.1 - 3.3.1 - 3.3.2 - 1.11.0 - 3.21.6 - - 0.6.1 - 1.7.0 - - - - - - - org.codehaus.mojo - properties-maven-plugin - ${version.properties.plugin} - - - org.apache.maven.plugins - maven-compiler-plugin - ${version.compiler.plugin} - - - org.apache.maven.plugins - maven-source-plugin - ${version.source.plugin} - - - org.apache.maven.plugins - maven-javadoc-plugin - ${version.javadoc.plugin} - - - org.apache.maven.plugins - maven-failsafe-plugin - ${version.failsafe.plugin} - - - org.apache.maven.plugins - maven-surefire-plugin - 
${version.surefire.plugin} - - - org.apache.maven.plugins - maven-deploy-plugin - ${version.deploy.plugin} - - - org.apache.maven.plugins - maven-jar-plugin - ${version.jar.plugin} - - - org.apache.maven.plugins - maven-dependency-plugin - ${version.dependency.plugin} - - - org.apache.maven.plugins - maven-assembly-plugin - ${version.assembly.plugin} - - - org.apache.maven.plugins - maven-resources-plugin - ${version.resources.plugin} - - - org.apache.maven.plugins - maven-clean-plugin - ${version.clean.plugin} - - - + ../pom.xml + + + apicurio-registry-examples + 3.0.0-SNAPSHOT + pom + Open Source API & Schema Registry + + https://www.apicur.io/ + + + Red Hat + https://www.redhat.com + + + + + Apache License Version 2.0 + https://repository.jboss.org/licenses/apache-2.0.txt + repo + + + + + simple-avro + simple-json + confluent-serdes + avro-bean + custom-resolver + custom-strategy + simple-avro-maven + rest-client + mix-avro + jsonschema-validation + simple-validation + serdes-with-references + avro-maven-with-references + avro-maven-with-references-auto + protobuf-validation + simple-protobuf + protobuf-find-latest + protobuf-bean + + + + scm:git:git@github.com:apicurio/apicurio-registry-examples.git + scm:git:git@github.com:apicurio/apicurio-registry-examples.git + scm:git:git@github.com:apicurio/apicurio-registry-examples.git + + + + GitHub + https://github.com/apicurio/apicurio-registry-examples/issues + + + + UTF-8 + UTF-8 + yyyy-MM-dd HH:mm:ss + ${maven.build.timestamp} + + 17 + 17 + + + 3.5.0 + + + 4.5.8.Final + + + 2.0.13 + + + 3.9.5 + + + 0.1.18.Final + + + 0.0.7 + + + 3.13.0 + 3.1.2 + 3.3.0 + 3.7.0 + 3.3.1 + 3.3.0 + 3.4.1 + 1.2.1 + 3.7.0 + 3.7.1 + 3.3.1 + 3.3.2 + 1.11.0 + 3.21.6 + + 0.6.1 + 1.7.0 + + + + + + + org.codehaus.mojo + properties-maven-plugin + ${version.properties.plugin} + + + org.apache.maven.plugins + maven-compiler-plugin + ${version.compiler.plugin} + + + org.apache.maven.plugins + maven-source-plugin + ${version.source.plugin} + + + 
org.apache.maven.plugins + maven-javadoc-plugin + ${version.javadoc.plugin} + + + org.apache.maven.plugins + maven-failsafe-plugin + ${version.failsafe.plugin} + + + org.apache.maven.plugins + maven-surefire-plugin + ${version.surefire.plugin} + + + org.apache.maven.plugins + maven-deploy-plugin + ${version.deploy.plugin} + + + org.apache.maven.plugins + maven-jar-plugin + ${version.jar.plugin} + + + org.apache.maven.plugins + maven-dependency-plugin + ${version.dependency.plugin} + + + org.apache.maven.plugins + maven-assembly-plugin + ${version.assembly.plugin} + + + org.apache.maven.plugins + maven-resources-plugin + ${version.resources.plugin} + + + org.apache.maven.plugins + maven-clean-plugin + ${version.clean.plugin} + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${maven.compiler.target} + ${maven.compiler.target} + false + false + + + + org.apache.maven.plugins + maven-source-plugin + + + attach-sources + + jar-no-fork + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + + org.jboss.spec.javax.annotation + jboss-annotations-api_1.2_spec + 1.0.1.Final + + + false + false + + + + attach-javadocs + + jar + + + + + + + + + + java8 + + [1.8,) + + - - org.apache.maven.plugins - maven-compiler-plugin - - ${maven.compiler.target} - ${maven.compiler.target} - false - false - - - - org.apache.maven.plugins - maven-source-plugin - - - attach-sources - - jar-no-fork - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - - attach-javadocs - - jar - - - - - - - org.jboss.spec.javax.annotation - jboss-annotations-api_1.2_spec - 1.0.1.Final - - - false - false - - + + org.apache.maven.plugins + maven-javadoc-plugin + + -Xdoclint:none + + - - - - - java8 - - [1.8,) - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - -Xdoclint:none - - - - - - + + + diff --git a/examples/protobuf-bean/pom.xml b/examples/protobuf-bean/pom.xml index c419a1e7e0..4584c7cc8f 100644 --- a/examples/protobuf-bean/pom.xml +++ 
b/examples/protobuf-bean/pom.xml @@ -1,76 +1,73 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-protobuf-bean - jar + apicurio-registry-examples-protobuf-bean + jar - - 0.6.1 - + + 0.6.1 + - - - io.apicurio - apicurio-registry-serdes-protobuf-serde - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + - - - - kr.motd.maven - os-maven-plugin - 1.7.1 - - - initialize - - detect - - - - + + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + detect + + initialize + + + - - org.xolstice.maven.plugins - protobuf-maven-plugin - ${proto-plugin.version} - true - - - gencode - generate-sources - - compile - - - - com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} - - - - - - - + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + + compile + + generate-sources + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + diff --git a/examples/protobuf-bean/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufBeanExample.java b/examples/protobuf-bean/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufBeanExample.java index 67970ae6d4..acb98c4edc 100644 --- a/examples/protobuf-bean/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufBeanExample.java +++ b/examples/protobuf-bean/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufBeanExample.java @@ -42,21 +42,19 @@ import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple 
publish/subscribe - * scenario with Protobuf as the serialization type. The following aspects are demonstrated: - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * Protobuf as the serialization type. The following aspects are demonstrated: *
    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Auto-register the Protobuf schema in the registry (registered by the producer)
  6. - *
  7. Data sent as a custom java bean and received as the same java bean
  8. + *
  9. Configuring a Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Auto-register the Protobuf schema in the registry (registered by the producer)
  14. + *
  15. Data sent as a custom java bean and received as the same java bean
  16. *
*

* Pre-requisites: - * *

    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author eric.wittmann@gmail.com @@ -68,7 +66,6 @@ public class ProtobufBeanExample { private static final String TOPIC_NAME = ProtobufBeanExample.class.getSimpleName(); private static final String SCHEMA_NAME = "AddressBook"; - public static final void main(String[] args) throws Exception { System.out.println("Starting example " + ProtobufBeanExample.class.getSimpleName()); String topicName = TOPIC_NAME; @@ -82,20 +79,13 @@ public static final void main(String[] args) throws Exception { for (int idx = 0; idx < 2; idx++) { AddressBookProtos.AddressBook book = AddressBook.newBuilder() - .addPeople(Person.newBuilder() - .setEmail("aa@bb.com") - .setId(1) - .setName("aa") - .build()) - .addPeople(Person.newBuilder() - .setEmail("bb@bb.com") - .setId(2) - .setName("bb") - .build()) + .addPeople(Person.newBuilder().setEmail("aa@bb.com").setId(1).setName("aa").build()) + .addPeople(Person.newBuilder().setEmail("bb@bb.com").setId(2).setName("bb").build()) .build(); // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, key, book); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, key, + book); producer.send(producedRecord); Thread.sleep(100); @@ -125,10 +115,12 @@ public static final void main(String[] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. 
System.out.println("No messages waiting..."); - } else records.forEach(record -> { - AddressBook value = record.value(); - System.out.println("Consumed a message: People count in AddressBook " + value.getPeopleCount()); - }); + } else + records.forEach(record -> { + AddressBook value = record.value(); + System.out.println( + "Consumed a message: People count in AddressBook " + value.getPeopleCount()); + }); } } finally { consumer.close(); @@ -138,8 +130,10 @@ public static final void main(String[] args) throws Exception { vertXRequestAdapter.setBaseUrl(REGISTRY_URL); RegistryClient client = new RegistryClient(vertXRequestAdapter); System.out.println("The artifact created in Apicurio Registry is: "); - //because the default ArtifactResolverStrategy is TopicIdStrategy the artifactId is in the form of topicName-value - System.out.println(IoUtil.toString(client.groups().byGroupId("default").artifacts().byArtifactId(topicName + "-value").versions().byVersionExpression("1").content().get())); + // because the default ArtifactResolverStrategy is TopicIdStrategy the artifactId is in the form of + // topicName-value + System.out.println(IoUtil.toString(client.groups().byGroupId("default").artifacts() + .byArtifactId(topicName + "-value").versions().byVersionExpression("1").content().get())); System.out.println(); VertXAuthFactory.defaultVertx.close(); System.out.println("Done (success)."); @@ -157,7 +151,8 @@ private static Producer createKafkaProducer() { props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); // Use the Apicurio Registry provided Kafka Serializer for Protobuf - props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ProtobufKafkaSerializer.class.getName()); + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + ProtobufKafkaSerializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, 
REGISTRY_URL); @@ -166,7 +161,7 @@ private static Producer createKafkaProducer() { // Register the artifact if not found in the registry. props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. configureSecurityIfPresent(props); // Create the Kafka producer @@ -188,18 +183,19 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Protobuf - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ProtobufKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + ProtobufKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. // the serializer also puts information about the AddressBook java class in the kafka record headers // with this the deserializer can automatically return that same java class. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -218,13 +214,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" " + - " oauth.client.secret=\"%s\" " + - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } diff --git a/examples/protobuf-find-latest/pom.xml b/examples/protobuf-find-latest/pom.xml index 137d820544..380fa79d23 100644 --- a/examples/protobuf-find-latest/pom.xml +++ b/examples/protobuf-find-latest/pom.xml @@ -1,77 +1,74 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-protobuf-find-latest - jar + apicurio-registry-examples-protobuf-find-latest + jar - - 0.6.1 - + + 0.6.1 + - - - io.apicurio - apicurio-registry-serdes-protobuf-serde - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - 
- + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + - - - - kr.motd.maven - os-maven-plugin - 1.7.1 - - - initialize - - detect - - - - + + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + detect + + initialize + + + - - org.xolstice.maven.plugins - protobuf-maven-plugin - ${proto-plugin.version} - true - - - gencode - generate-sources - - compile - - - - com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} - - - - - - - - + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + + compile + + generate-sources + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + + diff --git a/examples/protobuf-find-latest/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufFindLatestExample.java b/examples/protobuf-find-latest/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufFindLatestExample.java index c5672dbdf4..b1ed3bb4a8 100644 --- a/examples/protobuf-find-latest/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufFindLatestExample.java +++ b/examples/protobuf-find-latest/src/main/java/io/apicurio/registry/examples/simple/protobuf/ProtobufFindLatestExample.java @@ -49,21 +49,21 @@ import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe - * scenario with Protobuf as the serialization type. The following aspects are demonstrated: - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * Protobuf as the serialization type. The following aspects are demonstrated: *
    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Manually registering the Protobuf schema in the registry (registered using the RegistryClient before running the producer/consumer), this would be equivalent to using the maven plugin or a custom CI/CD process
  6. - *
  7. Data sent as a custom java bean and received as the same java bean
  8. + *
  9. Configuring a Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Manually registering the Protobuf schema in the registry (registered using the RegistryClient before + * running the producer/consumer), this would be equivalent to using the maven plugin or a custom CI/CD + * process
  14. + *
  15. Data sent as a custom java bean and received as the same java bean
  16. *
*

* Pre-requisites: - * *

    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author eric.wittmann@gmail.com @@ -76,21 +76,21 @@ public class ProtobufFindLatestExample { private static final String TOPIC_NAME = ProtobufFindLatestExample.class.getSimpleName(); private static final String SCHEMA_NAME = "AddressBook"; - public static final void main(String[] args) throws Exception { System.out.println("Starting example " + ProtobufFindLatestExample.class.getSimpleName()); String topicName = TOPIC_NAME; String key = SCHEMA_NAME; - VertXRequestAdapter vertXRequestAdapter = new VertXRequestAdapter(VertXAuthFactory.defaultVertx); vertXRequestAdapter.setBaseUrl(REGISTRY_URL); RegistryClient client = new RegistryClient(vertXRequestAdapter); System.out.println("Manually creating the artifact in Apicurio Registry"); - //because the default ArtifactResolverStrategy is TopicIdStrategy the artifactId is in the form of topicName-value + // because the default ArtifactResolverStrategy is TopicIdStrategy the artifactId is in the form of + // topicName-value String artifactId = topicName + "-value"; - InputStream protofile = Thread.currentThread().getContextClassLoader().getResourceAsStream("person.proto"); + InputStream protofile = Thread.currentThread().getContextClassLoader() + .getResourceAsStream("person.proto"); CreateArtifact createArtifact = new CreateArtifact(); createArtifact.setArtifactId(artifactId); @@ -114,20 +114,13 @@ public static final void main(String[] args) throws Exception { for (int idx = 0; idx < 2; idx++) { AddressBookProtos.AddressBook book = AddressBook.newBuilder() - .addPeople(Person.newBuilder() - .setEmail("aa@bb.com") - .setId(1) - .setName("aa") - .build()) - .addPeople(Person.newBuilder() - .setEmail("bb@bb.com") - .setId(2) - .setName("bb") - .build()) + .addPeople(Person.newBuilder().setEmail("aa@bb.com").setId(1).setName("aa").build()) + .addPeople(Person.newBuilder().setEmail("bb@bb.com").setId(2).setName("bb").build()) .build(); // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new 
ProducerRecord<>(topicName, key, book); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, key, + book); producer.send(producedRecord); Thread.sleep(100); @@ -157,10 +150,12 @@ public static final void main(String[] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. System.out.println("No messages waiting..."); - } else records.forEach(record -> { - AddressBook value = record.value(); - System.out.println("Consumed a message: People count in AddressBook " + value.getPeopleCount()); - }); + } else + records.forEach(record -> { + AddressBook value = record.value(); + System.out.println( + "Consumed a message: People count in AddressBook " + value.getPeopleCount()); + }); } } finally { consumer.close(); @@ -181,7 +176,8 @@ private static Producer createKafkaProducer() { props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); // Use the Apicurio Registry provided Kafka Serializer for Protobuf - props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ProtobufKafkaSerializer.class.getName()); + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + ProtobufKafkaSerializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); @@ -190,7 +186,7 @@ private static Producer createKafkaProducer() { // Find and use the latest artifact in the registry for the corresponding GroupId and ArtifactId props.putIfAbsent(SerdeConfig.FIND_LATEST_ARTIFACT, Boolean.TRUE); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka producer @@ -212,18 +208,19 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Protobuf - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ProtobufKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + ProtobufKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. // the serializer also puts information about the AddressBook java class in the kafka record headers // with this the deserializer can automatically return that same java class. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -242,13 +239,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" " + - " oauth.client.secret=\"%s\" " + - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } diff --git a/examples/protobuf-validation/pom.xml b/examples/protobuf-validation/pom.xml index 600661ed79..f4fe938b88 100644 --- a/examples/protobuf-validation/pom.xml +++ b/examples/protobuf-validation/pom.xml @@ -1,64 +1,60 @@ - - - apicurio-registry-examples - io.apicurio - 3.0.0-SNAPSHOT - ../pom.xml - - 4.0.0 + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-protobuf-validation + apicurio-registry-examples-protobuf-validation - - - io.apicurio - apicurio-registry-schema-validation-protobuf - ${apicurio-registry-schema-validation.version} - - + + + io.apicurio + apicurio-registry-schema-validation-protobuf + ${apicurio-registry-schema-validation.version} 
+ + - + - - - kr.motd.maven - os-maven-plugin - 1.7.1 - - - initialize - - detect - - - - + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + detect + + initialize + + + - - org.xolstice.maven.plugins - protobuf-maven-plugin - ${proto-plugin.version} - true - - - gencode - generate-sources - - compile - - - - com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} - - - - - - + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + + compile + + generate-sources + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + - + - \ No newline at end of file + diff --git a/examples/protobuf-validation/src/main/java/io/apicurio/registry/examples/validation/protobuf/ProtobufValidationExample.java b/examples/protobuf-validation/src/main/java/io/apicurio/registry/examples/validation/protobuf/ProtobufValidationExample.java index d67db4d6cd..4736224e1e 100644 --- a/examples/protobuf-validation/src/main/java/io/apicurio/registry/examples/validation/protobuf/ProtobufValidationExample.java +++ b/examples/protobuf-validation/src/main/java/io/apicurio/registry/examples/validation/protobuf/ProtobufValidationExample.java @@ -47,18 +47,19 @@ * This example demonstrates how to use Apicurio Registry Schema Validation library for Protobuf *

* The following aspects are demonstrated: - * *

    - *
  1. Register the Protobuf Schema in the registry
  2. - *
  3. Configuring a Protobuf that will use Apicurio Registry to fetch and cache the schema to use for validation
  4. - *
  5. Successfully validate Java objects using static configuration to always use the same schema for validation
  6. - *
  7. Successfully validate Java objects using dynamic configuration to dynamically choose the schema to use for validation
  8. + *
  9. Register the Protobuf Schema in the registry
  10. + *
  11. Configuring a Protobuf that will use Apicurio Registry to fetch and cache the schema to use for + * validation
  12. + *
  13. Successfully validate Java objects using static configuration to always use the same schema for + * validation
  14. + *
  15. Successfully validate Java objects using dynamic configuration to dynamically choose the schema to use + * for validation
  16. *
*

* Pre-requisites: - * *

    - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author carnalca@redhat.com @@ -67,16 +68,9 @@ public class ProtobufValidationExample { private static final String REGISTRY_URL = "http://localhost:8080/apis/registry/v2"; - public static final String SCHEMA = - "syntax = \"proto3\";\n" - + "package io.apicurio.schema.validation.protobuf.ref;\n" - + "\n" - + "message MessageExample {\n" - + "\n" - + " string key = 1;\n" - + " string value = 2;\n" - + "\n" - + "}"; + public static final String SCHEMA = "syntax = \"proto3\";\n" + + "package io.apicurio.schema.validation.protobuf.ref;\n" + "\n" + "message MessageExample {\n" + + "\n" + " string key = 1;\n" + " string value = 2;\n" + "\n" + "}"; public static final void main(String[] args) throws Exception { System.out.println("Starting example " + ProtobufValidationExample.class.getSimpleName()); @@ -90,27 +84,27 @@ public static final void main(String[] args) throws Exception { createArtifact.setArtifactType(ArtifactType.PROTOBUF); createArtifact.setFirstVersion(new CreateVersion()); createArtifact.getFirstVersion().setContent(new VersionContent()); - createArtifact.getFirstVersion().getContent().setContent(IoUtil.toString(SCHEMA.getBytes(StandardCharsets.UTF_8))); + createArtifact.getFirstVersion().getContent() + .setContent(IoUtil.toString(SCHEMA.getBytes(StandardCharsets.UTF_8))); createArtifact.getFirstVersion().getContent().setContentType(ContentTypes.APPLICATION_PROTOBUF); client.groups().byGroupId("default").artifacts().post(createArtifact, config -> { config.queryParameters.ifExists = IfArtifactExists.FIND_OR_CREATE_VERSION; }); - // Create an artifact reference pointing to the artifact we just created // and pass it to the ProtobufValidator ArtifactReference artifactReference = ArtifactReference.builder().groupId("default") .artifactId(artifactId).build(); // Create the ProtobufValidator providing an ArtifactReference - // this ArtifactReference will allways be used to lookup the schema in the registry when using "validateByArtifactReference" + // this 
ArtifactReference will allways be used to lookup the schema in the registry when using + // "validateByArtifactReference" ProtobufValidator validator = createProtobufValidator(artifactReference); // Test successfull validation - MessageExample bean = MessageExample.newBuilder() - .setKey(UUID.randomUUID().toString()) + MessageExample bean = MessageExample.newBuilder().setKey(UUID.randomUUID().toString()) .setValue("Hello world").build(); System.out.println(); @@ -121,18 +115,16 @@ public static final void main(String[] args) throws Exception { // Test validation error - MessageExample2 invalidBean = MessageExample2.newBuilder() - .setKey2(UUID.randomUUID().toString()) - .setValue2(32) - .build(); - + MessageExample2 invalidBean = MessageExample2.newBuilder().setKey2(UUID.randomUUID().toString()) + .setValue2(32).build(); System.out.println("Validating invalid message bean"); ProtobufValidationResult invalidBeanResult = validator.validateByArtifactReference(invalidBean); System.out.println("Validation result: " + invalidBeanResult); System.out.println(); - // Test validate method providing a record to dynamically resolve the artifact to fetch from the registry + // Test validate method providing a record to dynamically resolve the artifact to fetch from the + // registry ProtobufRecord record = new ProtobufRecord(bean, new ProtobufMetadata(artifactReference)); @@ -149,7 +141,7 @@ public static final void main(String[] args) throws Exception { private static RegistryClient createRegistryClient(String registryUrl) { final String tokenEndpoint = System.getenv(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
if (tokenEndpoint != null) { final String authClient = System.getenv(SchemaResolverConfig.AUTH_CLIENT_ID); final String authSecret = System.getenv(SchemaResolverConfig.AUTH_CLIENT_SECRET); @@ -172,7 +164,7 @@ private static ProtobufValidator createProtobufValidator(ArtifactReference artif // Configure Service Registry location props.putIfAbsent(SchemaResolverConfig.REGISTRY_URL, REGISTRY_URL); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. configureSecurityIfPresent(props); // Create the protobuf validator diff --git a/examples/quarkus-auth/pom.xml b/examples/quarkus-auth/pom.xml index 5a210aa1b0..735a02bf25 100644 --- a/examples/quarkus-auth/pom.xml +++ b/examples/quarkus-auth/pom.xml @@ -1,235 +1,235 @@ - 4.0.0 + 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-quarkus-auth + apicurio-registry-examples-quarkus-auth - - 17 - 17 - UTF-8 - UTF-8 + + 17 + 17 + UTF-8 + UTF-8 - io.quarkus - quarkus-universe-bom - 2.7.5.Final - 2.7.5.Final + io.quarkus + quarkus-universe-bom + 2.7.5.Final + 2.7.5.Final - 1.10.0 + 1.10.0 - - - - - - ${quarkus.platform.group-id} - ${quarkus.platform.artifact-id} - ${quarkus.platform.version} - pom - import - - - + + + + ${quarkus.platform.group-id} + ${quarkus.platform.artifact-id} + ${quarkus.platform.version} + pom + import + + + - - org.apache.avro - avro - - - io.apicurio - apicurio-registry-maven-plugin - ${project.version} - test - - - io.quarkus - quarkus-apicurio-registry-avro - + - - io.quarkus - quarkus-resteasy-jackson - - - com.github.java-json-tools - jackson-coreutils - - - - - io.quarkus - quarkus-smallrye-health - - - com.github.java-json-tools - jackson-coreutils - 2.0 - compile - + + org.apache.avro + avro + + + io.apicurio + apicurio-registry-maven-plugin + ${project.version} + test + + + io.quarkus 
+ quarkus-apicurio-registry-avro + - - io.quarkus - quarkus-smallrye-reactive-messaging-kafka - + + io.quarkus + quarkus-resteasy-jackson + + + com.github.java-json-tools + jackson-coreutils + + + + + io.quarkus + quarkus-smallrye-health + + + com.github.java-json-tools + jackson-coreutils + 2.0 + compile + - - io.strimzi - kafka-oauth-client - 0.7.2 - + + io.quarkus + quarkus-smallrye-reactive-messaging-kafka + - - io.quarkus - quarkus-config-yaml - + + io.strimzi + kafka-oauth-client + 0.7.2 + - + + io.quarkus + quarkus-config-yaml + + + - + + + + io.quarkus + quarkus-maven-plugin + ${quarkus-plugin.version} + true + + + + build + generate-code + generate-code-tests + + + + + + maven-compiler-plugin + + + maven-surefire-plugin + + + org.jboss.logmanager.LogManager + ${maven.home} + + + + + + + + avro + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + + schema + + generate-sources + + ${project.basedir}/src/main/resources/avro/schema/ + ${project.basedir}/src/main/java/ + + + + + + + + + upload + + + + io.apicurio + apicurio-registry-maven-plugin + ${project.version} + + + + register + + generate-sources + + http://localhost:8181/apis/registry/v2 + AVRO + + ${project.basedir}/src/main/resources/avro/schema/event.avsc + + + + + + + + + + test + - - io.quarkus - quarkus-maven-plugin - ${quarkus-plugin.version} - true - - - - build - generate-code - generate-code-tests - - - - - - maven-compiler-plugin - - - maven-surefire-plugin + + io.apicurio + apicurio-registry-maven-plugin + ${project.version} + + + + test-update + + generate-sources + + http://localhost:8181/apis/registry/v2 + AVRO + + ${project.basedir}/src/main/resources/avro/schema/event.avsc + + + + + + + + + + download + + + + io.apicurio + apicurio-registry-maven-plugin + ${project.version} + + + + download + + generate-sources - - org.jboss.logmanager.LogManager - ${maven.home} - + http://localhost:8181/apis/registry/v2 + + events-value + + ${project.build.directory} - + + + - - - - avro 
- - - - org.apache.avro - avro-maven-plugin - ${avro.version} - - - generate-sources - - schema - - - ${project.basedir}/src/main/resources/avro/schema/ - ${project.basedir}/src/main/java/ - - - - - - - - - upload - - - - io.apicurio - apicurio-registry-maven-plugin - ${project.version} - - - generate-sources - - register - - - http://localhost:8181/apis/registry/v2 - AVRO - - ${project.basedir}/src/main/resources/avro/schema/event.avsc - - - - - - - - - - test - - - - io.apicurio - apicurio-registry-maven-plugin - ${project.version} - - - generate-sources - - test-update - - - http://localhost:8181/apis/registry/v2 - AVRO - - ${project.basedir}/src/main/resources/avro/schema/event.avsc - - - - - - - - - - download - - - - io.apicurio - apicurio-registry-maven-plugin - ${project.version} - - - generate-sources - - download - - - http://localhost:8181/apis/registry/v2 - - events-value - - ${project.build.directory} - - - - - - - - + + + diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/Consumer.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/Consumer.java index e587e5d6f8..717172a16f 100644 --- a/examples/quarkus-auth/src/main/java/io/apicurio/example/Consumer.java +++ b/examples/quarkus-auth/src/main/java/io/apicurio/example/Consumer.java @@ -1,12 +1,11 @@ package io.apicurio.example; -import javax.enterprise.context.ApplicationScoped; - +import io.apicurio.example.schema.avro.Event; import org.eclipse.microprofile.reactive.messaging.Incoming; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import io.apicurio.example.schema.avro.Event; +import javax.enterprise.context.ApplicationScoped; @ApplicationScoped public class Consumer { diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/InputEvent.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/InputEvent.java index 13b5ac01b4..ef20e26639 100644 --- a/examples/quarkus-auth/src/main/java/io/apicurio/example/InputEvent.java +++ 
b/examples/quarkus-auth/src/main/java/io/apicurio/example/InputEvent.java @@ -1,19 +1,22 @@ package io.apicurio.example; public class InputEvent { - + private String name; private String description; public String getName() { return name; } + public void setName(String name) { this.name = name; } + public String getDescription() { return description; } + public void setDescription(String description) { this.description = description; } diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/Producer.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/Producer.java index a4503109f5..a57212852c 100644 --- a/examples/quarkus-auth/src/main/java/io/apicurio/example/Producer.java +++ b/examples/quarkus-auth/src/main/java/io/apicurio/example/Producer.java @@ -1,14 +1,13 @@ package io.apicurio.example; -import javax.enterprise.context.ApplicationScoped; -import javax.inject.Inject; - +import io.apicurio.example.schema.avro.Event; import org.eclipse.microprofile.reactive.messaging.Channel; import org.eclipse.microprofile.reactive.messaging.Emitter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import io.apicurio.example.schema.avro.Event; +import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; @ApplicationScoped public class Producer { diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/Resource.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/Resource.java index 561c956dec..bf9854f75d 100644 --- a/examples/quarkus-auth/src/main/java/io/apicurio/example/Resource.java +++ b/examples/quarkus-auth/src/main/java/io/apicurio/example/Resource.java @@ -1,13 +1,12 @@ package io.apicurio.example; -import javax.inject.Inject; -import javax.ws.rs.POST; -import javax.ws.rs.Path; - +import io.apicurio.example.schema.avro.Event; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import io.apicurio.example.schema.avro.Event; +import javax.inject.Inject; +import javax.ws.rs.POST; +import 
javax.ws.rs.Path; @Path("/kafka") public class Resource { diff --git a/examples/quarkus-auth/src/main/java/io/apicurio/example/schema/avro/Event.java b/examples/quarkus-auth/src/main/java/io/apicurio/example/schema/avro/Event.java index 5d33bf7778..b286f8f3e1 100644 --- a/examples/quarkus-auth/src/main/java/io/apicurio/example/schema/avro/Event.java +++ b/examples/quarkus-auth/src/main/java/io/apicurio/example/schema/avro/Event.java @@ -5,487 +5,526 @@ */ package io.apicurio.example.schema.avro; -import org.apache.avro.specific.SpecificData; -import org.apache.avro.util.Utf8; -import org.apache.avro.message.BinaryMessageEncoder; import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.BinaryMessageEncoder; import org.apache.avro.message.SchemaStore; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; /** Avro Schema for Event */ @org.apache.avro.specific.AvroGenerated -public class Event extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { - private static final long serialVersionUID = -3808115584469037383L; - public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Event\",\"namespace\":\"io.apicurio.example.schema.avro\",\"doc\":\"Avro Schema for Event\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"description\",\"type\":\"string\"},{\"name\":\"source\",\"type\":[\"null\",\"string\"],\"default\":null}]}"); - public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } - - private static SpecificData MODEL$ = new SpecificData(); - - private static final BinaryMessageEncoder ENCODER = - new BinaryMessageEncoder(MODEL$, SCHEMA$); - - private static final BinaryMessageDecoder DECODER = - new BinaryMessageDecoder(MODEL$, SCHEMA$); - - /** - * Return the BinaryMessageEncoder instance used by this class. 
- * @return the message encoder used by this class - */ - public static BinaryMessageEncoder getEncoder() { - return ENCODER; - } - - /** - * Return the BinaryMessageDecoder instance used by this class. - * @return the message decoder used by this class - */ - public static BinaryMessageDecoder getDecoder() { - return DECODER; - } - - /** - * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. - * @param resolver a {@link SchemaStore} used to find schemas by fingerprint - * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore - */ - public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { - return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver); - } - - /** - * Serializes this Event to a ByteBuffer. - * @return a buffer holding the serialized data for this instance - * @throws java.io.IOException if this instance could not be serialized - */ - public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { - return ENCODER.encode(this); - } - - /** - * Deserializes a Event from a ByteBuffer. - * @param b a byte buffer holding serialized data for an instance of this class - * @return a Event instance decoded from the given buffer - * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class - */ - public static Event fromByteBuffer( - java.nio.ByteBuffer b) throws java.io.IOException { - return DECODER.decode(b); - } - - private java.lang.CharSequence name; - private java.lang.CharSequence description; - private java.lang.CharSequence source; - - /** - * Default constructor. Note that this does not initialize fields - * to their default values from the schema. If that is desired then - * one should use newBuilder(). - */ - public Event() {} - - /** - * All-args constructor. 
- * @param name The new value for name - * @param description The new value for description - * @param source The new value for source - */ - public Event(java.lang.CharSequence name, java.lang.CharSequence description, java.lang.CharSequence source) { - this.name = name; - this.description = description; - this.source = source; - } - - @Override -public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } - @Override -public org.apache.avro.Schema getSchema() { return SCHEMA$; } - // Used by DatumWriter. Applications should not call. - @Override -public java.lang.Object get(int field$) { - switch (field$) { - case 0: return name; - case 1: return description; - case 2: return source; - default: throw new IndexOutOfBoundsException("Invalid index: " + field$); - } - } - - // Used by DatumReader. Applications should not call. - @Override -@SuppressWarnings(value="unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: name = (java.lang.CharSequence)value$; break; - case 1: description = (java.lang.CharSequence)value$; break; - case 2: source = (java.lang.CharSequence)value$; break; - default: throw new IndexOutOfBoundsException("Invalid index: " + field$); +public class Event extends org.apache.avro.specific.SpecificRecordBase + implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -3808115584469037383L; + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"Event\",\"namespace\":\"io.apicurio.example.schema.avro\",\"doc\":\"Avro Schema for Event\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"description\",\"type\":\"string\"},{\"name\":\"source\",\"type\":[\"null\",\"string\"],\"default\":null}]}"); + + public static org.apache.avro.Schema getClassSchema() { + return SCHEMA$; } - } - - /** - * Gets the value of the 'name' field. 
- * @return The value of the 'name' field. - */ - public java.lang.CharSequence getName() { - return name; - } - - - /** - * Sets the value of the 'name' field. - * @param value the value to set. - */ - public void setName(java.lang.CharSequence value) { - this.name = value; - } - - /** - * Gets the value of the 'description' field. - * @return The value of the 'description' field. - */ - public java.lang.CharSequence getDescription() { - return description; - } - - - /** - * Sets the value of the 'description' field. - * @param value the value to set. - */ - public void setDescription(java.lang.CharSequence value) { - this.description = value; - } - - /** - * Gets the value of the 'source' field. - * @return The value of the 'source' field. - */ - public java.lang.CharSequence getSource() { - return source; - } - - - /** - * Sets the value of the 'source' field. - * @param value the value to set. - */ - public void setSource(java.lang.CharSequence value) { - this.source = value; - } - - /** - * Creates a new Event RecordBuilder. - * @return A new Event RecordBuilder - */ - public static io.apicurio.example.schema.avro.Event.Builder newBuilder() { - return new io.apicurio.example.schema.avro.Event.Builder(); - } - - /** - * Creates a new Event RecordBuilder by copying an existing Builder. - * @param other The existing builder to copy. - * @return A new Event RecordBuilder - */ - public static io.apicurio.example.schema.avro.Event.Builder newBuilder(io.apicurio.example.schema.avro.Event.Builder other) { - if (other == null) { - return new io.apicurio.example.schema.avro.Event.Builder(); - } else { - return new io.apicurio.example.schema.avro.Event.Builder(other); - } - } - - /** - * Creates a new Event RecordBuilder by copying an existing Event instance. - * @param other The existing instance to copy. 
- * @return A new Event RecordBuilder - */ - public static io.apicurio.example.schema.avro.Event.Builder newBuilder(io.apicurio.example.schema.avro.Event other) { - if (other == null) { - return new io.apicurio.example.schema.avro.Event.Builder(); - } else { - return new io.apicurio.example.schema.avro.Event.Builder(other); - } - } - /** - * RecordBuilder for Event instances. - */ - @org.apache.avro.specific.AvroGenerated - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { + private static SpecificData MODEL$ = new SpecificData(); - private java.lang.CharSequence name; - private java.lang.CharSequence description; - private java.lang.CharSequence source; + private static final BinaryMessageEncoder ENCODER = new BinaryMessageEncoder(MODEL$, + SCHEMA$); - /** Creates a new Builder */ - private Builder() { - super(SCHEMA$); - } + private static final BinaryMessageDecoder DECODER = new BinaryMessageDecoder(MODEL$, + SCHEMA$); /** - * Creates a Builder by copying an existing Builder. - * @param other The existing Builder to copy. + * Return the BinaryMessageEncoder instance used by this class. 
+ * + * @return the message encoder used by this class */ - private Builder(io.apicurio.example.schema.avro.Event.Builder other) { - super(other); - if (isValidValue(fields()[0], other.name)) { - this.name = data().deepCopy(fields()[0].schema(), other.name); - fieldSetFlags()[0] = other.fieldSetFlags()[0]; - } - if (isValidValue(fields()[1], other.description)) { - this.description = data().deepCopy(fields()[1].schema(), other.description); - fieldSetFlags()[1] = other.fieldSetFlags()[1]; - } - if (isValidValue(fields()[2], other.source)) { - this.source = data().deepCopy(fields()[2].schema(), other.source); - fieldSetFlags()[2] = other.fieldSetFlags()[2]; - } + public static BinaryMessageEncoder getEncoder() { + return ENCODER; } /** - * Creates a Builder by copying an existing Event instance - * @param other The existing instance to copy. + * Return the BinaryMessageDecoder instance used by this class. + * + * @return the message decoder used by this class */ - private Builder(io.apicurio.example.schema.avro.Event other) { - super(SCHEMA$); - if (isValidValue(fields()[0], other.name)) { - this.name = data().deepCopy(fields()[0].schema(), other.name); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.description)) { - this.description = data().deepCopy(fields()[1].schema(), other.description); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.source)) { - this.source = data().deepCopy(fields()[2].schema(), other.source); - fieldSetFlags()[2] = true; - } + public static BinaryMessageDecoder getDecoder() { + return DECODER; } /** - * Gets the value of the 'name' field. - * @return The value. - */ - public java.lang.CharSequence getName() { - return name; + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. 
+ * + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver); } - /** - * Sets the value of the 'name' field. - * @param value The value of 'name'. - * @return This builder. - */ - public io.apicurio.example.schema.avro.Event.Builder setName(java.lang.CharSequence value) { - validate(fields()[0], value); - this.name = value; - fieldSetFlags()[0] = true; - return this; + * Serializes this Event to a ByteBuffer. + * + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); } /** - * Checks whether the 'name' field has been set. - * @return True if the 'name' field has been set, false otherwise. - */ - public boolean hasName() { - return fieldSetFlags()[0]; + * Deserializes a Event from a ByteBuffer. + * + * @param b a byte buffer holding serialized data for an instance of this class + * @return a Event instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static Event fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); } + private java.lang.CharSequence name; + private java.lang.CharSequence description; + private java.lang.CharSequence source; /** - * Clears the value of the 'name' field. - * @return This builder. - */ - public io.apicurio.example.schema.avro.Event.Builder clearName() { - name = null; - fieldSetFlags()[0] = false; - return this; + * Default constructor. Note that this does not initialize fields to their default values from the schema. 
+ * If that is desired then one should use newBuilder(). + */ + public Event() { } /** - * Gets the value of the 'description' field. - * @return The value. - */ - public java.lang.CharSequence getDescription() { - return description; + * All-args constructor. + * + * @param name The new value for name + * @param description The new value for description + * @param source The new value for source + */ + public Event(java.lang.CharSequence name, java.lang.CharSequence description, + java.lang.CharSequence source) { + this.name = name; + this.description = description; + this.source = source; } + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { + return MODEL$; + } + + @Override + public org.apache.avro.Schema getSchema() { + return SCHEMA$; + } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: + return name; + case 1: + return description; + case 2: + return source; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value = "unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: + name = (java.lang.CharSequence) value$; + break; + case 1: + description = (java.lang.CharSequence) value$; + break; + case 2: + source = (java.lang.CharSequence) value$; + break; + default: + throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } /** - * Sets the value of the 'description' field. - * @param value The value of 'description'. - * @return This builder. - */ - public io.apicurio.example.schema.avro.Event.Builder setDescription(java.lang.CharSequence value) { - validate(fields()[1], value); - this.description = value; - fieldSetFlags()[1] = true; - return this; + * Gets the value of the 'name' field. + * + * @return The value of the 'name' field. 
+ */ + public java.lang.CharSequence getName() { + return name; } /** - * Checks whether the 'description' field has been set. - * @return True if the 'description' field has been set, false otherwise. - */ - public boolean hasDescription() { - return fieldSetFlags()[1]; + * Sets the value of the 'name' field. + * + * @param value the value to set. + */ + public void setName(java.lang.CharSequence value) { + this.name = value; } + /** + * Gets the value of the 'description' field. + * + * @return The value of the 'description' field. + */ + public java.lang.CharSequence getDescription() { + return description; + } /** - * Clears the value of the 'description' field. - * @return This builder. - */ - public io.apicurio.example.schema.avro.Event.Builder clearDescription() { - description = null; - fieldSetFlags()[1] = false; - return this; + * Sets the value of the 'description' field. + * + * @param value the value to set. + */ + public void setDescription(java.lang.CharSequence value) { + this.description = value; } /** - * Gets the value of the 'source' field. - * @return The value. - */ + * Gets the value of the 'source' field. + * + * @return The value of the 'source' field. + */ public java.lang.CharSequence getSource() { - return source; + return source; } - /** - * Sets the value of the 'source' field. - * @param value The value of 'source'. - * @return This builder. - */ - public io.apicurio.example.schema.avro.Event.Builder setSource(java.lang.CharSequence value) { - validate(fields()[2], value); - this.source = value; - fieldSetFlags()[2] = true; - return this; + * Sets the value of the 'source' field. + * + * @param value the value to set. + */ + public void setSource(java.lang.CharSequence value) { + this.source = value; } /** - * Checks whether the 'source' field has been set. - * @return True if the 'source' field has been set, false otherwise. - */ - public boolean hasSource() { - return fieldSetFlags()[2]; + * Creates a new Event RecordBuilder. 
+ * + * @return A new Event RecordBuilder + */ + public static io.apicurio.example.schema.avro.Event.Builder newBuilder() { + return new io.apicurio.example.schema.avro.Event.Builder(); } - /** - * Clears the value of the 'source' field. - * @return This builder. - */ - public io.apicurio.example.schema.avro.Event.Builder clearSource() { - source = null; - fieldSetFlags()[2] = false; - return this; + * Creates a new Event RecordBuilder by copying an existing Builder. + * + * @param other The existing builder to copy. + * @return A new Event RecordBuilder + */ + public static io.apicurio.example.schema.avro.Event.Builder newBuilder( + io.apicurio.example.schema.avro.Event.Builder other) { + if (other == null) { + return new io.apicurio.example.schema.avro.Event.Builder(); + } else { + return new io.apicurio.example.schema.avro.Event.Builder(other); + } } - @Override - @SuppressWarnings("unchecked") - public Event build() { - try { - Event record = new Event(); - record.name = fieldSetFlags()[0] ? this.name : (java.lang.CharSequence) defaultValue(fields()[0]); - record.description = fieldSetFlags()[1] ? this.description : (java.lang.CharSequence) defaultValue(fields()[1]); - record.source = fieldSetFlags()[2] ? this.source : (java.lang.CharSequence) defaultValue(fields()[2]); - return record; - } catch (org.apache.avro.AvroMissingFieldException e) { - throw e; - } catch (java.lang.Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } + /** + * Creates a new Event RecordBuilder by copying an existing Event instance. + * + * @param other The existing instance to copy. 
+ * @return A new Event RecordBuilder + */ + public static io.apicurio.example.schema.avro.Event.Builder newBuilder( + io.apicurio.example.schema.avro.Event other) { + if (other == null) { + return new io.apicurio.example.schema.avro.Event.Builder(); + } else { + return new io.apicurio.example.schema.avro.Event.Builder(other); + } } - } - @SuppressWarnings("unchecked") - private static final org.apache.avro.io.DatumWriter - WRITER$ = MODEL$.createDatumWriter(SCHEMA$); + /** + * RecordBuilder for Event instances. + */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private java.lang.CharSequence name; + private java.lang.CharSequence description; + private java.lang.CharSequence source; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$); + } + + /** + * Creates a Builder by copying an existing Builder. + * + * @param other The existing Builder to copy. + */ + private Builder(io.apicurio.example.schema.avro.Event.Builder other) { + super(other); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.description)) { + this.description = data().deepCopy(fields()[1].schema(), other.description); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.source)) { + this.source = data().deepCopy(fields()[2].schema(), other.source); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + } - @Override public void writeExternal(java.io.ObjectOutput out) - throws java.io.IOException { - WRITER$.write(this, SpecificData.getEncoder(out)); - } + /** + * Creates a Builder by copying an existing Event instance + * + * @param other The existing instance to copy. 
+ */ + private Builder(io.apicurio.example.schema.avro.Event other) { + super(SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.description)) { + this.description = data().deepCopy(fields()[1].schema(), other.description); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.source)) { + this.source = data().deepCopy(fields()[2].schema(), other.source); + fieldSetFlags()[2] = true; + } + } - @SuppressWarnings("unchecked") - private static final org.apache.avro.io.DatumReader - READER$ = MODEL$.createDatumReader(SCHEMA$); + /** + * Gets the value of the 'name' field. + * + * @return The value. + */ + public java.lang.CharSequence getName() { + return name; + } - @Override public void readExternal(java.io.ObjectInput in) - throws java.io.IOException { - READER$.read(this, SpecificData.getDecoder(in)); - } + /** + * Sets the value of the 'name' field. + * + * @param value The value of 'name'. + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder setName(java.lang.CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } - @Override protected boolean hasCustomCoders() { return true; } + /** + * Checks whether the 'name' field has been set. + * + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[0]; + } - @Override public void customEncode(org.apache.avro.io.Encoder out) - throws java.io.IOException - { - out.writeString(this.name); + /** + * Clears the value of the 'name' field. + * + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } - out.writeString(this.description); + /** + * Gets the value of the 'description' field. 
+ * + * @return The value. + */ + public java.lang.CharSequence getDescription() { + return description; + } - if (this.source == null) { - out.writeIndex(0); - out.writeNull(); - } else { - out.writeIndex(1); - out.writeString(this.source); - } + /** + * Sets the value of the 'description' field. + * + * @param value The value of 'description'. + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder setDescription(java.lang.CharSequence value) { + validate(fields()[1], value); + this.description = value; + fieldSetFlags()[1] = true; + return this; + } - } - - @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) - throws java.io.IOException - { - org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); - if (fieldOrder == null) { - this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); - - this.description = in.readString(this.description instanceof Utf8 ? (Utf8)this.description : null); - - if (in.readIndex() != 1) { - in.readNull(); - this.source = null; - } else { - this.source = in.readString(this.source instanceof Utf8 ? (Utf8)this.source : null); - } - - } else { - for (int i = 0; i < 3; i++) { - switch (fieldOrder[i].pos()) { - case 0: - this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); - break; - - case 1: - this.description = in.readString(this.description instanceof Utf8 ? (Utf8)this.description : null); - break; - - case 2: - if (in.readIndex() != 1) { - in.readNull(); - this.source = null; - } else { - this.source = in.readString(this.source instanceof Utf8 ? (Utf8)this.source : null); - } - break; - - default: - throw new java.io.IOException("Corrupt ResolvingDecoder."); + /** + * Checks whether the 'description' field has been set. + * + * @return True if the 'description' field has been set, false otherwise. 
+ */ + public boolean hasDescription() { + return fieldSetFlags()[1]; + } + + /** + * Clears the value of the 'description' field. + * + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder clearDescription() { + description = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'source' field. + * + * @return The value. + */ + public java.lang.CharSequence getSource() { + return source; + } + + /** + * Sets the value of the 'source' field. + * + * @param value The value of 'source'. + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder setSource(java.lang.CharSequence value) { + validate(fields()[2], value); + this.source = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'source' field has been set. + * + * @return True if the 'source' field has been set, false otherwise. + */ + public boolean hasSource() { + return fieldSetFlags()[2]; + } + + /** + * Clears the value of the 'source' field. + * + * @return This builder. + */ + public io.apicurio.example.schema.avro.Event.Builder clearSource() { + source = null; + fieldSetFlags()[2] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public Event build() { + try { + Event record = new Event(); + record.name = fieldSetFlags()[0] ? this.name + : (java.lang.CharSequence) defaultValue(fields()[0]); + record.description = fieldSetFlags()[1] ? this.description + : (java.lang.CharSequence) defaultValue(fields()[1]); + record.source = fieldSetFlags()[2] ? 
this.source + : (java.lang.CharSequence) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } } - } } - } -} + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter WRITER$ = MODEL$.createDatumWriter(SCHEMA$); + @Override + public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader READER$ = MODEL$.createDatumReader(SCHEMA$); + @Override + public void readExternal(java.io.ObjectInput in) throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + @Override + protected boolean hasCustomCoders() { + return true; + } + @Override + public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { + out.writeString(this.name); + out.writeString(this.description); + if (this.source == null) { + out.writeIndex(0); + out.writeNull(); + } else { + out.writeIndex(1); + out.writeString(this.source); + } + } + @Override + public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.name = in.readString(this.name instanceof Utf8 ? (Utf8) this.name : null); + + this.description = in + .readString(this.description instanceof Utf8 ? (Utf8) this.description : null); + + if (in.readIndex() != 1) { + in.readNull(); + this.source = null; + } else { + this.source = in.readString(this.source instanceof Utf8 ? (Utf8) this.source : null); + } + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.name = in.readString(this.name instanceof Utf8 ? 
(Utf8) this.name : null); + break; + + case 1: + this.description = in.readString( + this.description instanceof Utf8 ? (Utf8) this.description : null); + break; + + case 2: + if (in.readIndex() != 1) { + in.readNull(); + this.source = null; + } else { + this.source = in + .readString(this.source instanceof Utf8 ? (Utf8) this.source : null); + } + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} diff --git a/examples/rest-client-downstream/pom.xml b/examples/rest-client-downstream/pom.xml index 5f7330b0fd..edf1632c61 100644 --- a/examples/rest-client-downstream/pom.xml +++ b/examples/rest-client-downstream/pom.xml @@ -1,37 +1,35 @@ - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-rest-client-downstream - jar + apicurio-registry-examples-rest-client-downstream + jar - - - 2.2.1.Final - 17 - 17 - + + + 2.2.1.Final + 17 + 17 + - - - io.apicurio - apicurio-registry-client - ${apicurio-registry.version} - + + + io.apicurio + apicurio-registry-client + ${apicurio-registry.version} + - - org.slf4j - slf4j-jdk14 - ${slf4j.version} - - + + org.slf4j + slf4j-jdk14 + ${slf4j.version} + + - \ No newline at end of file + diff --git a/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/Constants.java b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/Constants.java index c6420d39db..fbfa790bfe 100644 --- a/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/Constants.java +++ b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/Constants.java @@ -2,23 +2,11 @@ public class Constants { - public static final String SCHEMA = "{" + - " \"$id\": \"https://example.com/message.schema.json\"," + - " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + - " \"required\": [" + - " 
\"message\"," + - " \"time\"" + - " ]," + - " \"type\": \"object\"," + - " \"properties\": {" + - " \"message\": {" + - " \"description\": \"\"," + - " \"type\": \"string\"" + - " }," + - " \"time\": {" + - " \"description\": \"\"," + - " \"type\": \"number\"" + - " }" + - " }" + - "}"; + public static final String SCHEMA = "{" + " \"$id\": \"https://example.com/message.schema.json\"," + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + " \"required\": [" + + " \"message\"," + " \"time\"" + " ]," + " \"type\": \"object\"," + + " \"properties\": {" + " \"message\": {" + " \"description\": \"\"," + + " \"type\": \"string\"" + " }," + " \"time\": {" + + " \"description\": \"\"," + " \"type\": \"number\"" + " }" + + " }" + "}"; } diff --git a/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java index c2656f5a9a..0cd382edc3 100644 --- a/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java +++ b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java @@ -1,8 +1,5 @@ package io.apicurio.registry.examples; -import java.util.Collections; -import java.util.UUID; - import io.apicurio.registry.examples.util.RegistryDemoUtil; import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.rest.client.RegistryClientFactory; @@ -11,13 +8,13 @@ import io.apicurio.rest.client.auth.exception.AuthErrorHandler; import io.apicurio.rest.client.spi.ApicurioHttpClient; +import java.util.Collections; +import java.util.UUID; /** * Simple demo app that shows how to use the client. *

- * 1) Register a new schema in the Registry. - * 2) Fetch the newly created schema. - * 3) Delete the schema. + * 1) Register a new schema in the Registry. 2) Fetch the newly created schema. 3) Delete the schema. * * @author Carles Arnal */ @@ -37,14 +34,15 @@ public static void main(String[] args) throws Exception { RegistryDemoUtil.createSchemaInServiceRegistry(client, artifactId, Constants.SCHEMA); - //Wait for the artifact to be available. + // Wait for the artifact to be available. Thread.sleep(1000); RegistryDemoUtil.getSchemaFromRegistry(client, artifactId); RegistryDemoUtil.deleteSchema(client, artifactId); - //Required due to a bug in the version of registry libraries used. Once the new version is released, we'll be able to remove this. + // Required due to a bug in the version of registry libraries used. Once the new version is released, + // we'll be able to remove this. System.exit(0); } @@ -55,8 +53,10 @@ public static RegistryClient createProperClient(String registryUrl) { if (tokenEndpoint != null) { final String authClient = System.getenv("AUTH_CLIENT_ID"); final String authSecret = System.getenv("AUTH_CLIENT_SECRET"); - ApicurioHttpClient httpClient = new JdkHttpClientProvider().create(tokenEndpoint, Collections.emptyMap(), null, new AuthErrorHandler()); - return RegistryClientFactory.create(registryUrl, Collections.emptyMap(), new OidcAuth(httpClient, authClient, authSecret)); + ApicurioHttpClient httpClient = new JdkHttpClientProvider().create(tokenEndpoint, + Collections.emptyMap(), null, new AuthErrorHandler()); + return RegistryClientFactory.create(registryUrl, Collections.emptyMap(), + new OidcAuth(httpClient, authClient, authSecret)); } else { return RegistryClientFactory.create(registryUrl); } diff --git a/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java index 797969c4bd..8206037a28 
100644 --- a/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java +++ b/examples/rest-client-downstream/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java @@ -1,15 +1,14 @@ package io.apicurio.registry.examples.util; -import java.io.ByteArrayInputStream; -import java.nio.charset.StandardCharsets; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.rest.v2.beans.ArtifactMetaData; import io.apicurio.registry.rest.v2.beans.IfExists; import io.apicurio.registry.types.ArtifactType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; public class RegistryDemoUtil { @@ -21,13 +20,16 @@ public class RegistryDemoUtil { * @param artifactId * @param schema */ - public static void createSchemaInServiceRegistry(RegistryClient service, String artifactId, String schema) { + public static void createSchemaInServiceRegistry(RegistryClient service, String artifactId, + String schema) { LOGGER.info("---------------------------------------------------------"); LOGGER.info("=====> Creating artifact in the registry for JSON Schema with ID: {}", artifactId); try { - final ByteArrayInputStream content = new ByteArrayInputStream(schema.getBytes(StandardCharsets.UTF_8)); - final ArtifactMetaData metaData = service.createArtifact("default", artifactId, ArtifactType.JSON, IfExists.RETURN, content); + final ByteArrayInputStream content = new ByteArrayInputStream( + schema.getBytes(StandardCharsets.UTF_8)); + final ArtifactMetaData metaData = service.createArtifact("default", artifactId, ArtifactType.JSON, + IfExists.RETURN, content); assert metaData != null; LOGGER.info("=====> Successfully created JSON Schema artifact in Service Registry: {}", metaData); LOGGER.info("---------------------------------------------------------"); diff --git 
a/examples/rest-client/pom.xml b/examples/rest-client/pom.xml index b6e45ee3a7..37ccc36ca1 100644 --- a/examples/rest-client/pom.xml +++ b/examples/rest-client/pom.xml @@ -1,36 +1,34 @@ - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-rest-client - jar + apicurio-registry-examples-rest-client + jar - - - io.apicurio - apicurio-registry-java-sdk - ${project.version} - + + + io.apicurio + apicurio-registry-java-sdk + ${project.version} + - - io.apicurio - apicurio-common-rest-client-vertx - ${apicurio-common-rest-client.version} - + + io.apicurio + apicurio-common-rest-client-vertx + ${apicurio-common-rest-client.version} + - - org.slf4j - slf4j-jdk14 - ${slf4j.version} - - + + org.slf4j + slf4j-jdk14 + ${slf4j.version} + + - \ No newline at end of file + diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/Constants.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/Constants.java index c6420d39db..fbfa790bfe 100644 --- a/examples/rest-client/src/main/java/io/apicurio/registry/examples/Constants.java +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/Constants.java @@ -2,23 +2,11 @@ public class Constants { - public static final String SCHEMA = "{" + - " \"$id\": \"https://example.com/message.schema.json\"," + - " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + - " \"required\": [" + - " \"message\"," + - " \"time\"" + - " ]," + - " \"type\": \"object\"," + - " \"properties\": {" + - " \"message\": {" + - " \"description\": \"\"," + - " \"type\": \"string\"" + - " }," + - " \"time\": {" + - " \"description\": \"\"," + - " \"type\": \"number\"" + - " }" + - " }" + - "}"; + public static final String SCHEMA = "{" + " \"$id\": \"https://example.com/message.schema.json\"," + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + " 
\"required\": [" + + " \"message\"," + " \"time\"" + " ]," + " \"type\": \"object\"," + + " \"properties\": {" + " \"message\": {" + " \"description\": \"\"," + + " \"type\": \"string\"" + " }," + " \"time\": {" + + " \"description\": \"\"," + " \"type\": \"number\"" + " }" + + " }" + "}"; } diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/RegistryLoader.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/RegistryLoader.java index b146322046..b9ec304ec8 100644 --- a/examples/rest-client/src/main/java/io/apicurio/registry/examples/RegistryLoader.java +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/RegistryLoader.java @@ -80,7 +80,8 @@ public void run() { createArtifact.setFirstVersion(createVersion); VersionContent versionContent = new VersionContent(); createVersion.setContent(versionContent); - versionContent.setContent(simpleAvro.replace("userInfo", "userInfo" + threadId + numArtifacts)); + versionContent + .setContent(simpleAvro.replace("userInfo", "userInfo" + threadId + numArtifacts)); versionContent.setContentType("application/json"); client.groups().byGroupId("default").artifacts().post(createArtifact, config -> { @@ -89,7 +90,8 @@ public void run() { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("SYNTAX_ONLY"); - client.groups().byGroupId("default").artifacts().byArtifactId(artifactId).rules().post(createRule); + client.groups().byGroupId("default").artifacts().byArtifactId(artifactId).rules() + .post(createRule); } } } diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java index ab9055aad8..bf9c59e550 100644 --- a/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java +++ 
b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemo.java @@ -12,9 +12,7 @@ /** * Simple demo app that shows how to use the client. *

- * 1) Register a new schema in the Registry. - * 2) Fetch the newly created schema. - * 3) Delete the schema. + * 1) Register a new schema in the Registry. 2) Fetch the newly created schema. 3) Delete the schema. * * @author Carles Arnal */ @@ -36,7 +34,7 @@ public static void main(String[] args) throws Exception { RegistryDemoUtil.createSchemaInServiceRegistry(client, artifactId, Constants.SCHEMA); - //Wait for the artifact to be available. + // Wait for the artifact to be available. Thread.sleep(1000); RegistryDemoUtil.getSchemaFromRegistry(client, artifactId); diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemoBasicAuth.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemoBasicAuth.java index 1930cc6e81..ca0223f721 100644 --- a/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemoBasicAuth.java +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/SimpleRegistryDemoBasicAuth.java @@ -12,9 +12,7 @@ /** * Simple demo app that shows how to use the client. *

- * 1) Register a new schema in the Registry. - * 2) Fetch the newly created schema. - * 3) Delete the schema. + * 1) Register a new schema in the Registry. 2) Fetch the newly created schema. 3) Delete the schema. * * @author Carles Arnal */ @@ -34,7 +32,7 @@ public static void main(String[] args) throws Exception { RegistryDemoUtil.createSchemaInServiceRegistry(client, artifactId, Constants.SCHEMA); - //Wait for the artifact to be available. + // Wait for the artifact to be available. Thread.sleep(1000); RegistryDemoUtil.getSchemaFromRegistry(client, artifactId); diff --git a/examples/rest-client/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java b/examples/rest-client/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java index 6f9b816089..ee7b80a749 100644 --- a/examples/rest-client/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java +++ b/examples/rest-client/src/main/java/io/apicurio/registry/examples/util/RegistryDemoUtil.java @@ -23,12 +23,14 @@ public class RegistryDemoUtil { * @param artifactId * @param schema */ - public static void createSchemaInServiceRegistry(RegistryClient service, String artifactId, String schema) { + public static void createSchemaInServiceRegistry(RegistryClient service, String artifactId, + String schema) { LOGGER.info("---------------------------------------------------------"); LOGGER.info("=====> Creating artifact in the registry for JSON Schema with ID: {}", artifactId); try { - final ByteArrayInputStream content = new ByteArrayInputStream(schema.getBytes(StandardCharsets.UTF_8)); + final ByteArrayInputStream content = new ByteArrayInputStream( + schema.getBytes(StandardCharsets.UTF_8)); CreateArtifact createArtifact = new CreateArtifact(); createArtifact.setArtifactId(artifactId); @@ -38,9 +40,10 @@ public static void createSchemaInServiceRegistry(RegistryClient service, String createArtifact.getFirstVersion().getContent().setContent(IoUtil.toString(content)); 
createArtifact.getFirstVersion().getContent().setContentType("application/json"); - final io.apicurio.registry.rest.client.models.VersionMetaData metaData = service.groups().byGroupId("default").artifacts().post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.FIND_OR_CREATE_VERSION; - }).getVersion(); + final io.apicurio.registry.rest.client.models.VersionMetaData metaData = service.groups() + .byGroupId("default").artifacts().post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.FIND_OR_CREATE_VERSION; + }).getVersion(); assert metaData != null; LOGGER.info("=====> Successfully created JSON Schema artifact in Service Registry: {}", metaData); @@ -60,7 +63,8 @@ public static ArtifactMetaData getSchemaFromRegistry(RegistryClient service, Str LOGGER.info("---------------------------------------------------------"); LOGGER.info("=====> Fetching artifact from the registry for JSON Schema with ID: {}", artifactId); try { - final ArtifactMetaData metaData = service.groups().byGroupId("default").artifacts().byArtifactId(artifactId).get(); + final ArtifactMetaData metaData = service.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).get(); assert metaData != null; LOGGER.info("=====> Successfully fetched JSON Schema artifact in Service Registry: {}", metaData); LOGGER.info("---------------------------------------------------------"); diff --git a/examples/serdes-with-references/pom.xml b/examples/serdes-with-references/pom.xml index 461c996a9a..673b464807 100644 --- a/examples/serdes-with-references/pom.xml +++ b/examples/serdes-with-references/pom.xml @@ -1,166 +1,160 @@ - - - apicurio-registry-examples - io.apicurio - 3.0.0-SNAPSHOT - ../pom.xml - - 4.0.0 + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-references + apicurio-registry-examples-references - - 4.26.1 - 0.6.1 - 2.40.0 - + + 4.26.1 + 0.6.1 + 2.40.0 + - - - 
io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - - - io.apicurio - apicurio-registry-serdes-jsonschema-serde - ${project.version} - - - io.apicurio - apicurio-registry-serdes-protobuf-serde - ${project.version} - - - io.apicurio - apicurio-registry-protobuf-schema-utilities - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - - org.slf4j - slf4j-api - ${slf4j.version} - - - org.slf4j - slf4j-simple - ${slf4j.version} - - - com.google.protobuf - protobuf-java - ${protobuf.version} - - - com.google.protobuf - protobuf-java-util - ${protobuf.version} - - - com.google.api.grpc - proto-google-common-protos - ${protobuf.googleapi.types.version} - - + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${project.version} + + + io.apicurio + apicurio-registry-serdes-jsonschema-serde + ${project.version} + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + ${project.version} + + + io.apicurio + apicurio-registry-protobuf-schema-utilities + ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + com.google.protobuf + protobuf-java + ${protobuf.version} + + + com.google.protobuf + protobuf-java-util + ${protobuf.version} + + + com.google.api.grpc + proto-google-common-protos + ${protobuf.googleapi.types.version} + + + + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + detect + + initialize + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + + compile + + generate-sources + + ./src/main/resources/schema + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + org.apache.avro + avro-maven-plugin + ${version.avro} + + + + schema + + generate-sources + + String + + 
${project.basedir}/src/main/resources/Exchange.avsc + ${project.basedir}/src/main/resources/TradeKey.avsc + ${project.basedir}/src/main/resources/TradeRaw.avsc + + ${project.basedir}/src/main/resources/ + ${project.basedir}/target/generated-sources/avro + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-dist + + copy-resources + + prepare-package + + ${project.build.outputDirectory} + + + ${project.basedir}/target/generated-sources/protobuf/ + false + + + + + + + + + - - - - kr.motd.maven - os-maven-plugin - 1.7.1 - - - initialize - - detect - - - - - - org.xolstice.maven.plugins - protobuf-maven-plugin - ${proto-plugin.version} - true - - - gencode - generate-sources - - compile - - - ./src/main/resources/schema - - com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} - - - - - - - org.apache.avro - avro-maven-plugin - ${version.avro} - - - generate-sources - - schema - - - String - - ${project.basedir}/src/main/resources/Exchange.avsc - ${project.basedir}/src/main/resources/TradeKey.avsc - ${project.basedir}/src/main/resources/TradeRaw.avsc - - ${project.basedir}/src/main/resources/ - ${project.basedir}/target/generated-sources/avro - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-dist - prepare-package - - copy-resources - - - ${project.build.outputDirectory} - - - ${project.basedir}/target/generated-sources/protobuf/ - false - - - - - - - - - - - - \ No newline at end of file + diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/AvroSerdeReferencesExample.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/AvroSerdeReferencesExample.java index e6cf444580..a6fdaebb04 100644 --- a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/AvroSerdeReferencesExample.java +++ 
b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/AvroSerdeReferencesExample.java @@ -31,7 +31,7 @@ public class AvroSerdeReferencesExample { private static final String TOPIC_NAME = AvroSerdeReferencesExample.class.getSimpleName(); private static final String SUBJECT_NAME = "Trade"; - public static void main(String [] args) throws Exception { + public static void main(String[] args) throws Exception { System.out.println("Starting example " + AvroSerdeReferencesExample.class.getSimpleName()); String topicName = TOPIC_NAME; String subjectName = SUBJECT_NAME; @@ -52,7 +52,8 @@ public static void main(String [] args) throws Exception { tradeRaw.setSymbol("testSymbol"); // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, tradeRaw); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, + tradeRaw); producer.send(producedRecord); Thread.sleep(100); @@ -82,10 +83,12 @@ public static void main(String [] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. System.out.println("No messages waiting..."); - } else records.forEach(record -> { - TradeRaw tradeRaw = record.value(); - System.out.println("Consumed a message: " + tradeRaw.getPayload() + " @ " + tradeRaw.getTradeKey().getKey()); - }); + } else + records.forEach(record -> { + TradeRaw tradeRaw = record.value(); + System.out.println("Consumed a message: " + tradeRaw.getPayload() + " @ " + + tradeRaw.getTradeKey().getKey()); + }); } } finally { consumer.close(); @@ -115,8 +118,7 @@ private static Producer createKafkaProducer() { props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT_IF_EXISTS, IfExists.RETURN.name()); props.put(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); - - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka producer @@ -138,17 +140,18 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Avro - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); props.putIfAbsent(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON); props.putIfAbsent(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -167,13 +170,16 @@ public static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" "+ - " oauth.client.secret=\"%s\" "+ - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesExample.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesExample.java index ba4ab5ce91..b6ada3ad31 100644 --- a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesExample.java +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/JsonSerdeReferencesExample.java @@ -48,7 +48,6 @@ public static void main(String[] args) throws Exception { String topicName = TOPIC_NAME; String subjectName = SUBJECT_NAME; - RegistryClient client = 
createRegistryClient(REGISTRY_URL); InputStream citySchema = JsonSerdeReferencesExample.class.getClassLoader() @@ -65,9 +64,10 @@ public static void main(String[] args) throws Exception { createArtifact.getFirstVersion().getContent().setContent(IoUtil.toString(citySchema)); createArtifact.getFirstVersion().getContent().setContentType("application/json"); - final io.apicurio.registry.rest.client.models.VersionMetaData amdCity = client.groups().byGroupId("default").artifacts().post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.FIND_OR_CREATE_VERSION; - }).getVersion(); + final io.apicurio.registry.rest.client.models.VersionMetaData amdCity = client.groups() + .byGroupId("default").artifacts().post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.FIND_OR_CREATE_VERSION; + }).getVersion(); final ArtifactReference reference = new ArtifactReference(); reference.setVersion(amdCity.getVersion()); @@ -86,13 +86,13 @@ public static void main(String[] args) throws Exception { citizenCreateArtifact.getFirstVersion().setContent(new VersionContent()); citizenCreateArtifact.getFirstVersion().getContent().setContent(IoUtil.toString(citizenSchema)); citizenCreateArtifact.getFirstVersion().getContent().setContentType("application/json"); - citizenCreateArtifact.getFirstVersion().getContent().setReferences(Collections.singletonList(reference)); + citizenCreateArtifact.getFirstVersion().getContent() + .setReferences(Collections.singletonList(reference)); client.groups().byGroupId("default").artifacts().post(citizenCreateArtifact, config -> { config.queryParameters.ifExists = io.apicurio.registry.rest.client.models.IfArtifactExists.FIND_OR_CREATE_VERSION; }); - // Create the producer. Producer producer = createKafkaProducer(); // Produce 5 messages. 
@@ -175,7 +175,7 @@ private static Producer createKafkaProducer() { // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. configureSecurityIfPresent(props); // Create the Kafka producer @@ -206,10 +206,10 @@ private static KafkaConsumer createKafkaConsumer() { // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -232,10 +232,12 @@ public static void configureSecurityIfPresent(Properties props) { "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format( - "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " - + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " - + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } @@ -245,7 +247,7 @@ public static void configureSecurityIfPresent(Properties props) { private static RegistryClient createRegistryClient(String registryUrl) { final String tokenEndpoint = System.getenv(SchemaResolverConfig.AUTH_TOKEN_ENDPOINT); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
if (tokenEndpoint != null) { final String authClient = System.getenv(SchemaResolverConfig.AUTH_CLIENT_ID); final String authSecret = System.getenv(SchemaResolverConfig.AUTH_CLIENT_SECRET); diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/ProtobufSerdeReferencesExample.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/ProtobufSerdeReferencesExample.java index 39714e2534..1ccae9c316 100644 --- a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/ProtobufSerdeReferencesExample.java +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/ProtobufSerdeReferencesExample.java @@ -123,7 +123,7 @@ private static Producer createKafkaProducer() { props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT_IF_EXISTS, IfExists.RETURN.name()); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. configureSecurityIfPresent(props); // Create the Kafka producer @@ -151,10 +151,10 @@ private static KafkaConsumer createKafkaConsumer() { // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -177,10 +177,12 @@ public static void configureSecurityIfPresent(Properties props) { "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format( - "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " - + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " - + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } \ No newline at end of file diff --git a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Citizen.java b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Citizen.java index bb555fbfbe..347305ee4d 100644 --- a/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Citizen.java +++ b/examples/serdes-with-references/src/main/java/io/apicurio/registry/examples/references/model/Citizen.java @@ -20,13 +20,17 @@ public class Citizen { - @JsonProperty("firstName") private String firstName; + @JsonProperty("firstName") + private String firstName; - @JsonProperty("lastName") private String lastName; + @JsonProperty("lastName") + private String lastName; - @JsonProperty("age") private int age; + @JsonProperty("age") + private int age; - @JsonProperty("city") City city; + @JsonProperty("city") + City city; public Citizen() { } diff --git a/examples/simple-avro-downstream/pom.xml b/examples/simple-avro-downstream/pom.xml index 6e667193f6..017450015d 100644 --- 
a/examples/simple-avro-downstream/pom.xml +++ b/examples/simple-avro-downstream/pom.xml @@ -1,40 +1,38 @@ - - - apicurio-registry-examples - io.apicurio - 3.0.0-SNAPSHOT - - 4.0.0 + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + - apicurio-registry-examples-simple-avro-downstream - jar + apicurio-registry-examples-simple-avro-downstream + jar - - - 2.2.1.Final - 17 - 17 - + + + 2.2.1.Final + 17 + 17 + - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${apicurio-registry.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.9.0 - - + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${apicurio-registry.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.9.0 + + - \ No newline at end of file + diff --git a/examples/simple-avro-downstream/src/main/java/avro/SimpleAvroExample.java b/examples/simple-avro-downstream/src/main/java/avro/SimpleAvroExample.java index b890e7ba63..08a085f36a 100644 --- a/examples/simple-avro-downstream/src/main/java/avro/SimpleAvroExample.java +++ b/examples/simple-avro-downstream/src/main/java/avro/SimpleAvroExample.java @@ -39,21 +39,19 @@ import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe - * scenario with Avro as the serialization type. The following aspects are demonstrated: - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * Avro as the serialization type. The following aspects are demonstrated: *

    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Auto-register the Avro schema in the registry (registered by the producer)
  6. - *
  7. Data sent as a simple GenericRecord, no java beans needed
  8. + *
  9. Configuring a Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Auto-register the Avro schema in the registry (registered by the producer)
  14. + *
  15. Data sent as a simple GenericRecord, no java beans needed
  16. *
*

* Pre-requisites: - * *

    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author eric.wittmann@gmail.com @@ -121,15 +119,16 @@ public static void main(String[] args) throws Exception { } else records.forEach(record -> { GenericRecord value = record.value(); - System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date( - (long) value.get("Time"))); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + + new Date((long) value.get("Time"))); }); } } System.out.println("Done (success)."); - //Required due to a bug in the version of registry libraries used. Once the new version is released, we'll be able to remove this. + // Required due to a bug in the version of registry libraries used. Once the new version is released, + // we'll be able to remove this. System.exit(0); } @@ -152,7 +151,7 @@ private static Producer createKafkaProducer(String registryURL, // Register the artifact if not found in the registry. props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. configureSecurity(props); // Create the Kafka producer @@ -180,10 +179,10 @@ private static KafkaConsumer createKafkaConsumer(String reg // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, registryURL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurity(props); // Create the Kafka Consumer @@ -202,9 +201,11 @@ private static void configureSecurity(Properties props) { props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format( - "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " - + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " - + " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } \ No newline at end of file diff --git a/examples/simple-avro-maven/pom.xml b/examples/simple-avro-maven/pom.xml index db910e0eeb..59dce10a66 100644 --- a/examples/simple-avro-maven/pom.xml +++ b/examples/simple-avro-maven/pom.xml @@ -1,72 +1,71 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-simple-avro-maven - jar + apicurio-registry-examples-simple-avro-maven + jar - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - - - io.apicurio - apicurio-registry-java-sdk - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - - javax.ws.rs - javax.ws.rs-api - 2.1.1 - compile - - + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${project.version} + + + io.apicurio + apicurio-registry-java-sdk + ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client 
+ 0.15.0 + + + javax.ws.rs + javax.ws.rs-api + 2.1.1 + compile + + - - - - io.apicurio - apicurio-registry-maven-plugin - ${project.version} - - - register-artifact - - register - - - http://localhost:8080/apis/registry/v2 - - - default - SimpleAvroMavenExample-value - AVRO - ${project.basedir}/src/main/resources/schemas/greeting.avsc - - - - - - - - + + + + io.apicurio + apicurio-registry-maven-plugin + ${project.version} + + + register-artifact + + register + + + http://localhost:8080/apis/registry/v2 + + + default + SimpleAvroMavenExample-value + AVRO + ${project.basedir}/src/main/resources/schemas/greeting.avsc + + + + + + + + diff --git a/examples/simple-avro-maven/src/main/java/io/apicurio/registry/examples/simple/avro/maven/SimpleAvroMavenExample.java b/examples/simple-avro-maven/src/main/java/io/apicurio/registry/examples/simple/avro/maven/SimpleAvroMavenExample.java index a5b2e59fc2..490a4366ff 100644 --- a/examples/simple-avro-maven/src/main/java/io/apicurio/registry/examples/simple/avro/maven/SimpleAvroMavenExample.java +++ b/examples/simple-avro-maven/src/main/java/io/apicurio/registry/examples/simple/avro/maven/SimpleAvroMavenExample.java @@ -48,27 +48,26 @@ import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe - * scenario with Avro as the serialization type and the Schema pre-registered via a Maven plugin. - * The following aspects are demonstrated: - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * Avro as the serialization type and the Schema pre-registered via a Maven plugin. The following aspects are + * demonstrated: *
    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Pre-register the Avro schema in the registry via the Maven plugin
  6. - *
  7. Data sent as a simple GenericRecord, no java beans needed
  8. + *
  9. Configuring a Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Pre-register the Avro schema in the registry via the Maven plugin
  14. + *
  15. Data sent as a simple GenericRecord, no java beans needed
  16. *
*

* Pre-requisites: - * *

    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • - *
  • Schema is registered by executing "mvn io.apicurio:apicurio-registry-maven-plugin:register@register-artifact"
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Schema is registered by executing "mvn + * io.apicurio:apicurio-registry-maven-plugin:register@register-artifact"
  • *
*

- * Note that this application will fail if the above maven command is not run first, since - * the schema will not be present in the registry. + * Note that this application will fail if the above maven command is not run first, since the schema will not + * be present in the registry. * * @author eric.wittmann@gmail.com */ @@ -79,7 +78,6 @@ public class SimpleAvroMavenExample { private static final String TOPIC_NAME = SimpleAvroMavenExample.class.getSimpleName(); private static final String SUBJECT_NAME = "Greeting"; - public static final void main(String[] args) throws Exception { System.out.println("Starting example " + SimpleAvroMavenExample.class.getSimpleName()); String topicName = TOPIC_NAME; @@ -93,7 +91,8 @@ public static final void main(String[] args) throws Exception { RegistryClient client = new RegistryClient(vertXRequestAdapter); String schemaData = null; - try (InputStream latestArtifact = client.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("1").content().get()) { + try (InputStream latestArtifact = client.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("1").content().get()) { schemaData = toString(latestArtifact); } catch (ApiException e) { System.err.println("Schema not registered in registry. Before running this example, please do:"); @@ -117,7 +116,8 @@ public static final void main(String[] args) throws Exception { record.put("Time", now.getTime()); // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, + record); producer.send(producedRecord); Thread.sleep(100); @@ -147,10 +147,12 @@ public static final void main(String[] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. 
System.out.println("No messages waiting..."); - } else records.forEach(record -> { - GenericRecord value = record.value(); - System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); - }); + } else + records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + + new Date((long) value.get("Time"))); + }); } } finally { consumer.close(); @@ -179,7 +181,7 @@ private static Producer createKafkaProducer() { props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT_IF_EXISTS, IfExists.RETURN.name()); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. configureSecurityIfPresent(props); // Create the Kafka producer @@ -201,15 +203,16 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Avro - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. 
+ // Just if security values are present, then we configure them. configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -243,13 +246,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" " + - " oauth.client.secret=\"%s\" " + - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } diff --git a/examples/simple-avro/pom.xml b/examples/simple-avro/pom.xml index 93032184b6..d9a65cf9e0 100644 --- a/examples/simple-avro/pom.xml +++ b/examples/simple-avro/pom.xml @@ -1,33 +1,32 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-simple-avro - jar + apicurio-registry-examples-simple-avro + jar - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - + + + 
io.apicurio + apicurio-registry-serdes-avro-serde + ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + diff --git a/examples/simple-avro/src/main/java/io/apicurio/registry/examples/simple/avro/SimpleAvroExample.java b/examples/simple-avro/src/main/java/io/apicurio/registry/examples/simple/avro/SimpleAvroExample.java index a0758dea85..0f87213a95 100644 --- a/examples/simple-avro/src/main/java/io/apicurio/registry/examples/simple/avro/SimpleAvroExample.java +++ b/examples/simple-avro/src/main/java/io/apicurio/registry/examples/simple/avro/SimpleAvroExample.java @@ -39,21 +39,19 @@ import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe - * scenario with Avro as the serialization type. The following aspects are demonstrated: - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * Avro as the serialization type. The following aspects are demonstrated: *

    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Auto-register the Avro schema in the registry (registered by the producer)
  6. - *
  7. Data sent as a simple GenericRecord, no java beans needed
  8. + *
  9. Configuring a Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Auto-register the Avro schema in the registry (registered by the producer)
  14. + *
  15. Data sent as a simple GenericRecord, no java beans needed
  16. *
*

* Pre-requisites: - * *

    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author eric.wittmann@gmail.com @@ -66,7 +64,6 @@ public class SimpleAvroExample { private static final String SUBJECT_NAME = "Greeting"; private static final String SCHEMA = "{\"type\":\"record\",\"name\":\"Greeting\",\"fields\":[{\"name\":\"Message\",\"type\":\"string\"},{\"name\":\"Time\",\"type\":\"long\"}]}"; - public static final void main(String[] args) throws Exception { System.out.println("Starting example " + SimpleAvroExample.class.getSimpleName()); String topicName = TOPIC_NAME; @@ -88,7 +85,8 @@ public static final void main(String[] args) throws Exception { record.put("Time", now.getTime()); // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, record); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, subjectName, + record); producer.send(producedRecord); Thread.sleep(100); @@ -118,10 +116,12 @@ public static final void main(String[] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. System.out.println("No messages waiting..."); - } else records.forEach(record -> { - GenericRecord value = record.value(); - System.out.println("Consumed a message: " + value.get("Message") + " @ " + new Date((long) value.get("Time"))); - }); + } else + records.forEach(record -> { + GenericRecord value = record.value(); + System.out.println("Consumed a message: " + value.get("Message") + " @ " + + new Date((long) value.get("Time"))); + }); } } finally { consumer.close(); @@ -150,7 +150,7 @@ private static Producer createKafkaProducer() { // Register the artifact if not found in the registry. props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka producer @@ -172,15 +172,16 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Avro - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + AvroKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -199,13 +200,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" "+ - " oauth.client.secret=\"%s\" "+ - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } diff --git a/examples/simple-json/pom.xml b/examples/simple-json/pom.xml index 0c465976ba..5232545cb2 100644 --- a/examples/simple-json/pom.xml +++ b/examples/simple-json/pom.xml @@ -1,38 +1,37 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-simple-json - jar + apicurio-registry-examples-simple-json + jar - - - io.apicurio - apicurio-registry-serdes-jsonschema-serde - ${project.version} - - - io.apicurio - apicurio-registry-java-sdk - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - + + + 
io.apicurio + apicurio-registry-serdes-jsonschema-serde + ${project.version} + + + io.apicurio + apicurio-registry-java-sdk + ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + diff --git a/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java b/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java index e5dea1f388..3f204263c2 100644 --- a/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java +++ b/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/MessageBean.java @@ -20,10 +20,10 @@ * @author eric.wittmann@gmail.com */ public class MessageBean { - + private String message; private long time; - + /** * Constructor. */ diff --git a/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/SimpleJsonSchemaExample.java b/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/SimpleJsonSchemaExample.java index c8aeff03b4..14e8ef73ff 100644 --- a/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/SimpleJsonSchemaExample.java +++ b/examples/simple-json/src/main/java/io/apicurio/registry/examples/simple/json/SimpleJsonSchemaExample.java @@ -47,26 +47,24 @@ import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe - * scenario with JSON as the serialization type (and JSON Schema for validation). Because JSON - * Schema is only used for validation (not actual serialization), it can be enabled and disabled - * without affecting the functionality of the serializers and deserializers. However, if - * validation is disabled, then incorrect data could be consumed incorrectly. 
+ * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * JSON as the serialization type (and JSON Schema for validation). Because JSON Schema is only used for + * validation (not actual serialization), it can be enabled and disabled without affecting the functionality + * of the serializers and deserializers. However, if validation is disabled, then incorrect data could be + * consumed incorrectly. *

* The following aspects are demonstrated: - * *

    - *
  1. Register the JSON Schema in the registry
  2. - *
  3. Configuring a Kafka Serializer for use with Apicurio Registry
  4. - *
  5. Configuring a Kafka Deserializer for use with Apicurio Registry
  6. - *
  7. Data sent as a MessageBean
  8. + *
  9. Register the JSON Schema in the registry
  10. + *
  11. Configuring a Kafka Serializer for use with Apicurio Registry
  12. + *
  13. Configuring a Kafka Deserializer for use with Apicurio Registry
  14. + *
  15. Data sent as a MessageBean
  16. *
*

* Pre-requisites: - * *

    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author eric.wittmann@gmail.com @@ -77,33 +75,21 @@ public class SimpleJsonSchemaExample { private static final String SERVERS = "localhost:9092"; private static final String TOPIC_NAME = SimpleJsonSchemaExample.class.getSimpleName(); private static final String SUBJECT_NAME = "Greeting"; - public static final String SCHEMA = "{" + - " \"$id\": \"https://example.com/message.schema.json\"," + - " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + - " \"required\": [" + - " \"message\"," + - " \"time\"" + - " ]," + - " \"type\": \"object\"," + - " \"properties\": {" + - " \"message\": {" + - " \"description\": \"\"," + - " \"type\": \"string\"" + - " }," + - " \"time\": {" + - " \"description\": \"\"," + - " \"type\": \"number\"" + - " }" + - " }" + - "}"; - + public static final String SCHEMA = "{" + " \"$id\": \"https://example.com/message.schema.json\"," + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + " \"required\": [" + + " \"message\"," + " \"time\"" + " ]," + " \"type\": \"object\"," + + " \"properties\": {" + " \"message\": {" + " \"description\": \"\"," + + " \"type\": \"string\"" + " }," + " \"time\": {" + + " \"description\": \"\"," + " \"type\": \"number\"" + " }" + + " }" + "}"; public static void main(String[] args) throws Exception { System.out.println("Starting example " + SimpleJsonSchemaExample.class.getSimpleName()); String topicName = TOPIC_NAME; // Register the schema with the registry (only if it is not already registered) - String artifactId = TOPIC_NAME + "-value"; // use the topic name as the artifactId because we're going to map topic name to artifactId later on. + String artifactId = TOPIC_NAME + "-value"; // use the topic name as the artifactId because we're going + // to map topic name to artifactId later on. 
VertXRequestAdapter vertXRequestAdapter = new VertXRequestAdapter(VertXAuthFactory.defaultVertx); vertXRequestAdapter.setBaseUrl(REGISTRY_URL); @@ -115,14 +101,14 @@ public static void main(String[] args) throws Exception { createArtifact.setArtifactType(ArtifactType.JSON); createArtifact.setFirstVersion(new CreateVersion()); createArtifact.getFirstVersion().setContent(new VersionContent()); - createArtifact.getFirstVersion().getContent().setContent( - IoUtil.toString(SCHEMA.getBytes(StandardCharsets.UTF_8)) - ); + createArtifact.getFirstVersion().getContent() + .setContent(IoUtil.toString(SCHEMA.getBytes(StandardCharsets.UTF_8))); createArtifact.getFirstVersion().getContent().setContentType("application/json"); - final VersionMetaData created = client.groups().byGroupId("default").artifacts().post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.FIND_OR_CREATE_VERSION; - }).getVersion(); + final VersionMetaData created = client.groups().byGroupId("default").artifacts() + .post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.FIND_OR_CREATE_VERSION; + }).getVersion(); // Create the producer. Producer producer = createKafkaProducer(); @@ -137,7 +123,8 @@ public static void main(String[] args) throws Exception { message.setTime(System.currentTimeMillis()); // Send/produce the message on the Kafka Producer - ProducerRecord producedRecord = new ProducerRecord<>(topicName, SUBJECT_NAME, message); + ProducerRecord producedRecord = new ProducerRecord<>(topicName, SUBJECT_NAME, + message); producer.send(producedRecord); Thread.sleep(100); @@ -167,10 +154,12 @@ public static void main(String[] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. 
System.out.println("No messages waiting..."); - } else records.forEach(record -> { - MessageBean msg = record.value(); - System.out.println("Consumed a message: " + msg.getMessage() + " @ " + new Date(msg.getTime())); - }); + } else + records.forEach(record -> { + MessageBean msg = record.value(); + System.out.println( + "Consumed a message: " + msg.getMessage() + " @ " + new Date(msg.getTime())); + }); } } finally { consumer.close(); @@ -191,7 +180,8 @@ private static Producer createKafkaProducer() { props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); // Use the Apicurio Registry provided Kafka Serializer for JSON Schema - props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSchemaKafkaSerializer.class.getName()); + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + JsonSchemaKafkaSerializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); @@ -199,7 +189,7 @@ private static Producer createKafkaProducer() { props.putIfAbsent(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, "default"); props.putIfAbsent(SerdeConfig.VALIDATION_ENABLED, Boolean.TRUE); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka producer @@ -221,7 +211,8 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for JSON Schema - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonSchemaKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + JsonSchemaKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); @@ -229,10 +220,10 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(SerdeConfig.VALIDATION_ENABLED, Boolean.TRUE); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -251,13 +242,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" " + - " oauth.client.secret=\"%s\" " + - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } diff --git a/examples/simple-protobuf/pom.xml b/examples/simple-protobuf/pom.xml index acfcbe4430..1be09cf013 100644 --- a/examples/simple-protobuf/pom.xml +++ b/examples/simple-protobuf/pom.xml @@ -1,76 +1,73 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-simple-protobuf - jar + apicurio-registry-examples-simple-protobuf + jar - - 0.6.1 - + + 0.6.1 + - - - io.apicurio - apicurio-registry-serdes-protobuf-serde - ${project.version} - - - org.apache.kafka - kafka-clients - ${kafka.version} - - - io.strimzi - kafka-oauth-client - 0.15.0 - - + + + io.apicurio + 
apicurio-registry-serdes-protobuf-serde + ${project.version} + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + io.strimzi + kafka-oauth-client + 0.15.0 + + - - - - kr.motd.maven - os-maven-plugin - 1.7.1 - - - initialize - - detect - - - - + + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + detect + + initialize + + + - - org.xolstice.maven.plugins - protobuf-maven-plugin - ${proto-plugin.version} - true - - - gencode - generate-sources - - compile - - - - com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} - - - - - - - + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + + compile + + generate-sources + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + diff --git a/examples/simple-protobuf/src/main/java/io/apicurio/registry/examples/simple/protobuf/SimpleProtobufExample.java b/examples/simple-protobuf/src/main/java/io/apicurio/registry/examples/simple/protobuf/SimpleProtobufExample.java index 450ecbeb5d..2664d7943f 100644 --- a/examples/simple-protobuf/src/main/java/io/apicurio/registry/examples/simple/protobuf/SimpleProtobufExample.java +++ b/examples/simple-protobuf/src/main/java/io/apicurio/registry/examples/simple/protobuf/SimpleProtobufExample.java @@ -43,21 +43,19 @@ import java.util.Properties; /** - * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe - * scenario with Protobuf as the serialization type. The following aspects are demonstrated: - * + * This example demonstrates how to use the Apicurio Registry in a very simple publish/subscribe scenario with + * Protobuf as the serialization type. The following aspects are demonstrated: *
    - *
  1. Configuring a Kafka Serializer for use with Apicurio Registry
  2. - *
  3. Configuring a Kafka Deserializer for use with Apicurio Registry
  4. - *
  5. Auto-register the Protobuf schema in the registry (registered by the producer)
  6. - *
  7. Data sent as a custom java bean and received as a generic DynamicMessage
  8. + *
  9. Configuring a Kafka Serializer for use with Apicurio Registry
  10. + *
  11. Configuring a Kafka Deserializer for use with Apicurio Registry
  12. + *
  13. Auto-register the Protobuf schema in the registry (registered by the producer)
  14. + *
  15. Data sent as a custom java bean and received as a generic DynamicMessage
  16. *
*

* Pre-requisites: - * *

    - *
  • Kafka must be running on localhost:9092
  • - *
  • Apicurio Registry must be running on localhost:8080
  • + *
  • Kafka must be running on localhost:9092
  • + *
  • Apicurio Registry must be running on localhost:8080
  • *
* * @author eric.wittmann@gmail.com @@ -69,7 +67,6 @@ public class SimpleProtobufExample { private static final String TOPIC_NAME = SimpleProtobufExample.class.getSimpleName(); private static final String SCHEMA_NAME = "AddressBook"; - public static final void main(String[] args) throws Exception { System.out.println("Starting example " + SimpleProtobufExample.class.getSimpleName()); String topicName = TOPIC_NAME; @@ -83,16 +80,8 @@ public static final void main(String[] args) throws Exception { for (int idx = 0; idx < 2; idx++) { AddressBookProtos.AddressBook book = AddressBook.newBuilder() - .addPeople(Person.newBuilder() - .setEmail("aa@bb.com") - .setId(1) - .setName("aa") - .build()) - .addPeople(Person.newBuilder() - .setEmail("bb@bb.com") - .setId(2) - .setName("bb") - .build()) + .addPeople(Person.newBuilder().setEmail("aa@bb.com").setId(1).setName("aa").build()) + .addPeople(Person.newBuilder().setEmail("bb@bb.com").setId(2).setName("bb").build()) .build(); // Send/produce the message on the Kafka Producer @@ -126,22 +115,24 @@ public static final void main(String[] args) throws Exception { if (records.count() == 0) { // Do nothing - no messages waiting. 
System.out.println("No messages waiting..."); - } else records.forEach(record -> { - DynamicMessage value = record.value(); - System.out.println("Consumed a message: " + value.toString()); - }); + } else + records.forEach(record -> { + DynamicMessage value = record.value(); + System.out.println("Consumed a message: " + value.toString()); + }); } } finally { consumer.close(); } - VertXRequestAdapter vertXRequestAdapter = new VertXRequestAdapter(VertXAuthFactory.defaultVertx); vertXRequestAdapter.setBaseUrl(REGISTRY_URL); RegistryClient client = new RegistryClient(vertXRequestAdapter); System.out.println("The artifact created in Apicurio Registry is: "); - //because the default ArtifactResolverStrategy is TopicIdStrategy the artifactId is in the form of topicName-value - System.out.println(IoUtil.toString(client.groups().byGroupId("default").artifacts().byArtifactId(topicName + "-value").versions().byVersionExpression("1").content().get())); + // because the default ArtifactResolverStrategy is TopicIdStrategy the artifactId is in the form of + // topicName-value + System.out.println(IoUtil.toString(client.groups().byGroupId("default").artifacts() + .byArtifactId(topicName + "-value").versions().byVersionExpression("1").content().get())); System.out.println(); VertXAuthFactory.defaultVertx.close(); System.out.println("Done (success)."); @@ -159,7 +150,8 @@ private static Producer createKafkaProducer() { props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all"); props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); // Use the Apicurio Registry provided Kafka Serializer for Protobuf - props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ProtobufKafkaSerializer.class.getName()); + props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + ProtobufKafkaSerializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); @@ -168,7 +160,7 @@ private static 
Producer createKafkaProducer() { // Register the artifact if not found in the registry. props.putIfAbsent(SerdeConfig.AUTO_REGISTER_ARTIFACT, Boolean.TRUE); - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. configureSecurityIfPresent(props); // Create the Kafka producer @@ -190,19 +182,21 @@ private static KafkaConsumer createKafkaConsumer() { props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); // Use the Apicurio Registry provided Kafka Deserializer for Protobuf - props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ProtobufKafkaDeserializer.class.getName()); + props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + ProtobufKafkaDeserializer.class.getName()); // Configure Service Registry location props.putIfAbsent(SerdeConfig.REGISTRY_URL, REGISTRY_URL); - //this configuration property forces the deserializer to return the generic DynamicMessage - props.putIfAbsent(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, DynamicMessage.class.getName()); + // this configuration property forces the deserializer to return the generic DynamicMessage + props.putIfAbsent(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, + DynamicMessage.class.getName()); // No other configuration needed for the deserializer, because the globalId of the schema - // the deserializer should use is sent as part of the payload. So the deserializer simply + // the deserializer should use is sent as part of the payload. So the deserializer simply // extracts that globalId and uses it to look up the Schema from the registry. - //Just if security values are present, then we configure them. + // Just if security values are present, then we configure them. 
configureSecurityIfPresent(props); // Create the Kafka Consumer @@ -221,13 +215,16 @@ private static void configureSecurityIfPresent(Properties props) { props.putIfAbsent(SerdeConfig.AUTH_CLIENT_ID, authClient); props.putIfAbsent(SerdeConfig.AUTH_TOKEN_ENDPOINT, tokenEndpoint); props.putIfAbsent(SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); - props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); + props.putIfAbsent(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); props.putIfAbsent("security.protocol", "SASL_SSL"); - props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, String.format("org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + - " oauth.client.id=\"%s\" " + - " oauth.client.secret=\"%s\" " + - " oauth.token.endpoint.uri=\"%s\" ;", authClient, authSecret, tokenEndpoint)); + props.putIfAbsent(SaslConfigs.SASL_JAAS_CONFIG, + String.format( + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required " + + " oauth.client.id=\"%s\" " + " oauth.client.secret=\"%s\" " + + " oauth.token.endpoint.uri=\"%s\" ;", + authClient, authSecret, tokenEndpoint)); } } } diff --git a/examples/simple-validation/pom.xml b/examples/simple-validation/pom.xml index d95b1c2249..ef1063f3de 100644 --- a/examples/simple-validation/pom.xml +++ b/examples/simple-validation/pom.xml @@ -1,53 +1,52 @@ - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../pom.xml - + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-examples-simple-validation - jar + apicurio-registry-examples-simple-validation + jar - - - io.apicurio - apicurio-registry-java-sdk - ${project.version} - + + + io.apicurio + apicurio-registry-java-sdk + ${project.version} + - - io.apicurio - apicurio-registry-common - ${project.version} - + + io.apicurio + 
apicurio-registry-common + ${project.version} + - - org.everit.json - org.everit.json.schema - 1.5.1 - + + org.everit.json + org.everit.json.schema + 1.5.1 + - - org.slf4j - slf4j-jdk14 - ${slf4j.version} - - - io.apicurio - apicurio-common-rest-client-common - 0.1.18.Final - compile - - - io.apicurio - apicurio-common-rest-client-jdk - 0.1.18.Final - compile - - + + org.slf4j + slf4j-jdk14 + ${slf4j.version} + + + io.apicurio + apicurio-common-rest-client-common + 0.1.18.Final + compile + + + io.apicurio + apicurio-common-rest-client-jdk + 0.1.18.Final + compile + + - \ No newline at end of file + diff --git a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessagePublisher.java b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessagePublisher.java index 624474d279..e0931d1586 100644 --- a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessagePublisher.java +++ b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessagePublisher.java @@ -38,39 +38,42 @@ public class MessagePublisher { private static final ApicurioHttpClient httpClient; static { - httpClient = new JdkHttpClientProvider().create("http://localhost:12345", Collections.EMPTY_MAP, null, new RestClientErrorHandler() { - @Override - @SuppressWarnings("serial") - public ApicurioRestClientException parseInputSerializingError(JsonProcessingException ex) { - return new ApicurioRestClientException(ex.getMessage()) { - }; - } + httpClient = new JdkHttpClientProvider().create("http://localhost:12345", Collections.EMPTY_MAP, null, + new RestClientErrorHandler() { + @Override + @SuppressWarnings("serial") + public ApicurioRestClientException parseInputSerializingError( + JsonProcessingException ex) { + return new ApicurioRestClientException(ex.getMessage()) { + }; + } - @Override - @SuppressWarnings("serial") - public ApicurioRestClientException parseError(Exception ex) { - 
return new ApicurioRestClientException(ex.getMessage()) { - }; - } + @Override + @SuppressWarnings("serial") + public ApicurioRestClientException parseError(Exception ex) { + return new ApicurioRestClientException(ex.getMessage()) { + }; + } - @Override - @SuppressWarnings("serial") - public ApicurioRestClientException handleErrorResponse(InputStream body, int statusCode) { - return new ApicurioRestClientException("Error with code: " + statusCode) { - }; - } - }); + @Override + @SuppressWarnings("serial") + public ApicurioRestClientException handleErrorResponse(InputStream body, int statusCode) { + return new ApicurioRestClientException("Error with code: " + statusCode) { + }; + } + }); } /** * @param message */ - @SuppressWarnings({"rawtypes"}) + @SuppressWarnings({ "rawtypes" }) public void publishMessage(MessageBean message) { JSONObject messageObj = new JSONObject(message); String data = messageObj.toString(); - Request request = new RequestBuilder().operation(Operation.POST).data(data).responseType(new TypeReference() { - }).build(); + Request request = new RequestBuilder().operation(Operation.POST).data(data) + .responseType(new TypeReference() { + }).build(); httpClient.sendRequest(request); System.out.println("Produced message: " + message); diff --git a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageValidator.java b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageValidator.java index 3fbe079204..48fd003284 100644 --- a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageValidator.java +++ b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/MessageValidator.java @@ -58,7 +58,8 @@ public MessageValidator(String registryUrl, String group, String artifactId) { */ public void validate(MessageBean message) throws IOException, ValidationException { JSONObject jsonSchema; - try (InputStream schemaIS = 
client.groups().byGroupId(group).artifacts().byArtifactId(artifactId).versions().byVersionExpression("1").content().get()) { + try (InputStream schemaIS = client.groups().byGroupId(group).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("1").content().get()) { jsonSchema = new JSONObject(new JSONTokener(schemaIS)); } diff --git a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleBroker.java b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleBroker.java index 02764b7e1b..57237fda1e 100644 --- a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleBroker.java +++ b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleBroker.java @@ -29,7 +29,6 @@ import java.util.LinkedList; import java.util.List; - /** * @author eric.wittmann@gmail.com */ @@ -37,7 +36,7 @@ public class SimpleBroker { private static final int port = 12345; - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({ "unchecked", "rawtypes" }) private List messages = Collections.synchronizedList(new LinkedList()); private int getCursor = 0; diff --git a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleValidationExample.java b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleValidationExample.java index cfcb34b7b7..51f2900fa8 100644 --- a/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleValidationExample.java +++ b/examples/simple-validation/src/main/java/io/apicurio/registry/examples/simple/json/SimpleValidationExample.java @@ -16,11 +16,11 @@ package io.apicurio.registry.examples.simple.json; +import org.everit.json.schema.ValidationException; + import java.security.SecureRandom; import java.util.Optional; -import org.everit.json.schema.ValidationException; - /** * This example demonstrates how to 
integrate with Apicurio Registry when performing client-side validation of * JSON messages. This example imagines a generic scenario where JSON messages are sent/published to a custom @@ -46,7 +46,6 @@ public class SimpleValidationExample { private static final String ARTIFACT_ID = "MessageType"; private static final SecureRandom rand = new SecureRandom(); - public static final void main(String[] args) throws Exception { System.out.println("Starting example " + SimpleValidationExample.class.getSimpleName()); diff --git a/examples/tools/kafkasql-topic-import/pom.xml b/examples/tools/kafkasql-topic-import/pom.xml index 7a1cb4bf52..15bed3c0cd 100644 --- a/examples/tools/kafkasql-topic-import/pom.xml +++ b/examples/tools/kafkasql-topic-import/pom.xml @@ -1,94 +1,93 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry-examples - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry-examples + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-tools-kafkasql-topic-import - jar + apicurio-registry-tools-kafkasql-topic-import + jar - + - - org.projectlombok - lombok - 1.18.28 - + + org.projectlombok + lombok + 1.18.28 + - - com.fasterxml.jackson.core - jackson-core - 2.15.2 - + + com.fasterxml.jackson.core + jackson-core + 2.15.2 + - - com.fasterxml.jackson.core - jackson-databind - 2.15.2 - + + com.fasterxml.jackson.core + jackson-databind + 2.15.2 + - - org.apache.kafka - kafka-clients - 3.2.3 - + + org.apache.kafka + kafka-clients + 3.2.3 + - - com.google.guava - guava - 32.1.3-jre - + + com.google.guava + guava + 32.1.3-jre + - - info.picocli - picocli - 4.7.5 - + + info.picocli + picocli + 4.7.5 + - - org.slf4j - slf4j-api - 2.0.9 - + + org.slf4j + slf4j-api + 2.0.9 + - - org.slf4j - slf4j-simple - 2.0.9 - + + org.slf4j + slf4j-simple + 2.0.9 + - + - - - - maven-assembly-plugin - - - - io.apicurio.registry.tools.kafkasqltopicimport.Main - - - - jar-with-dependencies - - - - - make-assembly - package - - single - - - - - - + + + + 
maven-assembly-plugin + + + + io.apicurio.registry.tools.kafkasqltopicimport.Main + + + + jar-with-dependencies + + + + + make-assembly + + single + + package + + + + + diff --git a/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Envelope.java b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Envelope.java index f3958ead25..f0a3274973 100644 --- a/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Envelope.java +++ b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/Envelope.java @@ -20,7 +20,6 @@ import java.util.List; - /** * @author Jakub Senko m@jsenko.net */ diff --git a/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/ImportCommand.java b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/ImportCommand.java index c3f26f36de..565c2ee7fe 100644 --- a/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/ImportCommand.java +++ b/examples/tools/kafkasql-topic-import/src/main/java/io/apicurio/registry/tools/kafkasqltopicimport/ImportCommand.java @@ -24,7 +24,6 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.header.internals.RecordHeader; import org.apache.kafka.common.serialization.ByteArraySerializer; -import org.slf4j.simple.SimpleLogger; import picocli.CommandLine.Command; import java.io.BufferedReader; @@ -46,20 +45,20 @@ public class ImportCommand implements Runnable { private static final ObjectMapper mapper = new ObjectMapper(); - @Option(names = {"-b", "--bootstrap-sever"}, description = "Kafka bootstrap server URL.", - required = true, defaultValue = "localhost:9092") + @Option(names = { "-b", + "--bootstrap-sever" }, description = "Kafka bootstrap server URL.", required = true, 
defaultValue = "localhost:9092") private String kafkaBootstrapServer; - @Option(names = {"-f", "--file"}, description = "Path to a kafkasql-journal topic dump file. " + - "Messages must use a JSON envelope and have base64-encoded keys and values.", required = true) + @Option(names = { "-f", "--file" }, description = "Path to a kafkasql-journal topic dump file. " + + "Messages must use a JSON envelope and have base64-encoded keys and values.", required = true) private String dumpFilePath; - @Option(names = {"-d", "--debug"}, description = "Print debug log messages.", defaultValue = "false") + @Option(names = { "-d", "--debug" }, description = "Print debug log messages.", defaultValue = "false") private boolean debug; public void run() { - if(debug) { + if (debug) { System.setProperty(org.slf4j.simple.SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "DEBUG"); } else { System.setProperty(org.slf4j.simple.SimpleLogger.DEFAULT_LOG_LEVEL_KEY, "WARN"); @@ -76,32 +75,29 @@ public void run() { envelope.setHeaders(List.of()); } if (envelope.getHeaders().size() % 2 != 0) { - throw new RuntimeException("Invalid length of the headers field: " + envelope.getHeaders().size()); + throw new RuntimeException( + "Invalid length of the headers field: " + envelope.getHeaders().size()); } - var key = envelope.getKey() != null ? Base64.getDecoder().decode(envelope.getKey()) : null; - var value = envelope.getPayload() != null ? Base64.getDecoder().decode(envelope.getPayload()) : null; - - var record = new ProducerRecord<>( - envelope.getTopic(), - envelope.getPartition(), - envelope.getTs(), - key, - value, - Streams.zip( - Streams.zip( - IntStream.range(0, Integer.MAX_VALUE).boxed(), - envelope.getHeaders().stream(), - Tuple::new - ).filter(t -> t.getA() % 2 == 0).map(Tuple::getB), // Even indexes: 0,2,4,... - Streams.zip( - IntStream.range(0, Integer.MAX_VALUE).boxed(), - envelope.getHeaders().stream(), - Tuple::new - ).filter(t -> t.getA() % 2 == 1).map(Tuple::getB), // Odd indexes: 1,3,5,... 
+ var key = envelope.getKey() != null ? Base64.getDecoder().decode(envelope.getKey()) + : null; + var value = envelope.getPayload() != null + ? Base64.getDecoder().decode(envelope.getPayload()) : null; + + var record = new ProducerRecord<>(envelope.getTopic(), envelope.getPartition(), + envelope.getTs(), key, value, Streams + .zip(Streams + .zip(IntStream.range(0, Integer.MAX_VALUE).boxed(), + envelope.getHeaders().stream(), Tuple::new) + .filter(t -> t.getA() % 2 == 0).map(Tuple::getB), // Even indexes: + // 0,2,4,... + Streams.zip(IntStream.range(0, Integer.MAX_VALUE).boxed(), + envelope.getHeaders().stream(), Tuple::new) + .filter(t -> t.getA() % 2 == 1).map(Tuple::getB), // Odd + // indexes: + // 1,3,5,... (k, v) -> new RecordHeader(k, v.getBytes(StandardCharsets.UTF_8))) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); producer.send(record); } } @@ -115,7 +111,6 @@ var record = new ProducerRecord<>( } } - private Producer createKafkaProducer() { Properties props = new Properties(); diff --git a/go-sdk/pom.xml b/go-sdk/pom.xml index 32e0f60b62..6b735728ae 100644 --- a/go-sdk/pom.xml +++ b/go-sdk/pom.xml @@ -1,54 +1,51 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-go-sdk - jar - apicurio-registry-go-sdk + apicurio-registry-go-sdk + jar + apicurio-registry-go-sdk - - - go-sdk-regenerate - - - !skipNonJavaGen - - - - - - org.codehaus.mojo - exec-maven-plugin - 3.3.0 - - - - exec - - generate-sources - - - - make - ${project.basedir} - - generate - format - - - - - - - + + + go-sdk-regenerate + + + !skipNonJavaGen + + + + + + org.codehaus.mojo + exec-maven-plugin + 3.3.0 + + make + ${project.basedir} + + generate + format + + + + + + exec + + generate-sources + + + + + + + diff --git a/ide-config/eclipse-format.xml b/ide-config/eclipse-format.xml new file mode 100644 index 0000000000..eda112bd95 --- /dev/null 
+++ b/ide-config/eclipse-format.xml @@ -0,0 +1,318 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ide-config/eclipse.importorder b/ide-config/eclipse.importorder new file mode 100644 index 0000000000..13ad03cdc7 --- /dev/null +++ b/ide-config/eclipse.importorder @@ -0,0 +1,5 @@ +#Organize Import Order +0= +1=java +2=javax +3=\# diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 48391da14f..f13af511c0 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -1,502 +1,493 @@ - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - - apicurio-registry-integration-tests + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + + apicurio-registry-integration-tests - - true - localhost - 0 - - - - - io.quarkus - quarkus-junit5 - - - io.fabric8 - kubernetes-client - - - kubernetes-httpclient-okhttp - io.fabric8 - - - - - io.fabric8 - kubernetes-httpclient-vertx - - - io.vertx - vertx-uri-template - - - io.fabric8 - openshift-client - - - org.junit.platform - junit-platform-launcher - - - jakarta.ws.rs - jakarta.ws.rs-api - - - io.apicurio - apicurio-registry-java-sdk - - - io.apicurio - apicurio-registry-utils-kafka - test-jar - test - - - io.apicurio - apicurio-registry-utils-tests - - - io.apicurio - apicurio-registry-maven-plugin - maven-plugin - test - - - io.apicurio - apicurio-registry-serdes-avro-serde - test - - - 
io.apicurio - apicurio-registry-serdes-protobuf-serde - test - - - io.apicurio - apicurio-registry-serdes-jsonschema-serde - test - - - io.apicurio - apicurio-registry-utils-converter - test - - - io.apicurio - apicurio-registry-utils-kafka - test - - - - io.confluent - kafka-avro-serializer - provided - - - io.confluent - kafka-connect-avro-converter - test - - - io.rest-assured - rest-assured - test - - - io.zonky.test - embedded-postgres - test - + + true + localhost + 0 + + + + + io.quarkus + quarkus-junit5 + + + io.fabric8 + kubernetes-client + + + io.fabric8 + kubernetes-httpclient-okhttp + + + + + io.fabric8 + kubernetes-httpclient-vertx + + + io.vertx + vertx-uri-template + + + io.fabric8 + openshift-client + + + org.junit.platform + junit-platform-launcher + + + jakarta.ws.rs + jakarta.ws.rs-api + + + io.apicurio + apicurio-registry-java-sdk + + + io.apicurio + apicurio-registry-utils-kafka + test-jar + test + + + io.apicurio + apicurio-registry-utils-tests + + + io.apicurio + apicurio-registry-maven-plugin + maven-plugin + test + + + io.apicurio + apicurio-registry-serdes-avro-serde + test + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + test + + + io.apicurio + apicurio-registry-serdes-jsonschema-serde + test + + + io.apicurio + apicurio-registry-utils-converter + test + + + io.apicurio + apicurio-registry-utils-kafka + test + + + + io.confluent + kafka-avro-serializer + provided + + + io.confluent + kafka-connect-avro-converter + test + + + io.rest-assured + rest-assured + test + + + io.zonky.test + embedded-postgres + test + + + commons-io + commons-io + test + + + io.apicurio + apicurio-registry-utils-import-export + test + + + io.vertx + vertx-core + test + + + + + + + maven-jar-plugin + + + + test-jar + + + + + + maven-failsafe-plugin + + + org.jboss.logmanager.LogManager + ${maven.home} + ${groups} + + + + + + integration-test + verify + + + + + + io.quarkus + quarkus-maven-plugin + + + + build + + + + + + + + + + + local-tests + + 
false + + - commons-io - commons-io - test - - - io.apicurio - apicurio-registry-utils-import-export - test + io.apicurio + apicurio-registry-app - io.vertx - vertx-core - test + io.apicurio + apicurio-registry-app + test-jar + + + io.apicurio + apicurio-registry-schema-util-provider + + - - - + + - - maven-jar-plugin - - - - test-jar - - - - - - maven-failsafe-plugin + + org.apache.maven.plugins + maven-dependency-plugin + + + unpack-static-resources + + unpack + + generate-resources + + + + ${project.groupId} + apicurio-registry-app + ${project.version} + jar + true + **/web.xml,**/application.properties + + + ${project.build.outputDirectory} + false + true + + + + unpack-test-dependencies + + unpack-dependencies + + process-test-classes + + io.apicurio + test-jar + test + ${project.build.directory}/test-classes + **/*IT.class, **/*.properties + false + true + + + + + + org.codehaus.mojo + properties-maven-plugin + + + + set-system-properties + - - org.jboss.logmanager.LogManager - ${maven.home} - ${groups} - + + + quarkus.http.test-host + ${quarkus.http.test-host} + + + quarkus.http.test-port + ${quarkus.http.test-port} + + - - - - integration-test - verify - - - - - - io.quarkus - quarkus-maven-plugin - - - - build - - - - + + + - - - - - - local-tests - - false - - - - - org.apache.maven.plugins - maven-dependency-plugin - - - unpack-static-resources - generate-resources - - unpack - - - - - ${project.groupId} - apicurio-registry-app - ${project.version} - jar - true - **/web.xml,**/application.properties - - - ${project.build.outputDirectory} - false - true - - - - unpack-test-dependencies - process-test-classes - - unpack-dependencies - - - io.apicurio - test-jar - test - ${project.build.directory}/test-classes - **/*IT.class, **/*.properties - false - true - - - - - - org.codehaus.mojo - properties-maven-plugin - - - - set-system-properties - - - - - quarkus.http.test-host - ${quarkus.http.test-host} - - - quarkus.http.test-port - ${quarkus.http.test-port} 
- - - - - - - - - - - io.apicurio - apicurio-registry-app - - - io.apicurio - apicurio-registry-app - test-jar - - - io.apicurio - apicurio-registry-schema-util-provider - - - - - + + - - - remote-mem - - false - - - - - maven-failsafe-plugin - - - - integration-test - - - - - quay.io/apicurio/apicurio-registry:latest-snapshot - - true - true - localhost - 8080 - - - - - - - - - - - remote-kafka - - false - - - - - maven-failsafe-plugin - - - - integration-test - - - - - quay.io/apicurio/apicurio-registry:latest-snapshot - - true - true - localhost - 8080 - - - - - - - - + + + remote-mem + + false + + + + + maven-failsafe-plugin + + + + integration-test + + + + quay.io/apicurio/apicurio-registry:latest-snapshot + true + true + localhost + 8080 + + + + + + + + - - remote-sql - - false - - - - - maven-failsafe-plugin - - - - integration-test - - - - - quay.io/apicurio/apicurio-registry:latest-snapshot - - true - true - localhost - 8080 - - - - - - - - + + remote-kafka + + false + + + + + maven-failsafe-plugin + + + + integration-test + + + + quay.io/apicurio/apicurio-registry:latest-snapshot + true + true + localhost + 8080 + + + + + + + + + + remote-sql + + false + + + + + maven-failsafe-plugin + + + + integration-test + + + + quay.io/apicurio/apicurio-registry:latest-snapshot + true + true + localhost + 8080 + + + + + + + + - - - all - - false - ( smoke | serdes | acceptance | ui) - - + + + all + + false + ( smoke | serdes | acceptance | ui) + + - - ci - - false - ( smoke | serdes | acceptance ) - - + + ci + + false + ( smoke | serdes | acceptance ) + + - - smoke - - smoke - false - - + + smoke + + smoke + false + + - - serdes - - serdes - false - - + + serdes + + serdes + false + + - - ui - - ui - false - - + + ui + + ui + false + + - - acceptance - - false - acceptance - - + + acceptance + + false + acceptance + + - - auth - - auth - false - - - - kafkasql-snapshotting - - kafkasql-snapshotting - false - - - - migration - - false - migration - - + + auth + + auth 
+ false + + + + kafkasql-snapshotting + + kafkasql-snapshotting + false + + + + migration + + false + migration + + - - sqlit - - false - sqlit - - - - - maven-failsafe-plugin - - - - integration-test - - - - - - - + + sqlit + + false + sqlit + + + + + maven-failsafe-plugin + + + + integration-test + + + + + + + - - kafkasqlit - - false - kafkasqlit - - + + kafkasqlit + + false + kafkasqlit + + - - openshift - - - - maven-failsafe-plugin - - - - integration-test - - - - true - - - - - - - - + + openshift + + + + maven-failsafe-plugin + + + + integration-test + + + + true + + + + + + + + - + diff --git a/integration-tests/src/main/java/io/apicurio/deployment/Constants.java b/integration-tests/src/main/java/io/apicurio/deployment/Constants.java index 210bb54330..0e7ecce745 100644 --- a/integration-tests/src/main/java/io/apicurio/deployment/Constants.java +++ b/integration-tests/src/main/java/io/apicurio/deployment/Constants.java @@ -29,8 +29,5 @@ public class Constants { */ static final String SQL = "sqlit"; - - public static final String TEST_PROFILE = - Optional.ofNullable(System.getProperty("groups")) - .orElse(""); + public static final String TEST_PROFILE = Optional.ofNullable(System.getProperty("groups")).orElse(""); } diff --git a/integration-tests/src/test/java/io/apicurio/deployment/KafkaSqlDeploymentManager.java b/integration-tests/src/test/java/io/apicurio/deployment/KafkaSqlDeploymentManager.java index 893f070f55..0258928516 100644 --- a/integration-tests/src/test/java/io/apicurio/deployment/KafkaSqlDeploymentManager.java +++ b/integration-tests/src/test/java/io/apicurio/deployment/KafkaSqlDeploymentManager.java @@ -40,11 +40,9 @@ public class KafkaSqlDeploymentManager { static void deployKafkaApp(String registryImage) throws Exception { if (Constants.TEST_PROFILE.equals(Constants.AUTH)) { prepareTestsInfra(KAFKA_RESOURCES, APPLICATION_KAFKA_SECURED_RESOURCES, true, registryImage); - } - else if 
(Constants.TEST_PROFILE.equals(Constants.KAFKA_SQL_SNAPSHOTTING)) { + } else if (Constants.TEST_PROFILE.equals(Constants.KAFKA_SQL_SNAPSHOTTING)) { prepareKafkaSqlSnapshottingTests(registryImage); - } - else { + } else { prepareTestsInfra(KAFKA_RESOURCES, APPLICATION_KAFKA_RESOURCES, false, registryImage); } } @@ -52,20 +50,21 @@ else if (Constants.TEST_PROFILE.equals(Constants.KAFKA_SQL_SNAPSHOTTING)) { private static void prepareKafkaSqlSnapshottingTests(String registryImage) throws Exception { LOGGER.info("Preparing data for KafkaSQL snapshot tests..."); - //First we deploy the Registry application with all the required data. + // First we deploy the Registry application with all the required data. prepareTestsInfra(KAFKA_RESOURCES, APPLICATION_KAFKA_RESOURCES, false, registryImage); prepareSnapshotData(ApicurioRegistryBaseIT.getRegistryV3ApiUrl()); - //Once all the data has been introduced, the existing deployment is deleted so all the replicas are re-created and restored from the snapshot. + // Once all the data has been introduced, the existing deployment is deleted so all the replicas are + // re-created and restored from the snapshot. deleteRegistryDeployment(); - //Now we re-recreate the deployment so all the replicas are restored from the snapshot. + // Now we re-recreate the deployment so all the replicas are restored from the snapshot. LOGGER.info("Finished preparing data for the KafkaSQL snapshot tests."); prepareTestsInfra(null, APPLICATION_KAFKA_RESOURCES, false, registryImage); } private static void prepareSnapshotData(String registryBaseUrl) { - //Create a bunch of artifacts and rules, so they're added to the snapshot. + // Create a bunch of artifacts and rules, so they're added to the snapshot. 
String simpleAvro = resourceToString("artifactTypes/avro/multi-field_v1.json"); var adapter = new VertXRequestAdapter(VertXAuthFactory.defaultVertx); @@ -75,14 +74,15 @@ private static void prepareSnapshotData(String registryBaseUrl) { LOGGER.info("Creating 1000 artifacts that will be packed into a snapshot.."); for (int idx = 0; idx < 1000; idx++) { String artifactId = UUID.randomUUID().toString(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, simpleAvro, - ContentTypes.APPLICATION_JSON); - client.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts() - .post(createArtifact, config -> config.headers.add("X-Registry-ArtifactId", artifactId)); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, + simpleAvro, ContentTypes.APPLICATION_JSON); + client.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts().post(createArtifact, + config -> config.headers.add("X-Registry-ArtifactId", artifactId)); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("SYNTAX_ONLY"); - client.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts().byArtifactId(artifactId).rules().post(createRule); + client.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts() + .byArtifactId(artifactId).rules().post(createRule); } LOGGER.info("Creating kafkasql snapshot.."); @@ -91,34 +91,34 @@ private static void prepareSnapshotData(String registryBaseUrl) { LOGGER.info("Adding new artifacts on top of the snapshot.."); for (int idx = 0; idx < 1000; idx++) { String artifactId = UUID.randomUUID().toString(); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, simpleAvro, - ContentTypes.APPLICATION_JSON); - client.groups().byGroupId("default").artifacts() - .post(createArtifact, config -> config.headers.add("X-Registry-ArtifactId", artifactId)); + CreateArtifact 
createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, + simpleAvro, ContentTypes.APPLICATION_JSON); + client.groups().byGroupId("default").artifacts().post(createArtifact, + config -> config.headers.add("X-Registry-ArtifactId", artifactId)); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("SYNTAX_ONLY"); - client.groups().byGroupId("default").artifacts().byArtifactId(artifactId).rules().post(createRule); + client.groups().byGroupId("default").artifacts().byArtifactId(artifactId).rules() + .post(createRule); } } private static void deleteRegistryDeployment() { - final RollableScalableResource deploymentResource = kubernetesClient.apps().deployments().inNamespace(TEST_NAMESPACE) - .withName(APPLICATION_DEPLOYMENT); + final RollableScalableResource deploymentResource = kubernetesClient.apps().deployments() + .inNamespace(TEST_NAMESPACE).withName(APPLICATION_DEPLOYMENT); - kubernetesClient.apps().deployments().inNamespace(TEST_NAMESPACE).withName(APPLICATION_DEPLOYMENT).delete(); + kubernetesClient.apps().deployments().inNamespace(TEST_NAMESPACE).withName(APPLICATION_DEPLOYMENT) + .delete(); - //Wait for the deployment to be deleted + // Wait for the deployment to be deleted CompletableFuture> deployment = deploymentResource .informOnCondition(Collection::isEmpty); try { deployment.get(60, TimeUnit.SECONDS); - } - catch (ExecutionException | InterruptedException | TimeoutException e) { + } catch (ExecutionException | InterruptedException | TimeoutException e) { LOGGER.warn("Error waiting for deployment deletion", e); - } - finally { + } finally { deployment.cancel(true); } } diff --git a/integration-tests/src/test/java/io/apicurio/deployment/PortForwardManager.java b/integration-tests/src/test/java/io/apicurio/deployment/PortForwardManager.java index a94f1b51e9..750b30e2fd 100644 --- a/integration-tests/src/test/java/io/apicurio/deployment/PortForwardManager.java +++ 
b/integration-tests/src/test/java/io/apicurio/deployment/PortForwardManager.java @@ -24,8 +24,7 @@ public class PortForwardManager implements BeforeAllCallback, AfterAllCallback, public PortForwardManager() { if (Boolean.parseBoolean(System.getProperty("cluster.tests"))) { - kubernetesClient = new KubernetesClientBuilder() - .build(); + kubernetesClient = new KubernetesClientBuilder().build(); } } @@ -49,19 +48,18 @@ public void afterAll(ExtensionContext context) throws Exception { } } - private void startKeycloakPortForward() { try { if (keycloakPortForward != null) { keycloakPortForward.close(); } - //Create the keycloak port forward so the tests can reach it to get tokens - keycloakPortForward = kubernetesClient.services() - .inNamespace(TEST_NAMESPACE) - .withName(KEYCLOAK_SERVICE) - .portForward(8090, 8090); + // Create the keycloak port forward so the tests can reach it to get tokens + keycloakPortForward = kubernetesClient.services().inNamespace(TEST_NAMESPACE) + .withName(KEYCLOAK_SERVICE).portForward(8090, 8090); } catch (IllegalStateException | IOException ex) { - logger.warn("Error found forwarding keycloak port, the port forwarding might be running already, continuing...", ex); + logger.warn( + "Error found forwarding keycloak port, the port forwarding might be running already, continuing...", + ex); } } diff --git a/integration-tests/src/test/java/io/apicurio/deployment/RegistryDeploymentManager.java b/integration-tests/src/test/java/io/apicurio/deployment/RegistryDeploymentManager.java index a60bc56bc9..f016b02d07 100644 --- a/integration-tests/src/test/java/io/apicurio/deployment/RegistryDeploymentManager.java +++ b/integration-tests/src/test/java/io/apicurio/deployment/RegistryDeploymentManager.java @@ -37,8 +37,7 @@ public class RegistryDeploymentManager implements TestExecutionListener { public void testPlanExecutionStarted(TestPlan testPlan) { if (Boolean.parseBoolean(System.getProperty("cluster.tests"))) { - kubernetesClient = new 
KubernetesClientBuilder() - .build(); + kubernetesClient = new KubernetesClientBuilder().build(); try { handleInfraDeployment(); @@ -56,11 +55,11 @@ public void testPlanExecutionFinished(TestPlan testPlan) { try { - //Finally, once the testsuite is done, cleanup all the resources in the cluster - if (kubernetesClient != null && !(Boolean.parseBoolean(System.getProperty("preserveNamespace")))) { + // Finally, once the testsuite is done, cleanup all the resources in the cluster + if (kubernetesClient != null + && !(Boolean.parseBoolean(System.getProperty("preserveNamespace")))) { LOGGER.info("Closing test resources ##################################################"); - if (logWatch != null && !logWatch.isEmpty()) { logWatch.forEach(LogWatch::close); } @@ -81,39 +80,46 @@ public void testPlanExecutionFinished(TestPlan testPlan) { } private void handleInfraDeployment() throws Exception { - //First, create the namespace used for the test. - kubernetesClient.load(getClass().getResourceAsStream(E2E_NAMESPACE_RESOURCE)) - .create(); + // First, create the namespace used for the test. 
+ kubernetesClient.load(getClass().getResourceAsStream(E2E_NAMESPACE_RESOURCE)).create(); - //Based on the configuration, deploy the appropriate variant + // Based on the configuration, deploy the appropriate variant if (Boolean.parseBoolean(System.getProperty("deployInMemory"))) { - LOGGER.info("Deploying In Memory Registry Variant with image: {} ##################################################", System.getProperty("registry-in-memory-image")); + LOGGER.info( + "Deploying In Memory Registry Variant with image: {} ##################################################", + System.getProperty("registry-in-memory-image")); InMemoryDeploymentManager.deployInMemoryApp(System.getProperty("registry-in-memory-image")); logWatch = streamPodLogs("apicurio-registry-memory"); } else if (Boolean.parseBoolean(System.getProperty("deploySql"))) { - LOGGER.info("Deploying SQL Registry Variant with image: {} ##################################################", System.getProperty("registry-sql-image")); + LOGGER.info( + "Deploying SQL Registry Variant with image: {} ##################################################", + System.getProperty("registry-sql-image")); SqlDeploymentManager.deploySqlApp(System.getProperty("registry-sql-image")); logWatch = streamPodLogs("apicurio-registry-sql"); } else if (Boolean.parseBoolean(System.getProperty("deployKafka"))) { - LOGGER.info("Deploying Kafka SQL Registry Variant with image: {} ##################################################", System.getProperty("registry-kafkasql-image")); + LOGGER.info( + "Deploying Kafka SQL Registry Variant with image: {} ##################################################", + System.getProperty("registry-kafkasql-image")); KafkaSqlDeploymentManager.deployKafkaApp(System.getProperty("registry-kafkasql-image")); logWatch = streamPodLogs("apicurio-registry-kafka"); } } - static void prepareTestsInfra(String externalResources, String registryResources, boolean startKeycloak, String - registryImage) throws IOException { + 
static void prepareTestsInfra(String externalResources, String registryResources, boolean startKeycloak, + String registryImage) throws IOException { if (startKeycloak) { LOGGER.info("Deploying Keycloak resources ##################################################"); deployResource(KEYCLOAK_RESOURCES); } if (externalResources != null) { - LOGGER.info("Deploying external dependencies for Registry ##################################################"); + LOGGER.info( + "Deploying external dependencies for Registry ##################################################"); deployResource(externalResources); } - final InputStream resourceAsStream = RegistryDeploymentManager.class.getResourceAsStream(registryResources); + final InputStream resourceAsStream = RegistryDeploymentManager.class + .getResourceAsStream(registryResources); assert resourceAsStream != null; @@ -124,22 +130,22 @@ static void prepareTestsInfra(String externalResources, String registryResources } try { - //Deploy all the resources associated to the registry variant - kubernetesClient.load(IOUtils.toInputStream(registryLoadedResources, StandardCharsets.UTF_8.name())) + // Deploy all the resources associated to the registry variant + kubernetesClient + .load(IOUtils.toInputStream(registryLoadedResources, StandardCharsets.UTF_8.name())) .create(); } catch (Exception ex) { LOGGER.debug("Error creating registry resources:", ex); } - //Wait for all the pods of the variant to be ready - kubernetesClient.pods() - .inNamespace(TEST_NAMESPACE).waitUntilReady(360, TimeUnit.SECONDS); + // Wait for all the pods of the variant to be ready + kubernetesClient.pods().inNamespace(TEST_NAMESPACE).waitUntilReady(360, TimeUnit.SECONDS); setupTestNetworking(); } private static void setupTestNetworking() { - //For openshift, a route to the application is created we use it to set up the networking needs. + // For openshift, a route to the application is created we use it to set up the networking needs. 
if (Boolean.parseBoolean(System.getProperty("openshift.resources"))) { OpenShiftClient openShiftClient = new DefaultOpenShiftClient(); @@ -156,36 +162,36 @@ private static void setupTestNetworking() { LOGGER.warn("The registry route already exists: ", ex); } - } else { - //If we're running the cluster tests but no external endpoint has been provided, set the value of the load balancer. - if (System.getProperty("quarkus.http.test-host").equals("localhost") && !System.getProperty("os.name").contains("Mac OS")) { - System.setProperty("quarkus.http.test-host", kubernetesClient.services().inNamespace(TEST_NAMESPACE).withName(APPLICATION_SERVICE).get().getSpec().getClusterIP()); + // If we're running the cluster tests but no external endpoint has been provided, set the value of + // the load balancer. + if (System.getProperty("quarkus.http.test-host").equals("localhost") + && !System.getProperty("os.name").contains("Mac OS")) { + System.setProperty("quarkus.http.test-host", + kubernetesClient.services().inNamespace(TEST_NAMESPACE).withName(APPLICATION_SERVICE) + .get().getSpec().getClusterIP()); } } } private static void deployResource(String resource) { - //Deploy all the resources associated to the external requirements - kubernetesClient.load(RegistryDeploymentManager.class.getResourceAsStream(resource)) - .create(); + // Deploy all the resources associated to the external requirements + kubernetesClient.load(RegistryDeploymentManager.class.getResourceAsStream(resource)).create(); - //Wait for all the external resources pods to be ready - kubernetesClient.pods() - .inNamespace(TEST_NAMESPACE).waitUntilReady(360, TimeUnit.SECONDS); + // Wait for all the external resources pods to be ready + kubernetesClient.pods().inNamespace(TEST_NAMESPACE).waitUntilReady(360, TimeUnit.SECONDS); } private static List streamPodLogs(String container) { List logWatchList = new ArrayList<>(); - PodList podList = kubernetesClient.pods().inNamespace(TEST_NAMESPACE).withLabel("app", 
container).list(); + PodList podList = kubernetesClient.pods().inNamespace(TEST_NAMESPACE).withLabel("app", container) + .list(); - podList.getItems().forEach(p -> logWatchList.add(kubernetesClient.pods() - .inNamespace(TEST_NAMESPACE) - .withName(p.getMetadata().getName()) - .inContainer(container) - .tailingLines(10) - .watchLog(System.out))); + podList.getItems() + .forEach(p -> logWatchList.add(kubernetesClient.pods().inNamespace(TEST_NAMESPACE) + .withName(p.getMetadata().getName()).inContainer(container).tailingLines(10) + .watchLog(System.out))); return logWatchList; } diff --git a/integration-tests/src/test/java/io/apicurio/tests/ApicurioRegistryBaseIT.java b/integration-tests/src/test/java/io/apicurio/tests/ApicurioRegistryBaseIT.java index d66cc37306..d6a7700f5d 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/ApicurioRegistryBaseIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/ApicurioRegistryBaseIT.java @@ -67,9 +67,8 @@ import static org.junit.jupiter.api.Assertions.assertTrue; /** - * Base class for all base classes for integration tests or for integration tests directly. - * This class must not contain any functionality nor implement any beforeAll, beforeEach. - * + * Base class for all base classes for integration tests or for integration tests directly. This class must + * not contain any functionality nor implement any beforeAll, beforeEach. 
*/ @DisplayNameGeneration(SimpleDisplayName.class) @TestInstance(Lifecycle.PER_CLASS) @@ -83,7 +82,8 @@ public class ApicurioRegistryBaseIT implements TestSeparator, Constants { protected final Logger logger = LoggerFactory.getLogger(this.getClass().getName()); - protected Function errorCodeExtractor = e -> ((ApiException)e).getResponseStatusCode(); + protected Function errorCodeExtractor = e -> ((ApiException) e) + .getResponseStatusCode(); protected RegistryClient registryClient; @@ -97,7 +97,8 @@ protected RegistryClient createRegistryClient() { @BeforeAll void prepareRestAssured() { - authServerUrlConfigured = Optional.ofNullable(ConfigProvider.getConfig().getConfigValue("quarkus.oidc.token-path").getValue()) + authServerUrlConfigured = Optional + .ofNullable(ConfigProvider.getConfig().getConfigValue("quarkus.oidc.token-path").getValue()) .orElse("http://localhost:8090/realms/registry/protocol/openid-connect/token"); registryClient = createRegistryClient(); RestAssured.baseURI = getRegistryV3ApiUrl(); @@ -110,15 +111,18 @@ void prepareRestAssured() { public void cleanArtifacts() throws Exception { logger.info("Removing all artifacts"); // Retrying to delete artifacts can solve the problem with bad order caused by artifacts references - // TODO: Solve problem with artifact references circle - maybe use of deleteAllUserData for cleaning artifacts after IT + // TODO: Solve problem with artifact references circle - maybe use of deleteAllUserData for cleaning + // artifacts after IT retry(() -> { ArtifactSearchResults artifacts = registryClient.search().artifacts().get(); for (SearchedArtifact artifact : artifacts.getArtifacts()) { try { - registryClient.groups().byGroupId(normalizeGroupId(artifact.getGroupId())).artifacts().byArtifactId(artifact.getArtifactId()).delete(); - registryClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()).artifacts().delete(); + registryClient.groups().byGroupId(normalizeGroupId(artifact.getGroupId())).artifacts() 
+ .byArtifactId(artifact.getArtifactId()).delete(); + registryClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString()) + .artifacts().delete(); } catch (ApiException e) { - //because of async storage artifact may be already deleted but listed anyway + // because of async storage artifact may be already deleted but listed anyway logger.info(e.getMessage()); } catch (Exception e) { logger.error("", e); @@ -134,9 +138,11 @@ private static String normalizeGroupId(String groupId) { return groupId != null ? groupId : "default"; // TODO } - protected CreateArtifactResponse createArtifact(String groupId, String artifactId, String artifactType, String content, - String contentType, IfArtifactExists ifExists, Consumer customizer) throws Exception { - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, artifactType, content, contentType); + protected CreateArtifactResponse createArtifact(String groupId, String artifactId, String artifactType, + String content, String contentType, IfArtifactExists ifExists, + Consumer customizer) throws Exception { + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, artifactType, content, + contentType); if (customizer != null) { customizer.accept(createArtifact); } @@ -149,33 +155,38 @@ protected CreateArtifactResponse createArtifact(String groupId, String artifactI // make sure we have schema registered ensureClusterSync(response.getVersion().getGlobalId()); - ensureClusterSync(normalizeGroupId(response.getArtifact().getGroupId()), response.getArtifact().getArtifactId(), - String.valueOf(response.getVersion().getVersion())); + ensureClusterSync(normalizeGroupId(response.getArtifact().getGroupId()), + response.getArtifact().getArtifactId(), String.valueOf(response.getVersion().getVersion())); return response; } - protected VersionMetaData createArtifactVersion(String groupId, String artifactId, String content, String contentType, Consumer customizer) throws Exception { + 
protected VersionMetaData createArtifactVersion(String groupId, String artifactId, String content, + String contentType, Consumer customizer) throws Exception { CreateVersion createVersion = TestUtils.clientCreateVersion(content, contentType); if (customizer != null) { customizer.accept(createVersion); } - VersionMetaData meta = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + VersionMetaData meta = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().post(createVersion); - //wait for storage + // wait for storage ensureClusterSync(meta.getGlobalId()); - ensureClusterSync(normalizeGroupId(meta.getGroupId()), meta.getArtifactId(), String.valueOf(meta.getVersion())); + ensureClusterSync(normalizeGroupId(meta.getGroupId()), meta.getArtifactId(), + String.valueOf(meta.getVersion())); return meta; } - //DO NOT USE FOR CREATE OR UPDATE OPERATIONS + // DO NOT USE FOR CREATE OR UPDATE OPERATIONS protected void retryOp(RegistryWaitUtils.ConsumerExc registryOp) throws Exception { RegistryWaitUtils.retry(registryClient, registryOp); } - //DO NOT USE FOR CREATE OR UPDATE OPERATIONS - protected void retryAssertClientError(String expectedErrorName, int expectedCode, RegistryWaitUtils.ConsumerExc registryOp, Function errorCodeExtractor) throws Exception { + // DO NOT USE FOR CREATE OR UPDATE OPERATIONS + protected void retryAssertClientError(String expectedErrorName, int expectedCode, + RegistryWaitUtils.ConsumerExc registryOp, + Function errorCodeExtractor) throws Exception { RegistryWaitUtils.retry(registryClient, (rc) -> { assertClientError(expectedErrorName, expectedCode, () -> registryOp.run(rc), errorCodeExtractor); }); @@ -186,7 +197,8 @@ private void ensureClusterSync(Long globalId) throws Exception { } private void ensureClusterSync(String groupId, String artifactId, String version) throws Exception { - retry(() -> 
registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(version).get()); + retry(() -> registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression(version).get()); } private void ensureClusterSync(Consumer function) throws Exception { @@ -194,21 +206,18 @@ private void ensureClusterSync(Consumer function) throws Excepti } protected List listArtifactVersions(RegistryClient rc, String groupId, String artifactId) { - return rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) - .versions().get(config -> { - config.queryParameters.limit = 10; - config.queryParameters.offset = 0; - }) - .getVersions() - .stream() - .map(SearchedVersion::getVersion) - .collect(Collectors.toList()); + return rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().get(config -> { + config.queryParameters.limit = 10; + config.queryParameters.offset = 0; + }).getVersions().stream().map(SearchedVersion::getVersion).collect(Collectors.toList()); } public static String resourceToString(String resourceName) { - try (InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream(resourceName)) { + try (InputStream stream = Thread.currentThread().getContextClassLoader() + .getResourceAsStream(resourceName)) { assertNotNull(stream, "Resource not found: " + resourceName); - return new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)).lines().collect(Collectors.joining("\n")); + return new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)).lines() + .collect(Collectors.joining("\n")); } catch (IOException e) { throw new RuntimeException(e); } @@ -242,7 +251,8 @@ public static String getRegistryBaseUrl() { if (REGISTRY_URL != null) { return String.format("http://%s:%s", REGISTRY_URL.getHost(), REGISTRY_URL.getPort()); } else { - return String.format("http://%s:%s", 
System.getProperty("quarkus.http.test-host"), System.getProperty("quarkus.http.test-port")); + return String.format("http://%s:%s", System.getProperty("quarkus.http.test-host"), + System.getProperty("quarkus.http.test-port")); } } @@ -284,17 +294,19 @@ public boolean isReachable() { /** * Poll the given {@code ready} function every {@code pollIntervalMs} milliseconds until it returns true, - * or throw a TimeoutException if it doesn't returns true within {@code timeoutMs} milliseconds. - * (helpful if you have several calls which need to share a common timeout) + * or throw a TimeoutException if it doesn't returns true within {@code timeoutMs} milliseconds. (helpful + * if you have several calls which need to share a common timeout) * * @return The remaining time left until timeout occurs */ - public long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready) throws TimeoutException { + public long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready) + throws TimeoutException { return waitFor(description, pollIntervalMs, timeoutMs, ready, () -> { }); } - public long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready, Runnable onTimeout) throws TimeoutException { + public long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready, + Runnable onTimeout) throws TimeoutException { log.debug("Waiting for {}", description); long deadline = System.currentTimeMillis() + timeoutMs; while (true) { @@ -310,13 +322,15 @@ public long waitFor(String description, long pollIntervalMs, long timeoutMs, Boo } if (timeLeft <= 0) { onTimeout.run(); - TimeoutException exception = new TimeoutException("Timeout after " + timeoutMs + " ms waiting for " + description); + TimeoutException exception = new TimeoutException( + "Timeout after " + timeoutMs + " ms waiting for " + description); exception.printStackTrace(); throw exception; } long sleepTime = 
Math.min(pollIntervalMs, timeLeft); if (log.isTraceEnabled()) { - log.trace("{} not ready, will try again in {} ms ({}ms till timeout)", description, sleepTime, timeLeft); + log.trace("{} not ready, will try again in {} ms ({}ms till timeout)", description, sleepTime, + timeLeft); } try { Thread.sleep(sleepTime); @@ -330,7 +344,7 @@ public long waitFor(String description, long pollIntervalMs, long timeoutMs, Boo * Method to create and write String content file. * * @param filePath path to file - * @param text content + * @param text content */ public void writeFile(String filePath, String text) { try { @@ -433,7 +447,8 @@ private T retry(Callable callable, String name, int maxRetries, long delt throw new IllegalStateException("Should not be here!"); } - public void assertClientError(String expectedErrorName, int expectedCode, TestUtils.RunnableExc runnable, Function errorCodeExtractor) throws Exception { + public void assertClientError(String expectedErrorName, int expectedCode, TestUtils.RunnableExc runnable, + Function errorCodeExtractor) throws Exception { try { internalAssertClientError(expectedErrorName, expectedCode, runnable, errorCodeExtractor); } catch (Exception e) { @@ -441,22 +456,28 @@ public void assertClientError(String expectedErrorName, int expectedCode, TestUt } } - public void assertClientError(String expectedErrorName, int expectedCode, TestUtils.RunnableExc runnable, boolean retry, Function errorCodeExtractor) throws Exception { + public void assertClientError(String expectedErrorName, int expectedCode, TestUtils.RunnableExc runnable, + boolean retry, Function errorCodeExtractor) throws Exception { if (retry) { - retry(() -> internalAssertClientError(expectedErrorName, expectedCode, runnable, errorCodeExtractor)); + retry(() -> internalAssertClientError(expectedErrorName, expectedCode, runnable, + errorCodeExtractor)); } else { internalAssertClientError(expectedErrorName, expectedCode, runnable, errorCodeExtractor); } } - private void 
internalAssertClientError(String expectedErrorName, int expectedCode, TestUtils.RunnableExc runnable, Function errorCodeExtractor) { + private void internalAssertClientError(String expectedErrorName, int expectedCode, + TestUtils.RunnableExc runnable, Function errorCodeExtractor) { try { runnable.run(); - Assertions.fail("Expected (but didn't get) a registry client application exception with code: " + expectedCode); + Assertions.fail("Expected (but didn't get) a registry client application exception with code: " + + expectedCode); } catch (Exception e) { Assertions.assertEquals(io.apicurio.registry.rest.client.models.Error.class, e.getClass()); - Assertions.assertEquals(expectedErrorName, ((io.apicurio.registry.rest.client.models.Error)e).getName()); - Assertions.assertEquals(expectedCode, ((io.apicurio.registry.rest.client.models.Error)e).getErrorCode()); + Assertions.assertEquals(expectedErrorName, + ((io.apicurio.registry.rest.client.models.Error) e).getName()); + Assertions.assertEquals(expectedCode, + ((io.apicurio.registry.rest.client.models.Error) e).getErrorCode()); } } @@ -466,7 +487,8 @@ public void waitForSchema(Predicate schemaFinder, byte[] bytes) throws Exc waitForSchema(schemaFinder, bytes, ByteBuffer::getLong); } - public void waitForSchema(Predicate schemaFinder, byte[] bytes, Function globalIdExtractor) throws Exception { + public void waitForSchema(Predicate schemaFinder, byte[] bytes, + Function globalIdExtractor) throws Exception { waitForSchemaCustom(schemaFinder, bytes, input -> { ByteBuffer buffer = ByteBuffer.wrap(input); buffer.get(); // magic byte @@ -475,7 +497,8 @@ public void waitForSchema(Predicate schemaFinder, byte[] bytes, Function schemaFinder, byte[] bytes, Function globalIdExtractor) throws Exception { + public void waitForSchemaCustom(Predicate schemaFinder, byte[] bytes, + Function globalIdExtractor) throws Exception { long id = globalIdExtractor.apply(bytes); boolean schemaExists = retry(() -> schemaFinder.test(id)); 
Assertions.assertTrue(schemaExists); // wait for global id to populate @@ -493,7 +516,6 @@ public final String normalizeMultiLineString(String value) throws Exception { return builder.toString(); } - public Response getArtifact(String groupId, String artifactId) { return getArtifact(groupId, artifactId, "", 200); } @@ -503,8 +525,8 @@ public Response getArtifact(String groupId, String artifactId, int returnCode) { } public Response getArtifact(String groupId, String artifactId, String version, int returnCode) { - return - getRequest(RestConstants.JSON, "/groups/" + encodeURIComponent(groupId) + "/artifacts/" + encodeURIComponent(artifactId) + "/" + version, returnCode); + return getRequest(RestConstants.JSON, "/groups/" + encodeURIComponent(groupId) + "/artifacts/" + + encodeURIComponent(artifactId) + "/" + version, returnCode); } private String encodeURIComponent(String value) { @@ -516,155 +538,69 @@ private String encodeURIComponent(String value) { } public Response getRequest(String contentType, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .get(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).get(getRegistryV3ApiUrl() + endpoint).then() + .statusCode(returnCode).extract().response(); } public Response getRequest(String contentType, URL endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .get(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).get(getRegistryV3ApiUrl() + endpoint).then() + .statusCode(returnCode).extract().response(); } public Response postRequest(String contentType, String body, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(body) - .post(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - 
.response(); + return given().when().contentType(contentType).body(body).post(getRegistryV3ApiUrl() + endpoint) + .then().statusCode(returnCode).extract().response(); } public Response postRequest(String contentType, String body, URL endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(body) - .post(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(body).post(getRegistryV3ApiUrl() + endpoint) + .then().statusCode(returnCode).extract().response(); } public Response putRequest(String contentType, String body, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(body) - .put(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(body).put(getRegistryV3ApiUrl() + endpoint).then() + .statusCode(returnCode).extract().response(); } public Response putRequest(String contentType, String body, URL endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(body) - .put(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(body).put(getRegistryV3ApiUrl() + endpoint).then() + .statusCode(returnCode).extract().response(); } public Response deleteRequest(String contentType, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .delete(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).delete(getRegistryV3ApiUrl() + endpoint).then() + .statusCode(returnCode).extract().response(); } public Response rulesPostRequest(String contentType, String rule, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - 
.body(rule) - .post(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(rule).post(getRegistryV3ApiUrl() + endpoint) + .then().statusCode(returnCode).extract().response(); } public Response rulesPostRequest(String contentType, String rule, URL endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(rule) - .post(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(rule).post(getRegistryV3ApiUrl() + endpoint) + .then().statusCode(returnCode).extract().response(); } public Response rulesGetRequest(String contentType, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .get(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).get(getRegistryV3ApiUrl() + endpoint).then() + .statusCode(returnCode).extract().response(); } public Response rulesPutRequest(String contentType, String rule, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(rule) - .put(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(rule).put(getRegistryV3ApiUrl() + endpoint).then() + .statusCode(returnCode).extract().response(); } public Response rulesDeleteRequest(String contentType, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .delete(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); - } - - public Response artifactPostRequest(String artifactId, String contentType, String body, String endpoint, int returnCode) { - return given() - .when() - .header("X-Registry-Artifactid", artifactId) - 
.contentType(contentType) - .body(body) - .post(getRegistryV3ApiUrl() + endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).delete(getRegistryV3ApiUrl() + endpoint).then() + .statusCode(returnCode).extract().response(); + } + + public Response artifactPostRequest(String artifactId, String contentType, String body, String endpoint, + int returnCode) { + return given().when().header("X-Registry-Artifactid", artifactId).contentType(contentType).body(body) + .post(getRegistryV3ApiUrl() + endpoint).then().statusCode(returnCode).extract().response(); } protected void assertNotAuthorized(Exception exception) { @@ -676,6 +612,6 @@ protected void assertNotAuthorized(Exception exception) { protected void assertForbidden(Exception exception) { assertNotNull(exception); Assertions.assertEquals(ApiException.class, exception.getClass()); - Assertions.assertEquals(403, ((ApiException)exception).getResponseStatusCode()); + Assertions.assertEquals(403, ((ApiException) exception).getResponseStatusCode()); } } diff --git a/integration-tests/src/test/java/io/apicurio/tests/ConfluentBaseIT.java b/integration-tests/src/test/java/io/apicurio/tests/ConfluentBaseIT.java index dd2e0ef74f..df7fa368cd 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/ConfluentBaseIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/ConfluentBaseIT.java @@ -1,7 +1,7 @@ package io.apicurio.tests; -import io.apicurio.tests.utils.Constants; import io.apicurio.registry.utils.tests.TestUtils; +import io.apicurio.tests.utils.Constants; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; @@ -23,7 +23,8 @@ public abstract class ConfluentBaseIT extends ApicurioRegistryBaseIT { @BeforeAll void confluentBeforeAll(TestInfo info) throws Exception { - confluentService = new 
CachedSchemaRegistryClient(ApicurioRegistryBaseIT.getRegistryApiUrl() + "/ccompat/v7", 3); + confluentService = new CachedSchemaRegistryClient( + ApicurioRegistryBaseIT.getRegistryApiUrl() + "/ccompat/v7", 3); clearAllConfluentSubjects(); } @@ -32,23 +33,26 @@ void clear() throws IOException, RestClientException { clearAllConfluentSubjects(); } - public int createArtifactViaConfluentClient(ParsedSchema schema, String artifactName) throws IOException, RestClientException, TimeoutException { + public int createArtifactViaConfluentClient(ParsedSchema schema, String artifactName) + throws IOException, RestClientException, TimeoutException { int idOfSchema = confluentService.register(artifactName, schema); confluentService.reset(); // clear cache - TestUtils.waitFor("Wait until artifact globalID mapping is finished", Constants.POLL_INTERVAL, Constants.TIMEOUT_GLOBAL, - () -> { - try { - ParsedSchema newSchema = confluentService.getSchemaBySubjectAndId(artifactName, idOfSchema); - logger.info("Checking that created schema is equal to the get schema"); - assertThat(schema.toString(), is(newSchema.toString())); - assertThat(confluentService.getVersion(artifactName, schema), is(confluentService.getVersion(artifactName, newSchema))); - logger.info("Created schema with id:{} and name:{}", idOfSchema, newSchema.name()); - return true; - } catch (IOException | RestClientException e) { - logger.debug("", e); - return false; - } - }); + TestUtils.waitFor("Wait until artifact globalID mapping is finished", Constants.POLL_INTERVAL, + Constants.TIMEOUT_GLOBAL, () -> { + try { + ParsedSchema newSchema = confluentService.getSchemaBySubjectAndId(artifactName, + idOfSchema); + logger.info("Checking that created schema is equal to the get schema"); + assertThat(schema.toString(), is(newSchema.toString())); + assertThat(confluentService.getVersion(artifactName, schema), + is(confluentService.getVersion(artifactName, newSchema))); + logger.info("Created schema with id:{} and name:{}", 
idOfSchema, newSchema.name()); + return true; + } catch (IOException | RestClientException e) { + logger.debug("", e); + return false; + } + }); return idOfSchema; } @@ -61,7 +65,7 @@ protected void clearAllConfluentSubjects() throws IOException, RestClientExcepti confluentService.deleteSubject(confluentSubject); } catch (RestClientException e) { if (e.getStatus() == 404) { - //subjects may be already deleted + // subjects may be already deleted continue; } throw e; diff --git a/integration-tests/src/test/java/io/apicurio/tests/auth/SimpleAuthIT.java b/integration-tests/src/test/java/io/apicurio/tests/auth/SimpleAuthIT.java index bc0ebac55a..21aab9d952 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/auth/SimpleAuthIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/auth/SimpleAuthIT.java @@ -46,7 +46,7 @@ public class SimpleAuthIT extends ApicurioRegistryBaseIT { @Override public void cleanArtifacts() throws Exception { - //Don't clean + // Don't clean } @Override @@ -63,7 +63,8 @@ private RegistryClient createClient(WebClient auth) { @Test public void testWrongCreds() throws Exception { - var auth = buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.WRONG_CREDS_CLIENT_ID, "test55"); + var auth = buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.WRONG_CREDS_CLIENT_ID, + "test55"); RegistryClient client = createClient(auth); var exception = Assertions.assertThrows(Exception.class, () -> { client.groups().byGroupId("foo").artifacts().get(); @@ -73,7 +74,8 @@ public void testWrongCreds() throws Exception { @Test public void testReadOnly() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.READONLY_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.READONLY_CLIENT_ID, "test1")); adapter.setBaseUrl(getRegistryV3ApiUrl()); RegistryClient client = new RegistryClient(adapter); String 
artifactId = TestUtils.generateArtifactId(); @@ -92,13 +94,16 @@ public void testReadOnly() throws Exception { }); assertForbidden(exception3); - var devAdapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); + var devAdapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); devAdapter.setBaseUrl(getRegistryV3ApiUrl()); RegistryClient devClient = new RegistryClient(devAdapter); - VersionMetaData meta = devClient.groups().byGroupId(groupId).artifacts().post(createArtifact).getVersion(); + VersionMetaData meta = devClient.groups().byGroupId(groupId).artifacts().post(createArtifact) + .getVersion(); - TestUtils.retry(() -> devClient.groups().byGroupId(groupId).artifacts().byArtifactId(meta.getArtifactId()).get()); + TestUtils.retry(() -> devClient.groups().byGroupId(groupId).artifacts() + .byArtifactId(meta.getArtifactId()).get()); assertNotNull(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); @@ -112,7 +117,8 @@ public void testReadOnly() throws Exception { @Test public void testDevRole() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.DEVELOPER_CLIENT_ID, "test1")); adapter.setBaseUrl(getRegistryV3ApiUrl()); RegistryClient client = new RegistryClient(adapter); String artifactId = TestUtils.generateArtifactId(); @@ -121,9 +127,12 @@ public void testDevRole() throws Exception { createArtifact.setArtifactId(artifactId); client.groups().byGroupId(groupId).artifacts().post(createArtifact); - TestUtils.retry(() -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); + TestUtils.retry( + () -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); - 
Assertions.assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); + Assertions.assertTrue( + client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); @@ -148,7 +157,8 @@ public void testDevRole() throws Exception { @Test public void testAdminRole() throws Exception { - var adapter = new VertXRequestAdapter(buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); + var adapter = new VertXRequestAdapter( + buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1")); adapter.setBaseUrl(getRegistryV3ApiUrl()); RegistryClient client = new RegistryClient(adapter); String artifactId = TestUtils.generateArtifactId(); @@ -157,9 +167,12 @@ public void testAdminRole() throws Exception { createArtifact.setArtifactId(artifactId); client.groups().byGroupId(groupId).artifacts().post(createArtifact); - TestUtils.retry(() -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); + TestUtils.retry( + () -> client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); - Assertions.assertTrue(client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); + Assertions.assertTrue( + client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get().readAllBytes().length > 0); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); @@ -181,7 +194,9 @@ public void testAdminRole() throws Exception { protected void assertArtifactNotFound(Exception exception) { 
Assertions.assertEquals(io.apicurio.registry.rest.client.models.Error.class, exception.getClass()); - Assertions.assertEquals("ArtifactNotFoundException", ((io.apicurio.registry.rest.client.models.Error) exception).getName()); - Assertions.assertEquals(404, ((io.apicurio.registry.rest.client.models.Error) exception).getErrorCode()); + Assertions.assertEquals("ArtifactNotFoundException", + ((io.apicurio.registry.rest.client.models.Error) exception).getName()); + Assertions.assertEquals(404, + ((io.apicurio.registry.rest.client.models.Error) exception).getErrorCode()); } } \ No newline at end of file diff --git a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/TestObject.java b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/TestObject.java index 98e9ea6dcf..3dd1f81b0f 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/TestObject.java +++ b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/TestObject.java @@ -2,7 +2,6 @@ import java.util.Objects; - public class TestObject { private String name; diff --git a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/InvalidMessage.java b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/InvalidMessage.java index 057de2dd31..14d9b00044 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/InvalidMessage.java +++ b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/InvalidMessage.java @@ -1,10 +1,10 @@ package io.apicurio.tests.common.serdes.json; public class InvalidMessage extends Msg { - + private String foo; private String bar; - + /** * Constructor. 
*/ diff --git a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/Msg.java b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/Msg.java index ec2f1d5196..2918334b1c 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/Msg.java +++ b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/Msg.java @@ -4,7 +4,7 @@ public class Msg { private String message; private long time; - + /** * Constructor. */ diff --git a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/ValidMessage.java b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/ValidMessage.java index 738c53e9d3..5fc8bca146 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/ValidMessage.java +++ b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/json/ValidMessage.java @@ -1,5 +1,5 @@ package io.apicurio.tests.common.serdes.json; public class ValidMessage extends Msg { - + } diff --git a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/proto/MsgTypes.java b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/proto/MsgTypes.java index 75534e96cb..67325e6d59 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/proto/MsgTypes.java +++ b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/proto/MsgTypes.java @@ -5,681 +5,674 @@ @SuppressWarnings("all") public final class MsgTypes { - private MsgTypes() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistryLite registry) { - } - - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - registerAllExtensions( - (com.google.protobuf.ExtensionRegistryLite) registry); - } - public interface MsgOrBuilder extends - // @@protoc_insertion_point(interface_extends:io.apicurio.tests.serdes.proto.Msg) - com.google.protobuf.MessageOrBuilder { - - /** - * string what = 1; - * @return The 
what. - */ - String getWhat(); - /** - * string what = 1; - * @return The bytes for what. - */ - com.google.protobuf.ByteString - getWhatBytes(); - - /** - * fixed64 when = 2; - * @return The when. - */ - long getWhen(); - } - /** - * Protobuf type {@code io.apicurio.tests.serdes.proto.Msg} - */ - public static final class Msg extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:io.apicurio.tests.serdes.proto.Msg) - MsgOrBuilder { - private static final long serialVersionUID = 0L; - // Use Msg.newBuilder() to construct. - private Msg(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private Msg() { - what_ = ""; + private MsgTypes() { } - @Override - @SuppressWarnings({"unused"}) - protected Object newInstance( - UnusedPrivateParameter unused) { - return new Msg(); + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) { } - @Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private Msg( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - String s = input.readStringRequireUtf8(); - - what_ = s; - break; - } - case 17: { - - when_ = input.readFixed64(); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch 
(java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor; + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } - @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { - return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_fieldAccessorTable - .ensureFieldAccessorsInitialized( - MsgTypes.Msg.class, MsgTypes.Msg.Builder.class); - } + public interface MsgOrBuilder extends + // @@protoc_insertion_point(interface_extends:io.apicurio.tests.serdes.proto.Msg) + com.google.protobuf.MessageOrBuilder { - public static final int WHAT_FIELD_NUMBER = 1; - private volatile Object what_; - /** - * string what = 1; - * @return The what. - */ - public String getWhat() { - Object ref = what_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - what_ = s; - return s; - } - } - /** - * string what = 1; - * @return The bytes for what. - */ - public com.google.protobuf.ByteString - getWhatBytes() { - Object ref = what_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - what_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + /** + * string what = 1; + * + * @return The what. + */ + String getWhat(); + + /** + * string what = 1; + * + * @return The bytes for what. + */ + com.google.protobuf.ByteString getWhatBytes(); + + /** + * fixed64 when = 2; + * + * @return The when. 
+ */ + long getWhen(); } - public static final int WHEN_FIELD_NUMBER = 2; - private long when_; /** - * fixed64 when = 2; - * @return The when. + * Protobuf type {@code io.apicurio.tests.serdes.proto.Msg} */ - public long getWhen() { - return when_; - } + public static final class Msg extends com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.apicurio.tests.serdes.proto.Msg) + MsgOrBuilder { + private static final long serialVersionUID = 0L; + + // Use Msg.newBuilder() to construct. + private Msg(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } - private byte memoizedIsInitialized = -1; - @Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; + private Msg() { + what_ = ""; + } - memoizedIsInitialized = 1; - return true; - } + @Override + @SuppressWarnings({ "unused" }) + protected Object newInstance(UnusedPrivateParameter unused) { + return new Msg(); + } - @Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getWhatBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 1, what_); - } - if (when_ != 0L) { - output.writeFixed64(2, when_); - } - unknownFields.writeTo(output); - } + @Override + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; + } - @Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getWhatBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, what_); - } - if (when_ != 0L) { - size += com.google.protobuf.CodedOutputStream - .computeFixed64Size(2, when_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } + private Msg(com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet + .newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + String s = input.readStringRequireUtf8(); + + what_ = s; + break; + } + case 17: { + + when_ = input.readFixed64(); + break; + } + default: { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } - @Override - public boolean equals(final Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof MsgTypes.Msg)) { - return super.equals(obj); - } - MsgTypes.Msg other = (MsgTypes.Msg) obj; - - if (!getWhat() - .equals(other.getWhat())) return false; - if (getWhen() - != other.getWhen()) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor; + } - @Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (37 * hash) + WHAT_FIELD_NUMBER; - hash = (53 * hash) + getWhat().hashCode(); - hash = (37 * hash) + WHEN_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - 
getWhen()); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } + @Override + protected FieldAccessorTable internalGetFieldAccessorTable() { + return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_fieldAccessorTable + .ensureFieldAccessorsInitialized(MsgTypes.Msg.class, MsgTypes.Msg.Builder.class); + } - public static MsgTypes.Msg parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static MsgTypes.Msg parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static MsgTypes.Msg parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static MsgTypes.Msg parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static MsgTypes.Msg parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static MsgTypes.Msg parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static MsgTypes.Msg parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static MsgTypes.Msg parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return 
com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static MsgTypes.Msg parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static MsgTypes.Msg parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static MsgTypes.Msg parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static MsgTypes.Msg parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } + public static final int WHAT_FIELD_NUMBER = 1; + private volatile Object what_; + + /** + * string what = 1; + * + * @return The what. + */ + public String getWhat() { + Object ref = what_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + what_ = s; + return s; + } + } - @Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(MsgTypes.Msg prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } + /** + * string what = 1; + * + * @return The bytes for what. 
+ */ + public com.google.protobuf.ByteString getWhatBytes() { + Object ref = what_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + what_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } - @Override - protected Builder newBuilderForType( - BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code io.apicurio.tests.serdes.proto.Msg} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:io.apicurio.tests.serdes.proto.Msg) - MsgTypes.MsgOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor; - } - - @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { - return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_fieldAccessorTable - .ensureFieldAccessorsInitialized( - MsgTypes.Msg.class, MsgTypes.Msg.Builder.class); - } - - // Construct using io.apicurio.tests.serdes.proto.MsgTypes.Msg.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @Override - public Builder clear() { - super.clear(); - what_ = ""; - - when_ = 0L; - - return this; - } - - @Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor; - } - - @Override - public MsgTypes.Msg getDefaultInstanceForType() { - return MsgTypes.Msg.getDefaultInstance(); - } - - @Override - public MsgTypes.Msg build() { - MsgTypes.Msg 
result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @Override - public MsgTypes.Msg buildPartial() { - MsgTypes.Msg result = new MsgTypes.Msg(this); - result.what_ = what_; - result.when_ = when_; - onBuilt(); - return result; - } - - @Override - public Builder clone() { - return super.clone(); - } - @Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return super.setField(field, value); - } - @Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return super.setRepeatedField(field, index, value); - } - @Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return super.addRepeatedField(field, value); - } - @Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof MsgTypes.Msg) { - return mergeFrom((MsgTypes.Msg)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(MsgTypes.Msg other) { - if (other == MsgTypes.Msg.getDefaultInstance()) return this; - if (!other.getWhat().isEmpty()) { - what_ = other.what_; - onChanged(); - } - if (other.getWhen() != 0L) { - setWhen(other.getWhen()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @Override - public final boolean isInitialized() { - return true; - } - - @Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - 
MsgTypes.Msg parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (MsgTypes.Msg) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private Object what_ = ""; - /** - * string what = 1; - * @return The what. - */ - public String getWhat() { - Object ref = what_; - if (!(ref instanceof String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - what_ = s; - return s; - } else { - return (String) ref; - } - } - /** - * string what = 1; - * @return The bytes for what. - */ - public com.google.protobuf.ByteString - getWhatBytes() { - Object ref = what_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - what_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string what = 1; - * @param value The what to set. - * @return This builder for chaining. - */ - public Builder setWhat( - String value) { - if (value == null) { - throw new NullPointerException(); - } - - what_ = value; - onChanged(); - return this; - } - /** - * string what = 1; - * @return This builder for chaining. - */ - public Builder clearWhat() { - - what_ = getDefaultInstance().getWhat(); - onChanged(); - return this; - } - /** - * string what = 1; - * @param value The bytes for what to set. - * @return This builder for chaining. - */ - public Builder setWhatBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - what_ = value; - onChanged(); - return this; - } - - private long when_ ; - /** - * fixed64 when = 2; - * @return The when. 
- */ - public long getWhen() { - return when_; - } - /** - * fixed64 when = 2; - * @param value The when to set. - * @return This builder for chaining. - */ - public Builder setWhen(long value) { - - when_ = value; - onChanged(); - return this; - } - /** - * fixed64 when = 2; - * @return This builder for chaining. - */ - public Builder clearWhen() { - - when_ = 0L; - onChanged(); - return this; - } - @Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:io.apicurio.tests.serdes.proto.Msg) - } + public static final int WHEN_FIELD_NUMBER = 2; + private long when_; - // @@protoc_insertion_point(class_scope:io.apicurio.tests.serdes.proto.Msg) - private static final MsgTypes.Msg DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new MsgTypes.Msg(); - } + /** + * fixed64 when = 2; + * + * @return The when. 
+ */ + public long getWhen() { + return when_; + } - public static MsgTypes.Msg getDefaultInstance() { - return DEFAULT_INSTANCE; - } + private byte memoizedIsInitialized = -1; - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @Override - public Msg parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Msg(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } + @Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) + return true; + if (isInitialized == 0) + return false; + + memoizedIsInitialized = 1; + return true; + } + + @Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (!getWhatBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, what_); + } + if (when_ != 0L) { + output.writeFixed64(2, when_); + } + unknownFields.writeTo(output); + } + + @Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) + return size; + + size = 0; + if (!getWhatBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, what_); + } + if (when_ != 0L) { + size += com.google.protobuf.CodedOutputStream.computeFixed64Size(2, when_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @Override + public boolean equals(final Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof MsgTypes.Msg)) { + return super.equals(obj); + } + MsgTypes.Msg other = (MsgTypes.Msg) obj; + + if (!getWhat().equals(other.getWhat())) + return false; + if (getWhen() != other.getWhen()) + return false; + if (!unknownFields.equals(other.unknownFields)) + return false; + 
return true; + } + + @Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + WHAT_FIELD_NUMBER; + hash = (53 * hash) + getWhat().hashCode(); + hash = (37 * hash) + WHEN_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getWhen()); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static MsgTypes.Msg parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static MsgTypes.Msg parseFrom(java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static MsgTypes.Msg parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static MsgTypes.Msg parseFrom(com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static MsgTypes.Msg parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static MsgTypes.Msg parseFrom(byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static MsgTypes.Msg parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static MsgTypes.Msg parseFrom(java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, + extensionRegistry); + } + + public static MsgTypes.Msg parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static MsgTypes.Msg parseDelimitedFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, + extensionRegistry); + } + + public static MsgTypes.Msg parseFrom(com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static MsgTypes.Msg parseFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, + extensionRegistry); + } + + @Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(MsgTypes.Msg prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } + + @Override + protected Builder newBuilderForType(BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + + /** + * Protobuf type {@code io.apicurio.tests.serdes.proto.Msg} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:io.apicurio.tests.serdes.proto.Msg) + MsgTypes.MsgOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor; + } + + @Override + protected FieldAccessorTable internalGetFieldAccessorTable() { + return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_fieldAccessorTable + .ensureFieldAccessorsInitialized(MsgTypes.Msg.class, MsgTypes.Msg.Builder.class); + } + + // Construct using io.apicurio.tests.serdes.proto.MsgTypes.Msg.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + } + } + + @Override + public Builder clear() { + super.clear(); + what_ = ""; + + when_ = 0L; + + return this; + } + + @Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return MsgTypes.internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor; + } + + @Override + public MsgTypes.Msg getDefaultInstanceForType() { + return MsgTypes.Msg.getDefaultInstance(); + } + + @Override + public MsgTypes.Msg build() { + MsgTypes.Msg result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @Override + public MsgTypes.Msg buildPartial() { + MsgTypes.Msg result = new MsgTypes.Msg(this); + result.what_ = what_; + 
result.when_ = when_; + onBuilt(); + return result; + } + + @Override + public Builder clone() { + return super.clone(); + } + + @Override + public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { + return super.setField(field, value); + } + + @Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @Override + public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, + Object value) { + return super.setRepeatedField(field, index, value); + } + + @Override + public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return super.addRepeatedField(field, value); + } + + @Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof MsgTypes.Msg) { + return mergeFrom((MsgTypes.Msg) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(MsgTypes.Msg other) { + if (other == MsgTypes.Msg.getDefaultInstance()) + return this; + if (!other.getWhat().isEmpty()) { + what_ = other.what_; + onChanged(); + } + if (other.getWhen() != 0L) { + setWhen(other.getWhen()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @Override + public final boolean isInitialized() { + return true; + } + + @Override + public Builder mergeFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + MsgTypes.Msg parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (MsgTypes.Msg) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } 
finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private Object what_ = ""; + + /** + * string what = 1; + * + * @return The what. + */ + public String getWhat() { + Object ref = what_; + if (!(ref instanceof String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + what_ = s; + return s; + } else { + return (String) ref; + } + } + + /** + * string what = 1; + * + * @return The bytes for what. + */ + public com.google.protobuf.ByteString getWhatBytes() { + Object ref = what_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString + .copyFromUtf8((String) ref); + what_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + /** + * string what = 1; + * + * @param value The what to set. + * @return This builder for chaining. + */ + public Builder setWhat(String value) { + if (value == null) { + throw new NullPointerException(); + } + + what_ = value; + onChanged(); + return this; + } + + /** + * string what = 1; + * + * @return This builder for chaining. + */ + public Builder clearWhat() { + + what_ = getDefaultInstance().getWhat(); + onChanged(); + return this; + } + + /** + * string what = 1; + * + * @param value The bytes for what to set. + * @return This builder for chaining. + */ + public Builder setWhatBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + what_ = value; + onChanged(); + return this; + } + + private long when_; + + /** + * fixed64 when = 2; + * + * @return The when. + */ + public long getWhen() { + return when_; + } + + /** + * fixed64 when = 2; + * + * @param value The when to set. + * @return This builder for chaining. 
+ */ + public Builder setWhen(long value) { + + when_ = value; + onChanged(); + return this; + } + + /** + * fixed64 when = 2; + * + * @return This builder for chaining. + */ + public Builder clearWhen() { + + when_ = 0L; + onChanged(); + return this; + } + + @Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @Override + public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:io.apicurio.tests.serdes.proto.Msg) + } + + // @@protoc_insertion_point(class_scope:io.apicurio.tests.serdes.proto.Msg) + private static final MsgTypes.Msg DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new MsgTypes.Msg(); + } + + public static MsgTypes.Msg getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { + @Override + public Msg parsePartialFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Msg(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @Override + public MsgTypes.Msg getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } - @Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; } - @Override - public MsgTypes.Msg getDefaultInstanceForType() { - return DEFAULT_INSTANCE; + private static final com.google.protobuf.Descriptors.Descriptor internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor; + private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_io_apicurio_tests_serdes_proto_Msg_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + return descriptor; } - } - - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_io_apicurio_tests_serdes_proto_Msg_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - String[] descriptorData = { - "\n\tlog.proto\022\036io.apicurio.tests.serdes.pr" + - "oto\"!\n\003Msg\022\014\n\004what\030\001 \001(\t\022\014\n\004when\030\002 \001(\006B*" + - "\n\036io.apicurio.tests.serdes.protoB\010MsgTyp" + - "esb\006proto3" - }; - descriptor = com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }); - internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_io_apicurio_tests_serdes_proto_Msg_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor, - new String[] { "What", "When", }); - } - - // @@protoc_insertion_point(outer_class_scope) + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; + static { + String[] descriptorData = { "\n\tlog.proto\022\036io.apicurio.tests.serdes.pr" + + "oto\"!\n\003Msg\022\014\n\004what\030\001 \001(\t\022\014\n\004when\030\002 \001(\006B*" + + "\n\036io.apicurio.tests.serdes.protoB\010MsgTyp" + "esb\006proto3" }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( + descriptorData, new 
com.google.protobuf.Descriptors.FileDescriptor[] {}); + internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor = getDescriptor().getMessageTypes() + .get(0); + internal_static_io_apicurio_tests_serdes_proto_Msg_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_apicurio_tests_serdes_proto_Msg_descriptor, + new String[] { "What", "When", }); + } + + // @@protoc_insertion_point(outer_class_scope) } diff --git a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/proto/TestCmmn.java b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/proto/TestCmmn.java index 03056794a7..5a14a69625 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/common/serdes/proto/TestCmmn.java +++ b/integration-tests/src/test/java/io/apicurio/tests/common/serdes/proto/TestCmmn.java @@ -4,19 +4,19 @@ package io.apicurio.tests.common.serdes.proto; public final class TestCmmn { - private TestCmmn() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistryLite registry) { + private TestCmmn() { } - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - registerAllExtensions( - (com.google.protobuf.ExtensionRegistryLite) registry); + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) { } + + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface UUIDOrBuilder extends - // @@protoc_insertion_point(interface_extends:io.apicurio.registry.common.proto.UUID) - com.google.protobuf.MessageOrBuilder { + // @@protoc_insertion_point(interface_extends:io.apicurio.registry.common.proto.UUID) + com.google.protobuf.MessageOrBuilder { /** * fixed64 msb = 1; @@ -28,43 +28,43 @@ public interface UUIDOrBuilder extends */ long getLsb(); } + /** * Protobuf type {@code 
io.apicurio.registry.common.proto.UUID} */ - public static final class UUID extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:io.apicurio.registry.common.proto.UUID) - UUIDOrBuilder { + public static final class UUID extends com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.apicurio.registry.common.proto.UUID) + UUIDOrBuilder { private static final long serialVersionUID = 0L; + // Use UUID.newBuilder() to construct. private UUID(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } + private UUID() { } @Override - @SuppressWarnings({"unused"}) - protected Object newInstance( - UnusedPrivateParameter unused) { + @SuppressWarnings({ "unused" }) + protected Object newInstance(UnusedPrivateParameter unused) { return new UUID(); } @Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } - private UUID( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + + private UUID(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new NullPointerException(); } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet + .newBuilder(); try { boolean done = false; while (!done) { @@ -84,8 +84,7 @@ private UUID( break; } default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } 
break; @@ -95,28 +94,26 @@ private UUID( } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { + + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_descriptor; } @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { + protected FieldAccessorTable internalGetFieldAccessorTable() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable - .ensureFieldAccessorsInitialized( - UUID.class, Builder.class); + .ensureFieldAccessorsInitialized(UUID.class, Builder.class); } public static final int MSB_FIELD_NUMBER = 1; private long msb_; + /** * fixed64 msb = 1; */ @@ -126,6 +123,7 @@ public long getMsb() { public static final int LSB_FIELD_NUMBER = 2; private long lsb_; + /** * fixed64 lsb = 2; */ @@ -134,19 +132,21 @@ public long getLsb() { } private byte memoizedIsInitialized = -1; + @Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; + if (isInitialized == 1) + return true; + if (isInitialized == 0) + return false; memoizedIsInitialized = 1; return true; } @Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (msb_ != 0L) { output.writeFixed64(1, msb_); } @@ -159,16 +159,15 @@ public void 
writeTo(com.google.protobuf.CodedOutputStream output) @Override public int getSerializedSize() { int size = memoizedSize; - if (size != -1) return size; + if (size != -1) + return size; size = 0; if (msb_ != 0L) { - size += com.google.protobuf.CodedOutputStream - .computeFixed64Size(1, msb_); + size += com.google.protobuf.CodedOutputStream.computeFixed64Size(1, msb_); } if (lsb_ != 0L) { - size += com.google.protobuf.CodedOutputStream - .computeFixed64Size(2, lsb_); + size += com.google.protobuf.CodedOutputStream.computeFixed64Size(2, lsb_); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -185,11 +184,12 @@ public boolean equals(final Object obj) { } UUID other = (UUID) obj; - if (getMsb() - != other.getMsb()) return false; - if (getLsb() - != other.getLsb()) return false; - if (!unknownFields.equals(other.unknownFields)) return false; + if (getMsb() != other.getMsb()) + return false; + if (getLsb() != other.getLsb()) + return false; + if (!unknownFields.equals(other.unknownFields)) + return false; return true; } @@ -202,124 +202,114 @@ public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + MSB_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - getMsb()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getMsb()); hash = (37 * hash) + LSB_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - getLsb()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getLsb()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } - public static UUID parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { + public static UUID parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static UUID parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + + public static UUID parseFrom(java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static UUID parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { + + public static UUID parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static UUID parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + + public static UUID parseFrom(com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static UUID parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { + + public static UUID parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static UUID parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + + public static UUID parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static UUID parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static UUID parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static UUID parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static UUID parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static UUID parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static UUID parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); + + public static UUID parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static UUID parseFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, + extensionRegistry); + } + + public static UUID parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static UUID parseDelimitedFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, + extensionRegistry); + } + + public static UUID parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static UUID parseFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, + extensionRegistry); } @Override - public Builder newBuilderForType() { return newBuilder(); } + public Builder newBuilderForType() { + return newBuilder(); + } + public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } + public static Builder newBuilder(UUID prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } + @Override public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @Override - protected Builder newBuilderForType( - BuilderParent parent) { + protected Builder newBuilderForType(BuilderParent parent) { Builder builder = new Builder(parent); return builder; } + /** * Protobuf type {@code io.apicurio.registry.common.proto.UUID} */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:io.apicurio.registry.common.proto.UUID) - UUIDOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:io.apicurio.registry.common.proto.UUID) + UUIDOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_descriptor; } @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { + protected FieldAccessorTable internalGetFieldAccessorTable() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable - .ensureFieldAccessorsInitialized( - UUID.class, Builder.class); + .ensureFieldAccessorsInitialized(UUID.class, Builder.class); } // Construct using io.apicurio.registry.support.Cmmn.UUID.newBuilder() @@ -327,16 +317,16 @@ private Builder() { maybeForceBuilderInitialization(); } - private Builder( - BuilderParent parent) { + private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } + private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { } } + @Override public Builder clear() { super.clear(); @@ -348,8 +338,7 @@ public Builder clear() { } @Override - public 
com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return TestCmmn.internal_static_io_apicurio_registry_common_proto_UUID_descriptor; } @@ -380,38 +369,38 @@ public UUID buildPartial() { public Builder clone() { return super.clone(); } + @Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { + public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return super.setField(field, value); } + @Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } + @Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } + @Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { + public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, + Object value) { return super.setRepeatedField(field, index, value); } + @Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { + public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { return super.addRepeatedField(field, value); } + @Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof UUID) { - return mergeFrom((UUID)other); + return mergeFrom((UUID) other); } else { super.mergeFrom(other); return this; @@ -419,7 +408,8 @@ public Builder mergeFrom(com.google.protobuf.Message other) { } public Builder mergeFrom(UUID other) { - if (other == UUID.getDefaultInstance()) return this; + if 
(other == UUID.getDefaultInstance()) + return this; if (other.getMsb() != 0L) { setMsb(other.getMsb()); } @@ -437,10 +427,8 @@ public final boolean isInitialized() { } @Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { + public Builder mergeFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { UUID parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); @@ -455,13 +443,15 @@ public Builder mergeFrom( return this; } - private long msb_ ; + private long msb_; + /** * fixed64 msb = 1; */ public long getMsb() { return msb_; } + /** * fixed64 msb = 1; */ @@ -471,6 +461,7 @@ public Builder setMsb(long value) { onChanged(); return this; } + /** * fixed64 msb = 1; */ @@ -481,13 +472,15 @@ public Builder clearMsb() { return this; } - private long lsb_ ; + private long lsb_; + /** * fixed64 lsb = 2; */ public long getLsb() { return lsb_; } + /** * fixed64 lsb = 2; */ @@ -497,6 +490,7 @@ public Builder setLsb(long value) { onChanged(); return this; } + /** * fixed64 lsb = 2; */ @@ -506,19 +500,17 @@ public Builder clearLsb() { onChanged(); return this; } + @Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { + public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } - // @@protoc_insertion_point(builder_scope:io.apicurio.registry.common.proto.UUID) } @@ -532,13 +524,11 @@ public static UUID getDefaultInstance() { return 
DEFAULT_INSTANCE; } - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { + private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { @Override - public UUID parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + public UUID parsePartialFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { return new UUID(input, extensionRegistry); } }; @@ -559,35 +549,25 @@ public UUID getDefaultInstanceForType() { } - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_io_apicurio_registry_common_proto_UUID_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor internal_static_io_apicurio_registry_common_proto_UUID_descriptor; + private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable; - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; + + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { - String[] descriptorData = { - "\n\014common.proto\022!io.apicurio.registry.com" + - "mon.proto\" \n\004UUID\022\013\n\003msb\030\001 \001(\006\022\013\n\003lsb\030\002 " + - "\001(\006B)\n!io.apicurio.registry.common.proto" + - "B\004Cmmnb\006proto3" - }; - descriptor = 
com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }); - internal_static_io_apicurio_registry_common_proto_UUID_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_io_apicurio_registry_common_proto_UUID_descriptor, - new String[] { "Msb", "Lsb", }); + String[] descriptorData = { "\n\014common.proto\022!io.apicurio.registry.com" + + "mon.proto\" \n\004UUID\022\013\n\003msb\030\001 \001(\006\022\013\n\003lsb\030\002 " + + "\001(\006B)\n!io.apicurio.registry.common.proto" + "B\004Cmmnb\006proto3" }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( + descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] {}); + internal_static_io_apicurio_registry_common_proto_UUID_descriptor = getDescriptor().getMessageTypes() + .get(0); + internal_static_io_apicurio_registry_common_proto_UUID_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_apicurio_registry_common_proto_UUID_descriptor, + new String[] { "Msb", "Lsb", }); } // @@protoc_insertion_point(outer_class_scope) diff --git a/integration-tests/src/test/java/io/apicurio/tests/converters/RegistryConverterIT.java b/integration-tests/src/test/java/io/apicurio/tests/converters/RegistryConverterIT.java index a00ec74fb9..1b8f02db90 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/converters/RegistryConverterIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/converters/RegistryConverterIT.java @@ -47,7 +47,7 @@ public class RegistryConverterIT extends ApicurioRegistryBaseIT { @Override public void cleanArtifacts() throws Exception { - //Don't clean up + // Don't clean up } @Test @@ -55,18 +55,22 @@ public void testConfiguration() 
throws Exception { String groupId = TestUtils.generateGroupId(); String topic = TestUtils.generateArtifactId(); String recordName = "myrecord4"; - AvroGenericRecordSchemaFactory schemaFactory = new AvroGenericRecordSchemaFactory(groupId, recordName, List.of("bar")); + AvroGenericRecordSchemaFactory schemaFactory = new AvroGenericRecordSchemaFactory(groupId, recordName, + List.of("bar")); Schema schema = schemaFactory.generateSchema(); - createArtifact(groupId, topic + "-" + recordName, ArtifactType.AVRO, schema.toString(), ContentTypes.APPLICATION_JSON, null, null); + createArtifact(groupId, topic + "-" + recordName, ArtifactType.AVRO, schema.toString(), + ContentTypes.APPLICATION_JSON, null, null); Record record = new Record(schema); record.put("bar", "somebar"); Map config = new HashMap<>(); config.put(SerdeConfig.REGISTRY_URL, getRegistryV3ApiUrl()); - config.put(SerdeBasedConverter.REGISTRY_CONVERTER_SERIALIZER_PARAM, AvroKafkaSerializer.class.getName()); - config.put(SerdeBasedConverter.REGISTRY_CONVERTER_DESERIALIZER_PARAM, AvroKafkaDeserializer.class.getName()); + config.put(SerdeBasedConverter.REGISTRY_CONVERTER_SERIALIZER_PARAM, + AvroKafkaSerializer.class.getName()); + config.put(SerdeBasedConverter.REGISTRY_CONVERTER_DESERIALIZER_PARAM, + AvroKafkaDeserializer.class.getName()); config.put(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, TopicRecordIdStrategy.class.getName()); config.put(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, DefaultAvroDatumProvider.class.getName()); SerdeBasedConverter converter = new SerdeBasedConverter<>(); @@ -85,21 +89,12 @@ record = (Record) converter.toConnectData(topic, bytes).value(); @Test public void testAvroIntDefaultValue() throws Exception { - String expectedSchema = "{\n" + - " \"type\" : \"record\",\n" + - " \"name\" : \"ConnectDefault\",\n" + - " \"namespace\" : \"io.confluent.connect.avro\",\n" + - " \"fields\" : [ {\n" + - " \"name\" : \"int16Test\",\n" + - " \"type\" : [ {\n" + - " \"type\" : \"int\",\n" + - " 
\"connect.doc\" : \"int16test field\",\n" + - " \"connect.default\" : 2,\n" + - " \"connect.type\" : \"int16\"\n" + - " }, \"null\" ],\n" + - " \"default\" : 2\n" + - " } ]\n" + - "}"; + String expectedSchema = "{\n" + " \"type\" : \"record\",\n" + " \"name\" : \"ConnectDefault\",\n" + + " \"namespace\" : \"io.confluent.connect.avro\",\n" + " \"fields\" : [ {\n" + + " \"name\" : \"int16Test\",\n" + " \"type\" : [ {\n" + " \"type\" : \"int\",\n" + + " \"connect.doc\" : \"int16test field\",\n" + " \"connect.default\" : 2,\n" + + " \"connect.type\" : \"int16\"\n" + " }, \"null\" ],\n" + " \"default\" : 2\n" + + " } ]\n" + "}"; try (AvroConverter converter = new AvroConverter<>()) { @@ -108,9 +103,8 @@ public void testAvroIntDefaultValue() throws Exception { config.put(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true"); converter.configure(config, false); - org.apache.kafka.connect.data.Schema sc = SchemaBuilder.struct() - .field("int16Test", SchemaBuilder.int16().optional().defaultValue((short) 2).doc("int16test field") - .build()); + org.apache.kafka.connect.data.Schema sc = SchemaBuilder.struct().field("int16Test", + SchemaBuilder.int16().optional().defaultValue((short) 2).doc("int16test field").build()); Struct struct = new Struct(sc); struct.put("int16Test", (short) 3); @@ -121,7 +115,8 @@ public void testAvroIntDefaultValue() throws Exception { // some impl details ... 
TestUtils.waitForSchema(globalId -> { try { - return registryClient.ids().globalIds().byGlobalId(globalId).get().readAllBytes().length > 0; + return registryClient.ids().globalIds().byGlobalId(globalId).get() + .readAllBytes().length > 0; } catch (IOException e) { throw new RuntimeException(e); } @@ -137,22 +132,12 @@ public void testAvroIntDefaultValue() throws Exception { @Test public void testAvroBytesDefaultValue() throws Exception { - String expectedSchema = "{\n" + - " \"type\" : \"record\",\n" + - " \"name\" : \"ConnectDefault\",\n" + - " \"namespace\" : \"io.confluent.connect.avro\",\n" + - " \"fields\" : [ {\n" + - " \"name\" : \"bytesTest\",\n" + - " \"type\" : [ {\n" + - " \"type\" : \"bytes\",\n" + - " \"connect.parameters\" : {\n" + - " \"lenght\" : \"10\"\n" + - " },\n" + - " \"connect.default\" : \"test\"\n" + - " }, \"null\" ],\n" + - " \"default\" : \"test\"\n" + - " } ]\n" + - "}"; + String expectedSchema = "{\n" + " \"type\" : \"record\",\n" + " \"name\" : \"ConnectDefault\",\n" + + " \"namespace\" : \"io.confluent.connect.avro\",\n" + " \"fields\" : [ {\n" + + " \"name\" : \"bytesTest\",\n" + " \"type\" : [ {\n" + " \"type\" : \"bytes\",\n" + + " \"connect.parameters\" : {\n" + " \"lenght\" : \"10\"\n" + " },\n" + + " \"connect.default\" : \"test\"\n" + " }, \"null\" ],\n" + + " \"default\" : \"test\"\n" + " } ]\n" + "}"; try (AvroConverter converter = new AvroConverter<>()) { @@ -161,9 +146,9 @@ public void testAvroBytesDefaultValue() throws Exception { config.put(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true"); converter.configure(config, false); - org.apache.kafka.connect.data.Schema sc = SchemaBuilder.struct() - .field("bytesTest", SchemaBuilder.bytes().optional().parameters(Map.of("lenght", "10")).defaultValue("test".getBytes()) - .build()); + org.apache.kafka.connect.data.Schema sc = SchemaBuilder.struct().field("bytesTest", + SchemaBuilder.bytes().optional().parameters(Map.of("lenght", "10")) + .defaultValue("test".getBytes()).build()); 
Struct struct = new Struct(sc); struct.put("bytesTest", "testingBytes".getBytes()); @@ -175,7 +160,8 @@ public void testAvroBytesDefaultValue() throws Exception { // some impl details ... TestUtils.waitForSchema(globalId -> { try { - return registryClient.ids().globalIds().byGlobalId(globalId).get().readAllBytes().length > 0; + return registryClient.ids().globalIds().byGlobalId(globalId).get() + .readAllBytes().length > 0; } catch (IOException e) { throw new RuntimeException(e); } @@ -197,8 +183,7 @@ public void testAvro() throws Exception { converter.configure(config, false); org.apache.kafka.connect.data.Schema sc = SchemaBuilder.struct() - .field("bar", org.apache.kafka.connect.data.Schema.STRING_SCHEMA) - .build(); + .field("bar", org.apache.kafka.connect.data.Schema.STRING_SCHEMA).build(); Struct struct = new Struct(sc); struct.put("bar", "somebar"); @@ -209,7 +194,8 @@ public void testAvro() throws Exception { // some impl details ... TestUtils.waitForSchema(globalId -> { try { - return registryClient.ids().globalIds().byGlobalId(globalId).get().readAllBytes().length > 0; + return registryClient.ids().globalIds().byGlobalId(globalId).get() + .readAllBytes().length > 0; } catch (IOException e) { throw new RuntimeException(e); } @@ -222,19 +208,15 @@ public void testAvro() throws Exception { @Test public void testPrettyJson() throws Exception { - testJson( - createRegistryClient(), - new PrettyFormatStrategy(), - input -> { - try { - ObjectMapper mapper = new ObjectMapper(); - JsonNode root = mapper.readTree(input); - return root.get("schemaId").asLong(); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - ); + testJson(createRegistryClient(), new PrettyFormatStrategy(), input -> { + try { + ObjectMapper mapper = new ObjectMapper(); + JsonNode root = mapper.readTree(input); + return root.get("schemaId").asLong(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }); } @Test @@ -260,7 +242,6 @@ public void 
testConnectStruct() throws Exception { envelopeStruct.put("ts_ms", 1638362438000L); // Replace with the actual timestamp envelopeStruct.put("transaction", buildTransactionStruct()); - String subject = TestUtils.generateArtifactId(); byte[] bytes = converter.fromConnectData(subject, envelopeSchema, envelopeStruct); @@ -268,13 +249,13 @@ public void testConnectStruct() throws Exception { // some impl details ... TestUtils.waitForSchema(globalId -> { try { - return registryClient.ids().globalIds().byGlobalId(globalId).get().readAllBytes().length > 0; + return registryClient.ids().globalIds().byGlobalId(globalId).get() + .readAllBytes().length > 0; } catch (IOException e) { throw new RuntimeException(e); } }, bytes); - Struct ir = (Struct) converter.toConnectData(subject, bytes).value(); Assertions.assertEquals(envelopeStruct, ir); } @@ -282,25 +263,17 @@ public void testConnectStruct() throws Exception { private static org.apache.kafka.connect.data.Schema buildEnvelopeSchema() { // Define the Envelope schema - return SchemaBuilder.struct() - .name("dbserver1.public.aviation.Envelope") - .version(1) - .field("before", buildValueSchema()) - .field("after", buildValueSchema()) - .field("source", buildSourceSchema()) - .field("op", SchemaBuilder.STRING_SCHEMA) + return SchemaBuilder.struct().name("dbserver1.public.aviation.Envelope").version(1) + .field("before", buildValueSchema()).field("after", buildValueSchema()) + .field("source", buildSourceSchema()).field("op", SchemaBuilder.STRING_SCHEMA) .field("ts_ms", SchemaBuilder.OPTIONAL_INT64_SCHEMA) - .field("transaction", buildTransactionSchema()) - .build(); + .field("transaction", buildTransactionSchema()).build(); } private static org.apache.kafka.connect.data.Schema buildValueSchema() { // Define the Value schema - return SchemaBuilder.struct() - .name("dbserver1.public.aviation.Value") - .version(1) - .field("id", SchemaBuilder.INT32_SCHEMA) - .build(); + return 
SchemaBuilder.struct().name("dbserver1.public.aviation.Value").version(1) + .field("id", SchemaBuilder.INT32_SCHEMA).build(); } private static Struct buildValueStruct() { @@ -315,11 +288,8 @@ private static Struct buildValueStruct() { private static org.apache.kafka.connect.data.Schema buildSourceSchema() { // Define the Source schema - return SchemaBuilder.struct() - .name("io.debezium.connector.postgresql.Source") - .version(1) - .field("id", SchemaBuilder.STRING_SCHEMA) - .field("version", SchemaBuilder.STRING_SCHEMA) + return SchemaBuilder.struct().name("io.debezium.connector.postgresql.Source").version(1) + .field("id", SchemaBuilder.STRING_SCHEMA).field("version", SchemaBuilder.STRING_SCHEMA) .build(); } @@ -336,10 +306,7 @@ private static Struct buildSourceStruct() { private static org.apache.kafka.connect.data.Schema buildTransactionSchema() { // Define the Transaction schema - return SchemaBuilder.struct() - .name("event.block") - .version(1) - .field("id", SchemaBuilder.STRING_SCHEMA) + return SchemaBuilder.struct().name("event.block").version(1).field("id", SchemaBuilder.STRING_SCHEMA) .build(); } @@ -355,17 +322,14 @@ private static Struct buildTransactionStruct() { @Test public void testCompactJson() throws Exception { - testJson( - createRegistryClient(), - new CompactFormatStrategy(), - input -> { - ByteBuffer buffer = AbstractKafkaSerDe.getByteBuffer(input); - return buffer.getLong(); - } - ); + testJson(createRegistryClient(), new CompactFormatStrategy(), input -> { + ByteBuffer buffer = AbstractKafkaSerDe.getByteBuffer(input); + return buffer.getLong(); + }); } - private void testJson(RegistryClient restClient, FormatStrategy formatStrategy, Function fn) throws Exception { + private void testJson(RegistryClient restClient, FormatStrategy formatStrategy, Function fn) + throws Exception { try (ExtJsonConverter converter = new ExtJsonConverter(restClient)) { converter.setFormatStrategy(formatStrategy); Map config = new HashMap<>(); @@ -373,8 +337,7 
@@ private void testJson(RegistryClient restClient, FormatStrategy formatStrategy, converter.configure(config, false); org.apache.kafka.connect.data.Schema sc = SchemaBuilder.struct() - .field("bar", org.apache.kafka.connect.data.Schema.STRING_SCHEMA) - .build(); + .field("bar", org.apache.kafka.connect.data.Schema.STRING_SCHEMA).build(); Struct struct = new Struct(sc); struct.put("bar", "somebar"); @@ -389,7 +352,7 @@ private void testJson(RegistryClient restClient, FormatStrategy formatStrategy, } }, bytes, fn); - //noinspection rawtypes + // noinspection rawtypes Struct ir = (Struct) converter.toConnectData("extjson", bytes).value(); Assertions.assertEquals("somebar", ir.get("bar").toString()); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/kafkasql/KafkaSqlSnapshottingIT.java b/integration-tests/src/test/java/io/apicurio/tests/kafkasql/KafkaSqlSnapshottingIT.java index 3f8bce69f9..024a24d205 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/kafkasql/KafkaSqlSnapshottingIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/kafkasql/KafkaSqlSnapshottingIT.java @@ -21,10 +21,12 @@ public void cleanArtifacts() throws Exception { @Test public void testRecoverFromSnapshot() throws InterruptedException { - //We expect 1000 artifacts to be present in the snapshots group, created before the snapshot. - Assertions.assertEquals(1000, registryClient.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts().get().getCount()); + // We expect 1000 artifacts to be present in the snapshots group, created before the snapshot. + Assertions.assertEquals(1000, registryClient.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID) + .artifacts().get().getCount()); - //And another 1000 in the default group, created after the snapshot. 
- Assertions.assertEquals(1000, registryClient.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID).artifacts().get().getCount()); + // And another 1000 in the default group, created after the snapshot. + Assertions.assertEquals(1000, registryClient.groups().byGroupId(NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID) + .artifacts().get().getCount()); } } \ No newline at end of file diff --git a/integration-tests/src/test/java/io/apicurio/tests/migration/DataMigrationIT.java b/integration-tests/src/test/java/io/apicurio/tests/migration/DataMigrationIT.java index d032daa4ff..a8cd2c9f24 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/migration/DataMigrationIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/migration/DataMigrationIT.java @@ -1,7 +1,6 @@ package io.apicurio.tests.migration; - - +import io.apicurio.registry.client.auth.VertXAuthFactory; import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.rest.client.models.ArtifactReference; import io.apicurio.registry.types.RuleType; @@ -12,7 +11,6 @@ import io.kiota.http.vertx.VertXRequestAdapter; import io.quarkus.test.common.QuarkusTestResource; import io.quarkus.test.junit.QuarkusIntegrationTest; -import io.apicurio.registry.client.auth.VertXAuthFactory; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Tag; @@ -47,7 +45,8 @@ public class DataMigrationIT extends ApicurioRegistryBaseIT { public static Map doNotPreserveIdsImportArtifacts = new HashMap<>(); /** - * The data required for this test is initialized by MigrationTestsDataInitializer.initializeMigrateTest(RegistryClient) + * The data required for this test is initialized by + * MigrationTestsDataInitializer.initializeMigrateTest(RegistryClient) * * @throws Exception */ @@ -66,12 +65,15 @@ public void migrate() throws Exception { dest.ids().globalIds().byGlobalId(gid).get(); if (migrateReferencesMap.containsKey(gid)) { List srcReferences = 
migrateReferencesMap.get(gid); - List destReferences = dest.ids().globalIds().byGlobalId(gid).references().get(); + List destReferences = dest.ids().globalIds().byGlobalId(gid) + .references().get(); assertTrue(matchesReferences(srcReferences, destReferences)); } } - assertEquals("SYNTAX_ONLY", dest.groups().byGroupId("migrateTest").artifacts().byArtifactId("avro-0").rules().byRuleType(RuleType.VALIDITY.name()).get().getConfig()); - assertEquals("BACKWARD", dest.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()).get().getConfig()); + assertEquals("SYNTAX_ONLY", dest.groups().byGroupId("migrateTest").artifacts() + .byArtifactId("avro-0").rules().byRuleType(RuleType.VALIDITY.name()).get().getConfig()); + assertEquals("BACKWARD", + dest.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()).get().getConfig()); }); } @@ -84,14 +86,15 @@ public static class MigrateTestInitializer extends AbstractTestDataInitializer { @Override public Map start() { // TODO we will need to change this to 3.0.0 whenever that is released! - String registryBaseUrl = startRegistryApplication("quay.io/apicurio/apicurio-registry:latest-snapshot"); + String registryBaseUrl = startRegistryApplication( + "quay.io/apicurio/apicurio-registry:latest-snapshot"); var adapter = new VertXRequestAdapter(VertXAuthFactory.defaultVertx); adapter.setBaseUrl(registryBaseUrl); RegistryClient source = new RegistryClient(adapter); try { - //Warm up until the source registry is ready. + // Warm up until the source registry is ready. 
TestUtils.retry(() -> { source.groups().byGroupId("default").artifacts().get(); }); diff --git a/integration-tests/src/test/java/io/apicurio/tests/migration/DoNotPreserveIdsImportIT.java b/integration-tests/src/test/java/io/apicurio/tests/migration/DoNotPreserveIdsImportIT.java index 1d2504499f..0153543887 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/migration/DoNotPreserveIdsImportIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/migration/DoNotPreserveIdsImportIT.java @@ -1,6 +1,5 @@ package io.apicurio.tests.migration; - import io.apicurio.registry.client.auth.VertXAuthFactory; import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.rest.client.models.CreateArtifact; @@ -46,7 +45,7 @@ public class DoNotPreserveIdsImportIT extends ApicurioRegistryBaseIT { @Override public void cleanArtifacts() throws Exception { - //Don't clean up + // Don't clean up } @Test @@ -55,46 +54,57 @@ public void testDoNotPreserveIdsImport() throws Exception { adapter.setBaseUrl(ApicurioRegistryBaseIT.getRegistryV3ApiUrl()); RegistryClient dest = new RegistryClient(adapter); - // Fill the destination registry with data (Avro content is inserted first to ensure that the content IDs are different) + // Fill the destination registry with data (Avro content is inserted first to ensure that the content + // IDs are different) for (int idx = 0; idx < 15; idx++) { - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(List.of("a" + idx)); - String artifactId = "avro-" + idx + "-" + UUID.randomUUID().toString(); // Artifact ids need to be different we do not support identical artifact ids + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory( + List.of("a" + idx)); + String artifactId = "avro-" + idx + "-" + UUID.randomUUID().toString(); // Artifact ids need to be + // different we do not + // support identical + // artifact ids String content = 
IoUtil.toString(avroSchema.generateSchemaStream()); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON); - var response = dest.groups().byGroupId("testDoNotPreserveIdsImport").artifacts().post(createArtifact); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, + content, ContentTypes.APPLICATION_JSON); + var response = dest.groups().byGroupId("testDoNotPreserveIdsImport").artifacts() + .post(createArtifact); retry(() -> dest.ids().globalIds().byGlobalId(response.getVersion().getGlobalId())); doNotPreserveIdsImportArtifacts.put("testDoNotPreserveIdsImport:" + artifactId, content); } for (int idx = 0; idx < 50; idx++) { - String artifactId = idx + "-" + UUID.randomUUID().toString(); // Artifact ids need to be different we do not support identical artifact ids + String artifactId = idx + "-" + UUID.randomUUID().toString(); // Artifact ids need to be different + // we do not support identical + // artifact ids String content = IoUtil.toString(jsonSchema.getSchemaStream()); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, content, ContentTypes.APPLICATION_JSON); - var response = dest.groups().byGroupId("testDoNotPreserveIdsImport").artifacts().post(createArtifact, config -> { - config.headers.add("X-Registry-ArtifactId", artifactId); - }); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, + content, ContentTypes.APPLICATION_JSON); + var response = dest.groups().byGroupId("testDoNotPreserveIdsImport").artifacts() + .post(createArtifact, config -> { + config.headers.add("X-Registry-ArtifactId", artifactId); + }); retry(() -> dest.ids().globalIds().byGlobalId(response.getVersion().getGlobalId())); doNotPreserveIdsImportArtifacts.put("testDoNotPreserveIdsImport:" + artifactId, content); } // Import the data - var importReq = 
dest.admin().importEscaped().toPostRequestInformation(doNotPreserveIdsImportDataToImport, config -> { - config.headers.add("X-Registry-Preserve-GlobalId", "false"); - config.headers.add("X-Registry-Preserve-ContentId", "false"); - }); + var importReq = dest.admin().importEscaped() + .toPostRequestInformation(doNotPreserveIdsImportDataToImport, config -> { + config.headers.add("X-Registry-Preserve-GlobalId", "false"); + config.headers.add("X-Registry-Preserve-ContentId", "false"); + }); importReq.headers.replace("Content-Type", Set.of("application/zip")); adapter.sendPrimitive(importReq, new HashMap<>(), Void.class); - - // Check that the import was successful retry(() -> { for (var entry : doNotPreserveIdsImportArtifacts.entrySet()) { String groupId = entry.getKey().split(":")[0]; String artifactId = entry.getKey().split(":")[1]; String content = entry.getValue(); - var registryContent = dest.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get(); + var registryContent = dest.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").content().get(); assertNotNull(registryContent); assertEquals(content, IoUtil.toString(registryContent)); } @@ -106,13 +116,14 @@ public static class DoNotPreserveIdsInitializer extends AbstractTestDataInitiali @Override public Map start() { - String registryBaseUrl = startRegistryApplication("quay.io/apicurio/apicurio-registry-mem:2.4.14.Final"); + String registryBaseUrl = startRegistryApplication( + "quay.io/apicurio/apicurio-registry-mem:2.4.14.Final"); var adapter = new VertXRequestAdapter(VertXAuthFactory.defaultVertx); adapter.setBaseUrl(registryBaseUrl); RegistryClient source = new RegistryClient(adapter); try { - //Warm up until the source registry is ready. + // Warm up until the source registry is ready. 
TestUtils.retry(() -> { source.groups().byGroupId("default").artifacts().get(); }); diff --git a/integration-tests/src/test/java/io/apicurio/tests/migration/GenerateCanonicalHashImportIT.java b/integration-tests/src/test/java/io/apicurio/tests/migration/GenerateCanonicalHashImportIT.java index b1c9838967..985cf19458 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/migration/GenerateCanonicalHashImportIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/migration/GenerateCanonicalHashImportIT.java @@ -56,7 +56,8 @@ public void testGeneratingCanonicalHashOnImport() throws Exception { String content = jsonSchema.getSchemaString(); artifacts.put(artifactId, content); } - var importReq = client.admin().importEscaped().toPostRequestInformation(generateExportedZip(artifacts)); + var importReq = client.admin().importEscaped() + .toPostRequestInformation(generateExportedZip(artifacts)); importReq.headers.replace("Content-Type", Set.of("application/zip")); adapter.sendPrimitive(importReq, new HashMap<>(), Void.class); @@ -66,12 +67,13 @@ public void testGeneratingCanonicalHashOnImport() throws Exception { String content = entry.getValue(); /* - TODO: Check if the canonical hash is generated correctly. - The only way is to generate canonical hash and then search artifact by it. But that needs apicurio-registry-app module as dependency. + * TODO: Check if the canonical hash is generated correctly. The only way is to generate canonical + * hash and then search artifact by it. But that needs apicurio-registry-app module as dependency. 
*/ try { - var registryContent = client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("1.0").content().get(); + var registryContent = client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("1.0").content().get(); assertNotNull(registryContent); assertEquals(content, IoUtil.toString(registryContent)); } catch (Error e) { @@ -152,13 +154,8 @@ public InputStream generateExportedZip(Map artifacts) { writer.writeEntity(versionEntity); - writer.writeEntity( - BranchEntity.builder() - .artifactId(artifactId) - .branchId(BranchId.LATEST.getRawBranchId()) - .versions(List.of("1")) - .build() - ); + writer.writeEntity(BranchEntity.builder().artifactId(artifactId) + .branchId(BranchId.LATEST.getRawBranchId()).versions(List.of("1")).build()); } zip.flush(); diff --git a/integration-tests/src/test/java/io/apicurio/tests/migration/MigrationTestsDataInitializer.java b/integration-tests/src/test/java/io/apicurio/tests/migration/MigrationTestsDataInitializer.java index 251f8b486b..836cd96bb7 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/migration/MigrationTestsDataInitializer.java +++ b/integration-tests/src/test/java/io/apicurio/tests/migration/MigrationTestsDataInitializer.java @@ -52,34 +52,47 @@ public static void initializeMigrateTest(RegistryClient source, String registryB String artifactId = idx + "-" + UUID.randomUUID().toString(); CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, - new String(jsonSchema.getSchemaStream().readAllBytes(), StandardCharsets.UTF_8), ContentTypes.APPLICATION_JSON); + new String(jsonSchema.getSchemaStream().readAllBytes(), StandardCharsets.UTF_8), + ContentTypes.APPLICATION_JSON); var response = source.groups().byGroupId("default").artifacts().post(createArtifact); TestUtils.retry(() -> source.ids().globalIds().byGlobalId(response.getVersion().getGlobalId())); 
migrateGlobalIds.add(response.getVersion().getGlobalId()); } for (int idx = 0; idx < 15; idx++) { - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(List.of("a" + idx)); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory( + List.of("a" + idx)); String artifactId = "avro-" + idx; - List references = idx > 0 ? getSingletonRefList("migrateTest", "avro-" + (idx - 1), "1", "myRef" + idx) : Collections.emptyList(); + List references = idx > 0 + ? getSingletonRefList("migrateTest", "avro-" + (idx - 1), "1", "myRef" + idx) + : Collections.emptyList(); CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, - new String(avroSchema.generateSchemaStream().readAllBytes(), StandardCharsets.UTF_8), ContentTypes.APPLICATION_JSON); + new String(avroSchema.generateSchemaStream().readAllBytes(), StandardCharsets.UTF_8), + ContentTypes.APPLICATION_JSON); createArtifact.getFirstVersion().getContent().setReferences(references); var response = source.groups().byGroupId("migrateTest").artifacts().post(createArtifact); - TestUtils.retry(() -> source.ids().globalIds().byGlobalId(response.getVersion().getGlobalId()).get()); - assertTrue(matchesReferences(references, source.ids().globalIds().byGlobalId(response.getVersion().getGlobalId()).references().get())); + TestUtils.retry( + () -> source.ids().globalIds().byGlobalId(response.getVersion().getGlobalId()).get()); + assertTrue(matchesReferences(references, source.ids().globalIds() + .byGlobalId(response.getVersion().getGlobalId()).references().get())); migrateReferencesMap.put(response.getVersion().getGlobalId(), references); migrateGlobalIds.add(response.getVersion().getGlobalId()); avroSchema = new AvroGenericRecordSchemaFactory(List.of("u" + idx)); - List updatedReferences = idx > 0 ? 
getSingletonRefList("migrateTest", "avro-" + (idx - 1), "2", "myRef" + idx) : Collections.emptyList(); - CreateVersion createVersion = TestUtils.clientCreateVersion(new String(avroSchema.generateSchemaStream().readAllBytes(), StandardCharsets.UTF_8), ContentTypes.APPLICATION_JSON); + List updatedReferences = idx > 0 + ? getSingletonRefList("migrateTest", "avro-" + (idx - 1), "2", "myRef" + idx) + : Collections.emptyList(); + CreateVersion createVersion = TestUtils.clientCreateVersion( + new String(avroSchema.generateSchemaStream().readAllBytes(), StandardCharsets.UTF_8), + ContentTypes.APPLICATION_JSON); createVersion.getContent().setReferences(updatedReferences); - var vmd = source.groups().byGroupId("migrateTest").artifacts().byArtifactId(artifactId).versions().post(createVersion); + var vmd = source.groups().byGroupId("migrateTest").artifacts().byArtifactId(artifactId).versions() + .post(createVersion); TestUtils.retry(() -> source.ids().globalIds().byGlobalId(vmd.getGlobalId())); - assertTrue(matchesReferences(updatedReferences, source.ids().globalIds().byGlobalId(vmd.getGlobalId()).references().get())); + assertTrue(matchesReferences(updatedReferences, + source.ids().globalIds().byGlobalId(vmd.getGlobalId()).references().get())); migrateReferencesMap.put(vmd.getGlobalId(), updatedReferences); migrateGlobalIds.add(vmd.getGlobalId()); } @@ -95,46 +108,61 @@ public static void initializeMigrateTest(RegistryClient source, String registryB var downloadHref = source.admin().export().get().getHref(); OkHttpClient client = new OkHttpClient(); - DataMigrationIT.migrateDataToImport = client.newCall(new Request.Builder().url(registryBaseUrl + downloadHref).build()).execute().body().byteStream(); + DataMigrationIT.migrateDataToImport = client + .newCall(new Request.Builder().url(registryBaseUrl + downloadHref).build()).execute().body() + .byteStream(); } - public static void initializeDoNotPreserveIdsImport(RegistryClient source, String registryBaseUrl) throws Exception { 
+ public static void initializeDoNotPreserveIdsImport(RegistryClient source, String registryBaseUrl) + throws Exception { // Fill the source registry with data JsonSchemaMsgFactory jsonSchema = new JsonSchemaMsgFactory(); for (int idx = 0; idx < 50; idx++) { String artifactId = idx + "-" + UUID.randomUUID().toString(); String content = IoUtil.toString(jsonSchema.getSchemaStream()); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, content, ContentTypes.APPLICATION_JSON); - var response = source.groups().byGroupId("testDoNotPreserveIdsImport").artifacts().post(createArtifact); - TestUtils.retry(() -> source.ids().globalIds().byGlobalId(response.getVersion().getGlobalId()).get()); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.JSON, + content, ContentTypes.APPLICATION_JSON); + var response = source.groups().byGroupId("testDoNotPreserveIdsImport").artifacts() + .post(createArtifact); + TestUtils.retry( + () -> source.ids().globalIds().byGlobalId(response.getVersion().getGlobalId()).get()); doNotPreserveIdsImportArtifacts.put("testDoNotPreserveIdsImport:" + artifactId, content); } for (int idx = 0; idx < 15; idx++) { - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(List.of("a" + idx)); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory( + List.of("a" + idx)); String artifactId = "avro-" + idx + "-" + UUID.randomUUID().toString(); String content = IoUtil.toString(avroSchema.generateSchemaStream()); - CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON); - var response = source.groups().byGroupId("testDoNotPreserveIdsImport").artifacts().post(createArtifact); - TestUtils.retry(() -> source.ids().globalIds().byGlobalId(response.getVersion().getGlobalId()).get()); + CreateArtifact createArtifact = TestUtils.clientCreateArtifact(artifactId, 
ArtifactType.AVRO, + content, ContentTypes.APPLICATION_JSON); + var response = source.groups().byGroupId("testDoNotPreserveIdsImport").artifacts() + .post(createArtifact); + TestUtils.retry( + () -> source.ids().globalIds().byGlobalId(response.getVersion().getGlobalId()).get()); doNotPreserveIdsImportArtifacts.put("testDoNotPreserveIdsImport:" + artifactId, content); avroSchema = new AvroGenericRecordSchemaFactory(List.of("u" + idx)); String content2 = IoUtil.toString(avroSchema.generateSchemaStream()); - CreateVersion createVersion = TestUtils.clientCreateVersion(content2, ContentTypes.APPLICATION_JSON); - var vmd = source.groups().byGroupId("testDoNotPreserveIdsImport").artifacts().byArtifactId(artifactId).versions().post(createVersion); + CreateVersion createVersion = TestUtils.clientCreateVersion(content2, + ContentTypes.APPLICATION_JSON); + var vmd = source.groups().byGroupId("testDoNotPreserveIdsImport").artifacts() + .byArtifactId(artifactId).versions().post(createVersion); TestUtils.retry(() -> source.ids().globalIds().byGlobalId(vmd.getGlobalId()).get()); doNotPreserveIdsImportArtifacts.put("testDoNotPreserveIdsImport:" + artifactId, content2); } var downloadHref = source.admin().export().get().getHref(); OkHttpClient client = new OkHttpClient(); - DoNotPreserveIdsImportIT.doNotPreserveIdsImportDataToImport = client.newCall(new Request.Builder().url(registryBaseUrl + downloadHref).build()).execute().body().byteStream(); + DoNotPreserveIdsImportIT.doNotPreserveIdsImportDataToImport = client + .newCall(new Request.Builder().url(registryBaseUrl + downloadHref).build()).execute().body() + .byteStream(); DoNotPreserveIdsImportIT.jsonSchema = jsonSchema; } - protected static List getSingletonRefList(String groupId, String artifactId, String version, String name) { + protected static List getSingletonRefList(String groupId, String artifactId, + String version, String name) { ArtifactReference artifactReference = new ArtifactReference(); 
artifactReference.setGroupId(groupId); artifactReference.setArtifactId(artifactId); @@ -143,14 +171,14 @@ protected static List getSingletonRefList(String groupId, Str return Collections.singletonList(artifactReference); } - public static boolean matchesReferences(List srcReferences, List destReferences) { - return destReferences.size() == srcReferences.size() && destReferences.stream().allMatch( - srcRef -> srcReferences.stream().anyMatch(destRef -> - Objects.equals(srcRef.getGroupId(), destRef.getGroupId()) && - Objects.equals(srcRef.getArtifactId(), destRef.getArtifactId()) && - Objects.equals(srcRef.getVersion(), destRef.getVersion()) && - Objects.equals(srcRef.getName(), destRef.getName())) - ); + public static boolean matchesReferences(List srcReferences, + List destReferences) { + return destReferences.size() == srcReferences.size() && destReferences.stream() + .allMatch(srcRef -> srcReferences.stream() + .anyMatch(destRef -> Objects.equals(srcRef.getGroupId(), destRef.getGroupId()) + && Objects.equals(srcRef.getArtifactId(), destRef.getArtifactId()) + && Objects.equals(srcRef.getVersion(), destRef.getVersion()) + && Objects.equals(srcRef.getName(), destRef.getName()))); } public InputStream generateExportedZip(Map artifacts) { diff --git a/integration-tests/src/test/java/io/apicurio/tests/protobuf/Header.java b/integration-tests/src/test/java/io/apicurio/tests/protobuf/Header.java index 05ab3ceeba..de8bd1e304 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/protobuf/Header.java +++ b/integration-tests/src/test/java/io/apicurio/tests/protobuf/Header.java @@ -6,1487 +6,1514 @@ /** * Protobuf type {@code io.apicurio.tests.protobuf.Header} */ -public final class Header extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:io.apicurio.tests.protobuf.Header) +public final class Header extends com.google.protobuf.GeneratedMessageV3 implements + // 
@@protoc_insertion_point(message_implements:io.apicurio.tests.protobuf.Header) HeaderOrBuilder { -private static final long serialVersionUID = 0L; - // Use Header.newBuilder() to construct. - private Header(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private Header() { - source_ = ""; - destination_ = ""; - uuid_ = ""; - sourceUuids_ = com.google.protobuf.LazyStringArrayList.EMPTY; - messageTypeId_ = ""; - rawMessage_ = ""; - } - - @Override - @SuppressWarnings({"unused"}) - protected Object newInstance( - UnusedPrivateParameter unused) { - return new Header(); - } - - @Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private Header( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - com.google.protobuf.Timestamp.Builder subBuilder = null; - if (time_ != null) { - subBuilder = time_.toBuilder(); - } - time_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(time_); - time_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - source_ = s; - break; - } - case 26: { - String s = input.readStringRequireUtf8(); - - destination_ = s; - break; - } - case 34: { - String s = input.readStringRequireUtf8(); - - uuid_ = s; - break; - } - case 42: { - String s = input.readStringRequireUtf8(); - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - 
sourceUuids_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000001; - } - sourceUuids_.add(s); - break; - } - case 50: { - String s = input.readStringRequireUtf8(); - - messageTypeId_ = s; - break; - } - case 58: { - String s = input.readStringRequireUtf8(); - - rawMessage_ = s; - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - sourceUuids_ = sourceUuids_.getUnmodifiableView(); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_descriptor; - } - - @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_fieldAccessorTable - .ensureFieldAccessorsInitialized( - Header.class, Builder.class); - } - - public static final int TIME_FIELD_NUMBER = 1; - private com.google.protobuf.Timestamp time_; - /** - * .google.protobuf.Timestamp time = 1; - * @return Whether the time field is set. - */ - @Override - public boolean hasTime() { - return time_ != null; - } - /** - * .google.protobuf.Timestamp time = 1; - * @return The time. - */ - @Override - public com.google.protobuf.Timestamp getTime() { - return time_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : time_; - } - /** - * .google.protobuf.Timestamp time = 1; - */ - @Override - public com.google.protobuf.TimestampOrBuilder getTimeOrBuilder() { - return getTime(); - } - - public static final int SOURCE_FIELD_NUMBER = 2; - private volatile Object source_; - /** - * string source = 2; - * @return The source. - */ - @Override - public String getSource() { - Object ref = source_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - source_ = s; - return s; - } - } - /** - * string source = 2; - * @return The bytes for source. - */ - @Override - public com.google.protobuf.ByteString - getSourceBytes() { - Object ref = source_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - source_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int DESTINATION_FIELD_NUMBER = 3; - private volatile Object destination_; - /** - * string destination = 3; - * @return The destination. - */ - @Override - public String getDestination() { - Object ref = destination_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - destination_ = s; - return s; - } - } - /** - * string destination = 3; - * @return The bytes for destination. 
- */ - @Override - public com.google.protobuf.ByteString - getDestinationBytes() { - Object ref = destination_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - destination_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int UUID_FIELD_NUMBER = 4; - private volatile Object uuid_; - /** - * string uuid = 4; - * @return The uuid. - */ - @Override - public String getUuid() { - Object ref = uuid_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - uuid_ = s; - return s; - } - } - /** - * string uuid = 4; - * @return The bytes for uuid. - */ - @Override - public com.google.protobuf.ByteString - getUuidBytes() { - Object ref = uuid_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - uuid_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int SOURCE_UUIDS_FIELD_NUMBER = 5; - private com.google.protobuf.LazyStringList sourceUuids_; - /** - * repeated string source_uuids = 5; - * @return A list containing the sourceUuids. - */ - public com.google.protobuf.ProtocolStringList - getSourceUuidsList() { - return sourceUuids_; - } - /** - * repeated string source_uuids = 5; - * @return The count of sourceUuids. - */ - public int getSourceUuidsCount() { - return sourceUuids_.size(); - } - /** - * repeated string source_uuids = 5; - * @param index The index of the element to return. - * @return The sourceUuids at the given index. - */ - public String getSourceUuids(int index) { - return sourceUuids_.get(index); - } - /** - * repeated string source_uuids = 5; - * @param index The index of the value to return. - * @return The bytes of the sourceUuids at the given index. 
- */ - public com.google.protobuf.ByteString - getSourceUuidsBytes(int index) { - return sourceUuids_.getByteString(index); - } - - public static final int MESSAGE_TYPE_ID_FIELD_NUMBER = 6; - private volatile Object messageTypeId_; - /** - * string message_type_id = 6; - * @return The messageTypeId. - */ - @Override - public String getMessageTypeId() { - Object ref = messageTypeId_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - messageTypeId_ = s; - return s; - } - } - /** - * string message_type_id = 6; - * @return The bytes for messageTypeId. - */ - @Override - public com.google.protobuf.ByteString - getMessageTypeIdBytes() { - Object ref = messageTypeId_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - messageTypeId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int RAW_MESSAGE_FIELD_NUMBER = 7; - private volatile Object rawMessage_; - /** - * string raw_message = 7; - * @return The rawMessage. - */ - @Override - public String getRawMessage() { - Object ref = rawMessage_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - rawMessage_ = s; - return s; - } - } - /** - * string raw_message = 7; - * @return The bytes for rawMessage. 
- */ - @Override - public com.google.protobuf.ByteString - getRawMessageBytes() { - Object ref = rawMessage_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - rawMessage_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - @Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (time_ != null) { - output.writeMessage(1, getTime()); - } - if (!getSourceBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, source_); - } - if (!getDestinationBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, destination_); - } - if (!getUuidBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 4, uuid_); - } - for (int i = 0; i < sourceUuids_.size(); i++) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 5, sourceUuids_.getRaw(i)); - } - if (!getMessageTypeIdBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 6, messageTypeId_); - } - if (!getRawMessageBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 7, rawMessage_); - } - unknownFields.writeTo(output); - } - - @Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (time_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getTime()); - } - if (!getSourceBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, source_); - } - if (!getDestinationBytes().isEmpty()) { - size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(3, destination_); - } - if (!getUuidBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, uuid_); - } - { - int dataSize = 0; - for (int i = 0; i < sourceUuids_.size(); i++) { - dataSize += computeStringSizeNoTag(sourceUuids_.getRaw(i)); - } - size += dataSize; - size += 1 * getSourceUuidsList().size(); - } - if (!getMessageTypeIdBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, messageTypeId_); - } - if (!getRawMessageBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, rawMessage_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @Override - public boolean equals(final Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof Header)) { - return super.equals(obj); - } - Header other = (Header) obj; - - if (hasTime() != other.hasTime()) return false; - if (hasTime()) { - if (!getTime() - .equals(other.getTime())) return false; - } - if (!getSource() - .equals(other.getSource())) return false; - if (!getDestination() - .equals(other.getDestination())) return false; - if (!getUuid() - .equals(other.getUuid())) return false; - if (!getSourceUuidsList() - .equals(other.getSourceUuidsList())) return false; - if (!getMessageTypeId() - .equals(other.getMessageTypeId())) return false; - if (!getRawMessage() - .equals(other.getRawMessage())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasTime()) { - hash = (37 * hash) + TIME_FIELD_NUMBER; - hash = (53 * hash) + getTime().hashCode(); - } - hash = (37 * hash) + SOURCE_FIELD_NUMBER; - hash = (53 * hash) + getSource().hashCode(); - hash = (37 * hash) + 
DESTINATION_FIELD_NUMBER; - hash = (53 * hash) + getDestination().hashCode(); - hash = (37 * hash) + UUID_FIELD_NUMBER; - hash = (53 * hash) + getUuid().hashCode(); - if (getSourceUuidsCount() > 0) { - hash = (37 * hash) + SOURCE_UUIDS_FIELD_NUMBER; - hash = (53 * hash) + getSourceUuidsList().hashCode(); - } - hash = (37 * hash) + MESSAGE_TYPE_ID_FIELD_NUMBER; - hash = (53 * hash) + getMessageTypeId().hashCode(); - hash = (37 * hash) + RAW_MESSAGE_FIELD_NUMBER; - hash = (53 * hash) + getRawMessage().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static Header parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static Header parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static Header parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static Header parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static Header parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static Header parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static Header parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - 
} - public static Header parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static Header parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static Header parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static Header parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static Header parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(Header prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @Override - protected Builder newBuilderForType( - BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code io.apicurio.tests.protobuf.Header} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:io.apicurio.tests.protobuf.Header) - HeaderOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_descriptor; - } - - @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_fieldAccessorTable - .ensureFieldAccessorsInitialized( - Header.class, Builder.class); - } - - // Construct using io.apicurio.tests.protobuf.Header.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @Override - public Builder clear() { - super.clear(); - if (timeBuilder_ == null) { - time_ = null; - } else { - time_ = null; - timeBuilder_ = null; - } - source_ = ""; - - destination_ = ""; - - uuid_ = ""; - - sourceUuids_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - messageTypeId_ = ""; - - rawMessage_ = ""; - - return this; - } + private static final long serialVersionUID = 0L; - @Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_descriptor; - } - - @Override - public Header getDefaultInstanceForType() { - return Header.getDefaultInstance(); + // Use Header.newBuilder() 
to construct. + private Header(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); } - @Override - public Header build() { - Header result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; + private Header() { + source_ = ""; + destination_ = ""; + uuid_ = ""; + sourceUuids_ = com.google.protobuf.LazyStringArrayList.EMPTY; + messageTypeId_ = ""; + rawMessage_ = ""; } @Override - public Header buildPartial() { - Header result = new Header(this); - int from_bitField0_ = bitField0_; - if (timeBuilder_ == null) { - result.time_ = time_; - } else { - result.time_ = timeBuilder_.build(); - } - result.source_ = source_; - result.destination_ = destination_; - result.uuid_ = uuid_; - if (((bitField0_ & 0x00000001) != 0)) { - sourceUuids_ = sourceUuids_.getUnmodifiableView(); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.sourceUuids_ = sourceUuids_; - result.messageTypeId_ = messageTypeId_; - result.rawMessage_ = rawMessage_; - onBuilt(); - return result; + @SuppressWarnings({ "unused" }) + protected Object newInstance(UnusedPrivateParameter unused) { + return new Header(); } @Override - public Builder clone() { - return super.clone(); - } - @Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return super.setField(field, value); - } - @Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return super.setRepeatedField(field, index, value); - } - @Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) 
{ - return super.addRepeatedField(field, value); - } - @Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof Header) { - return mergeFrom((Header)other); - } else { - super.mergeFrom(other); - return this; - } + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; } - public Builder mergeFrom(Header other) { - if (other == Header.getDefaultInstance()) return this; - if (other.hasTime()) { - mergeTime(other.getTime()); - } - if (!other.getSource().isEmpty()) { - source_ = other.source_; - onChanged(); - } - if (!other.getDestination().isEmpty()) { - destination_ = other.destination_; - onChanged(); - } - if (!other.getUuid().isEmpty()) { - uuid_ = other.uuid_; - onChanged(); - } - if (!other.sourceUuids_.isEmpty()) { - if (sourceUuids_.isEmpty()) { - sourceUuids_ = other.sourceUuids_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureSourceUuidsIsMutable(); - sourceUuids_.addAll(other.sourceUuids_); - } - onChanged(); - } - if (!other.getMessageTypeId().isEmpty()) { - messageTypeId_ = other.messageTypeId_; - onChanged(); - } - if (!other.getRawMessage().isEmpty()) { - rawMessage_ = other.rawMessage_; - onChanged(); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; + private Header(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet + .newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (time_ != null) { + subBuilder = time_.toBuilder(); + } + 
time_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(time_); + time_ = subBuilder.buildPartial(); + } + + break; + } + case 18: { + String s = input.readStringRequireUtf8(); + + source_ = s; + break; + } + case 26: { + String s = input.readStringRequireUtf8(); + + destination_ = s; + break; + } + case 34: { + String s = input.readStringRequireUtf8(); + + uuid_ = s; + break; + } + case 42: { + String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + sourceUuids_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + sourceUuids_.add(s); + break; + } + case 50: { + String s = input.readStringRequireUtf8(); + + messageTypeId_ = s; + break; + } + case 58: { + String s = input.readStringRequireUtf8(); + + rawMessage_ = s; + break; + } + default: { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) != 0)) { + sourceUuids_ = sourceUuids_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } } - @Override - public final boolean isInitialized() { - return true; + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_descriptor; } @Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Header parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (Header) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; + protected FieldAccessorTable internalGetFieldAccessorTable() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_fieldAccessorTable + .ensureFieldAccessorsInitialized(Header.class, Builder.class); } - private int bitField0_; + public static final int TIME_FIELD_NUMBER = 1; private com.google.protobuf.Timestamp time_; - private com.google.protobuf.SingleFieldBuilderV3< - com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> timeBuilder_; + /** * .google.protobuf.Timestamp time = 1; + * * @return Whether the time field is set. */ + @Override public boolean hasTime() { - return timeBuilder_ != null || time_ != null; + return time_ != null; } + /** * .google.protobuf.Timestamp time = 1; + * * @return The time. */ + @Override public com.google.protobuf.Timestamp getTime() { - if (timeBuilder_ == null) { return time_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : time_; - } else { - return timeBuilder_.getMessage(); - } - } - /** - * .google.protobuf.Timestamp time = 1; - */ - public Builder setTime(com.google.protobuf.Timestamp value) { - if (timeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - time_ = value; - onChanged(); - } else { - timeBuilder_.setMessage(value); - } - - return this; - } - /** - * .google.protobuf.Timestamp time = 1; - */ - public Builder setTime( - com.google.protobuf.Timestamp.Builder builderForValue) { - if (timeBuilder_ == null) { - time_ = builderForValue.build(); - onChanged(); - } else { - timeBuilder_.setMessage(builderForValue.build()); - } - - return this; } - /** - * .google.protobuf.Timestamp time = 1; - */ - public Builder mergeTime(com.google.protobuf.Timestamp value) { - if (timeBuilder_ == null) { - if (time_ != null) { - time_ = - com.google.protobuf.Timestamp.newBuilder(time_).mergeFrom(value).buildPartial(); - } else { - time_ = value; - } - onChanged(); - } else { - timeBuilder_.mergeFrom(value); - } - return this; - } - /** - * .google.protobuf.Timestamp time = 1; - */ - public Builder clearTime() { - if (timeBuilder_ == null) { - time_ = null; - onChanged(); - } else { - time_ = null; - timeBuilder_ = null; - } - - return this; - } - /** - * .google.protobuf.Timestamp time = 1; - */ - public com.google.protobuf.Timestamp.Builder getTimeBuilder() { - - onChanged(); - return getTimeFieldBuilder().getBuilder(); - } /** * .google.protobuf.Timestamp time = 1; */ + @Override public com.google.protobuf.TimestampOrBuilder getTimeOrBuilder() { - if (timeBuilder_ != null) { - return timeBuilder_.getMessageOrBuilder(); - } else { - return time_ == null ? 
- com.google.protobuf.Timestamp.getDefaultInstance() : time_; - } - } - /** - * .google.protobuf.Timestamp time = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> - getTimeFieldBuilder() { - if (timeBuilder_ == null) { - timeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( - getTime(), - getParentForChildren(), - isClean()); - time_ = null; - } - return timeBuilder_; + return getTime(); } - private Object source_ = ""; + public static final int SOURCE_FIELD_NUMBER = 2; + private volatile Object source_; + /** * string source = 2; + * * @return The source. */ + @Override public String getSource() { - Object ref = source_; - if (!(ref instanceof String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - source_ = s; - return s; - } else { - return (String) ref; - } + Object ref = source_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + source_ = s; + return s; + } } + /** * string source = 2; + * * @return The bytes for source. */ - public com.google.protobuf.ByteString - getSourceBytes() { - Object ref = source_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - source_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string source = 2; - * @param value The source to set. - * @return This builder for chaining. 
- */ - public Builder setSource( - String value) { - if (value == null) { - throw new NullPointerException(); - } - - source_ = value; - onChanged(); - return this; - } - /** - * string source = 2; - * @return This builder for chaining. - */ - public Builder clearSource() { - - source_ = getDefaultInstance().getSource(); - onChanged(); - return this; - } - /** - * string source = 2; - * @param value The bytes for source to set. - * @return This builder for chaining. - */ - public Builder setSourceBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - source_ = value; - onChanged(); - return this; + @Override + public com.google.protobuf.ByteString getSourceBytes() { + Object ref = source_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + source_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - private Object destination_ = ""; + public static final int DESTINATION_FIELD_NUMBER = 3; + private volatile Object destination_; + /** * string destination = 3; + * * @return The destination. */ + @Override public String getDestination() { - Object ref = destination_; - if (!(ref instanceof String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - destination_ = s; - return s; - } else { - return (String) ref; - } + Object ref = destination_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + destination_ = s; + return s; + } } + /** * string destination = 3; + * * @return The bytes for destination. 
*/ - public com.google.protobuf.ByteString - getDestinationBytes() { - Object ref = destination_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - destination_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string destination = 3; - * @param value The destination to set. - * @return This builder for chaining. - */ - public Builder setDestination( - String value) { - if (value == null) { - throw new NullPointerException(); - } - - destination_ = value; - onChanged(); - return this; - } - /** - * string destination = 3; - * @return This builder for chaining. - */ - public Builder clearDestination() { - - destination_ = getDefaultInstance().getDestination(); - onChanged(); - return this; - } - /** - * string destination = 3; - * @param value The bytes for destination to set. - * @return This builder for chaining. - */ - public Builder setDestinationBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - destination_ = value; - onChanged(); - return this; + @Override + public com.google.protobuf.ByteString getDestinationBytes() { + Object ref = destination_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + destination_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - private Object uuid_ = ""; + public static final int UUID_FIELD_NUMBER = 4; + private volatile Object uuid_; + /** * string uuid = 4; + * * @return The uuid. 
*/ + @Override public String getUuid() { - Object ref = uuid_; - if (!(ref instanceof String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - uuid_ = s; - return s; - } else { - return (String) ref; - } + Object ref = uuid_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + uuid_ = s; + return s; + } } + /** * string uuid = 4; + * * @return The bytes for uuid. */ - public com.google.protobuf.ByteString - getUuidBytes() { - Object ref = uuid_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - uuid_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string uuid = 4; - * @param value The uuid to set. - * @return This builder for chaining. - */ - public Builder setUuid( - String value) { - if (value == null) { - throw new NullPointerException(); - } - - uuid_ = value; - onChanged(); - return this; - } - /** - * string uuid = 4; - * @return This builder for chaining. - */ - public Builder clearUuid() { - - uuid_ = getDefaultInstance().getUuid(); - onChanged(); - return this; - } - /** - * string uuid = 4; - * @param value The bytes for uuid to set. - * @return This builder for chaining. 
- */ - public Builder setUuidBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - uuid_ = value; - onChanged(); - return this; + @Override + public com.google.protobuf.ByteString getUuidBytes() { + Object ref = uuid_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + uuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - private com.google.protobuf.LazyStringList sourceUuids_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureSourceUuidsIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { - sourceUuids_ = new com.google.protobuf.LazyStringArrayList(sourceUuids_); - bitField0_ |= 0x00000001; - } - } + public static final int SOURCE_UUIDS_FIELD_NUMBER = 5; + private com.google.protobuf.LazyStringList sourceUuids_; + /** * repeated string source_uuids = 5; + * * @return A list containing the sourceUuids. */ - public com.google.protobuf.ProtocolStringList - getSourceUuidsList() { - return sourceUuids_.getUnmodifiableView(); + public com.google.protobuf.ProtocolStringList getSourceUuidsList() { + return sourceUuids_; } + /** * repeated string source_uuids = 5; + * * @return The count of sourceUuids. */ public int getSourceUuidsCount() { - return sourceUuids_.size(); + return sourceUuids_.size(); } + /** * repeated string source_uuids = 5; + * * @param index The index of the element to return. * @return The sourceUuids at the given index. */ public String getSourceUuids(int index) { - return sourceUuids_.get(index); + return sourceUuids_.get(index); } + /** * repeated string source_uuids = 5; + * * @param index The index of the value to return. * @return The bytes of the sourceUuids at the given index. 
*/ - public com.google.protobuf.ByteString - getSourceUuidsBytes(int index) { - return sourceUuids_.getByteString(index); - } - /** - * repeated string source_uuids = 5; - * @param index The index to set the value at. - * @param value The sourceUuids to set. - * @return This builder for chaining. - */ - public Builder setSourceUuids( - int index, String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureSourceUuidsIsMutable(); - sourceUuids_.set(index, value); - onChanged(); - return this; - } - /** - * repeated string source_uuids = 5; - * @param value The sourceUuids to add. - * @return This builder for chaining. - */ - public Builder addSourceUuids( - String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureSourceUuidsIsMutable(); - sourceUuids_.add(value); - onChanged(); - return this; - } - /** - * repeated string source_uuids = 5; - * @param values The sourceUuids to add. - * @return This builder for chaining. - */ - public Builder addAllSourceUuids( - Iterable values) { - ensureSourceUuidsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, sourceUuids_); - onChanged(); - return this; - } - /** - * repeated string source_uuids = 5; - * @return This builder for chaining. - */ - public Builder clearSourceUuids() { - sourceUuids_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - /** - * repeated string source_uuids = 5; - * @param value The bytes of the sourceUuids to add. - * @return This builder for chaining. 
- */ - public Builder addSourceUuidsBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - ensureSourceUuidsIsMutable(); - sourceUuids_.add(value); - onChanged(); - return this; + public com.google.protobuf.ByteString getSourceUuidsBytes(int index) { + return sourceUuids_.getByteString(index); } - private Object messageTypeId_ = ""; + public static final int MESSAGE_TYPE_ID_FIELD_NUMBER = 6; + private volatile Object messageTypeId_; + /** * string message_type_id = 6; + * * @return The messageTypeId. */ + @Override public String getMessageTypeId() { - Object ref = messageTypeId_; - if (!(ref instanceof String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - messageTypeId_ = s; - return s; - } else { - return (String) ref; - } + Object ref = messageTypeId_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + messageTypeId_ = s; + return s; + } } + /** * string message_type_id = 6; + * * @return The bytes for messageTypeId. */ - public com.google.protobuf.ByteString - getMessageTypeIdBytes() { - Object ref = messageTypeId_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - messageTypeId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string message_type_id = 6; - * @param value The messageTypeId to set. - * @return This builder for chaining. - */ - public Builder setMessageTypeId( - String value) { - if (value == null) { - throw new NullPointerException(); - } - - messageTypeId_ = value; - onChanged(); - return this; - } - /** - * string message_type_id = 6; - * @return This builder for chaining. 
- */ - public Builder clearMessageTypeId() { - - messageTypeId_ = getDefaultInstance().getMessageTypeId(); - onChanged(); - return this; - } - /** - * string message_type_id = 6; - * @param value The bytes for messageTypeId to set. - * @return This builder for chaining. - */ - public Builder setMessageTypeIdBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - messageTypeId_ = value; - onChanged(); - return this; + @Override + public com.google.protobuf.ByteString getMessageTypeIdBytes() { + Object ref = messageTypeId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + messageTypeId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - private Object rawMessage_ = ""; + public static final int RAW_MESSAGE_FIELD_NUMBER = 7; + private volatile Object rawMessage_; + /** * string raw_message = 7; + * * @return The rawMessage. */ + @Override public String getRawMessage() { - Object ref = rawMessage_; - if (!(ref instanceof String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - rawMessage_ = s; - return s; - } else { - return (String) ref; - } + Object ref = rawMessage_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + rawMessage_ = s; + return s; + } } + /** * string raw_message = 7; + * * @return The bytes for rawMessage. 
*/ - public com.google.protobuf.ByteString - getRawMessageBytes() { - Object ref = rawMessage_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - rawMessage_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string raw_message = 7; - * @param value The rawMessage to set. - * @return This builder for chaining. - */ - public Builder setRawMessage( - String value) { - if (value == null) { - throw new NullPointerException(); - } - - rawMessage_ = value; - onChanged(); - return this; - } - /** - * string raw_message = 7; - * @return This builder for chaining. - */ - public Builder clearRawMessage() { - - rawMessage_ = getDefaultInstance().getRawMessage(); - onChanged(); - return this; - } - /** - * string raw_message = 7; - * @param value The bytes for rawMessage to set. - * @return This builder for chaining. - */ - public Builder setRawMessageBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - rawMessage_ = value; - onChanged(); - return this; - } @Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); + public com.google.protobuf.ByteString getRawMessageBytes() { + Object ref = rawMessage_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + rawMessage_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - @Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } + private byte memoizedIsInitialized = -1; + @Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) + 
return true; + if (isInitialized == 0) + return false; - // @@protoc_insertion_point(builder_scope:io.apicurio.tests.protobuf.Header) - } + memoizedIsInitialized = 1; + return true; + } - // @@protoc_insertion_point(class_scope:io.apicurio.tests.protobuf.Header) - private static final Header DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new Header(); - } + @Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (time_ != null) { + output.writeMessage(1, getTime()); + } + if (!getSourceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, source_); + } + if (!getDestinationBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, destination_); + } + if (!getUuidBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, uuid_); + } + for (int i = 0; i < sourceUuids_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, sourceUuids_.getRaw(i)); + } + if (!getMessageTypeIdBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, messageTypeId_); + } + if (!getRawMessageBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 7, rawMessage_); + } + unknownFields.writeTo(output); + } - public static Header getDefaultInstance() { - return DEFAULT_INSTANCE; - } + @Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) + return size; + + size = 0; + if (time_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTime()); + } + if (!getSourceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, source_); + } + if (!getDestinationBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, destination_); + } + if (!getUuidBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, uuid_); + } + { + 
int dataSize = 0; + for (int i = 0; i < sourceUuids_.size(); i++) { + dataSize += computeStringSizeNoTag(sourceUuids_.getRaw(i)); + } + size += dataSize; + size += 1 * getSourceUuidsList().size(); + } + if (!getMessageTypeIdBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, messageTypeId_); + } + if (!getRawMessageBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, rawMessage_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @Override + public boolean equals(final Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof Header)) { + return super.equals(obj); + } + Header other = (Header) obj; + + if (hasTime() != other.hasTime()) + return false; + if (hasTime()) { + if (!getTime().equals(other.getTime())) + return false; + } + if (!getSource().equals(other.getSource())) + return false; + if (!getDestination().equals(other.getDestination())) + return false; + if (!getUuid().equals(other.getUuid())) + return false; + if (!getSourceUuidsList().equals(other.getSourceUuidsList())) + return false; + if (!getMessageTypeId().equals(other.getMessageTypeId())) + return false; + if (!getRawMessage().equals(other.getRawMessage())) + return false; + if (!unknownFields.equals(other.unknownFields)) + return false; + return true; + } - private static final com.google.protobuf.Parser
- PARSER = new com.google.protobuf.AbstractParser
() { @Override - public Header parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Header(input, extensionRegistry); + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasTime()) { + hash = (37 * hash) + TIME_FIELD_NUMBER; + hash = (53 * hash) + getTime().hashCode(); + } + hash = (37 * hash) + SOURCE_FIELD_NUMBER; + hash = (53 * hash) + getSource().hashCode(); + hash = (37 * hash) + DESTINATION_FIELD_NUMBER; + hash = (53 * hash) + getDestination().hashCode(); + hash = (37 * hash) + UUID_FIELD_NUMBER; + hash = (53 * hash) + getUuid().hashCode(); + if (getSourceUuidsCount() > 0) { + hash = (37 * hash) + SOURCE_UUIDS_FIELD_NUMBER; + hash = (53 * hash) + getSourceUuidsList().hashCode(); + } + hash = (37 * hash) + MESSAGE_TYPE_ID_FIELD_NUMBER; + hash = (53 * hash) + getMessageTypeId().hashCode(); + hash = (37 * hash) + RAW_MESSAGE_FIELD_NUMBER; + hash = (53 * hash) + getRawMessage().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; } - }; - public static com.google.protobuf.Parser
parser() { - return PARSER; - } + public static Header parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } - @Override - public com.google.protobuf.Parser
getParserForType() { - return PARSER; - } + public static Header parseFrom(java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } - @Override - public Header getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } + public static Header parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } -} + public static Header parseFrom(com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static Header parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static Header parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static Header parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static Header parseFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + } + + public static Header parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static Header parseDelimitedFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, + extensionRegistry); + } + + public static Header parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static Header parseFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + } + + @Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(Header prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + + @Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); + } + + @Override + protected Builder newBuilderForType(BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + + /** + * Protobuf type {@code io.apicurio.tests.protobuf.Header} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:io.apicurio.tests.protobuf.Header) + HeaderOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_descriptor; + } + + @Override + protected FieldAccessorTable internalGetFieldAccessorTable() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_fieldAccessorTable + .ensureFieldAccessorsInitialized(Header.class, Builder.class); + } + + // Construct using io.apicurio.tests.protobuf.Header.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent 
parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + } + } + + @Override + public Builder clear() { + super.clear(); + if (timeBuilder_ == null) { + time_ = null; + } else { + time_ = null; + timeBuilder_ = null; + } + source_ = ""; + + destination_ = ""; + + uuid_ = ""; + + sourceUuids_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + messageTypeId_ = ""; + + rawMessage_ = ""; + + return this; + } + + @Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Header_descriptor; + } + + @Override + public Header getDefaultInstanceForType() { + return Header.getDefaultInstance(); + } + + @Override + public Header build() { + Header result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @Override + public Header buildPartial() { + Header result = new Header(this); + int from_bitField0_ = bitField0_; + if (timeBuilder_ == null) { + result.time_ = time_; + } else { + result.time_ = timeBuilder_.build(); + } + result.source_ = source_; + result.destination_ = destination_; + result.uuid_ = uuid_; + if (((bitField0_ & 0x00000001) != 0)) { + sourceUuids_ = sourceUuids_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.sourceUuids_ = sourceUuids_; + result.messageTypeId_ = messageTypeId_; + result.rawMessage_ = rawMessage_; + onBuilt(); + return result; + } + + @Override + public Builder clone() { + return super.clone(); + } + + @Override + public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { + return super.setField(field, value); + } + + @Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return 
super.clearField(field); + } + + @Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @Override + public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, + Object value) { + return super.setRepeatedField(field, index, value); + } + + @Override + public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { + return super.addRepeatedField(field, value); + } + + @Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof Header) { + return mergeFrom((Header) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(Header other) { + if (other == Header.getDefaultInstance()) + return this; + if (other.hasTime()) { + mergeTime(other.getTime()); + } + if (!other.getSource().isEmpty()) { + source_ = other.source_; + onChanged(); + } + if (!other.getDestination().isEmpty()) { + destination_ = other.destination_; + onChanged(); + } + if (!other.getUuid().isEmpty()) { + uuid_ = other.uuid_; + onChanged(); + } + if (!other.sourceUuids_.isEmpty()) { + if (sourceUuids_.isEmpty()) { + sourceUuids_ = other.sourceUuids_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureSourceUuidsIsMutable(); + sourceUuids_.addAll(other.sourceUuids_); + } + onChanged(); + } + if (!other.getMessageTypeId().isEmpty()) { + messageTypeId_ = other.messageTypeId_; + onChanged(); + } + if (!other.getRawMessage().isEmpty()) { + rawMessage_ = other.rawMessage_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @Override + public final boolean isInitialized() { + return true; + } + + @Override + public Builder mergeFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + Header parsedMessage = null; + try { + 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (Header) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int bitField0_; + + private com.google.protobuf.Timestamp time_; + private com.google.protobuf.SingleFieldBuilderV3 timeBuilder_; + + /** + * .google.protobuf.Timestamp time = 1; + * + * @return Whether the time field is set. + */ + public boolean hasTime() { + return timeBuilder_ != null || time_ != null; + } + + /** + * .google.protobuf.Timestamp time = 1; + * + * @return The time. + */ + public com.google.protobuf.Timestamp getTime() { + if (timeBuilder_ == null) { + return time_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : time_; + } else { + return timeBuilder_.getMessage(); + } + } + + /** + * .google.protobuf.Timestamp time = 1; + */ + public Builder setTime(com.google.protobuf.Timestamp value) { + if (timeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + time_ = value; + onChanged(); + } else { + timeBuilder_.setMessage(value); + } + + return this; + } + + /** + * .google.protobuf.Timestamp time = 1; + */ + public Builder setTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (timeBuilder_ == null) { + time_ = builderForValue.build(); + onChanged(); + } else { + timeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + + /** + * .google.protobuf.Timestamp time = 1; + */ + public Builder mergeTime(com.google.protobuf.Timestamp value) { + if (timeBuilder_ == null) { + if (time_ != null) { + time_ = com.google.protobuf.Timestamp.newBuilder(time_).mergeFrom(value).buildPartial(); + } else { + time_ = value; + } + onChanged(); + } else { + timeBuilder_.mergeFrom(value); + } + + return this; + } + /** + * .google.protobuf.Timestamp time = 1; + */ + 
public Builder clearTime() { + if (timeBuilder_ == null) { + time_ = null; + onChanged(); + } else { + time_ = null; + timeBuilder_ = null; + } + + return this; + } + + /** + * .google.protobuf.Timestamp time = 1; + */ + public com.google.protobuf.Timestamp.Builder getTimeBuilder() { + + onChanged(); + return getTimeFieldBuilder().getBuilder(); + } + + /** + * .google.protobuf.Timestamp time = 1; + */ + public com.google.protobuf.TimestampOrBuilder getTimeOrBuilder() { + if (timeBuilder_ != null) { + return timeBuilder_.getMessageOrBuilder(); + } else { + return time_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : time_; + } + } + + /** + * .google.protobuf.Timestamp time = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3 getTimeFieldBuilder() { + if (timeBuilder_ == null) { + timeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3( + getTime(), getParentForChildren(), isClean()); + time_ = null; + } + return timeBuilder_; + } + + private Object source_ = ""; + + /** + * string source = 2; + * + * @return The source. + */ + public String getSource() { + Object ref = source_; + if (!(ref instanceof String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + source_ = s; + return s; + } else { + return (String) ref; + } + } + + /** + * string source = 2; + * + * @return The bytes for source. + */ + public com.google.protobuf.ByteString getSourceBytes() { + Object ref = source_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + source_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + /** + * string source = 2; + * + * @param value The source to set. + * @return This builder for chaining. 
+ */ + public Builder setSource(String value) { + if (value == null) { + throw new NullPointerException(); + } + + source_ = value; + onChanged(); + return this; + } + + /** + * string source = 2; + * + * @return This builder for chaining. + */ + public Builder clearSource() { + + source_ = getDefaultInstance().getSource(); + onChanged(); + return this; + } + + /** + * string source = 2; + * + * @param value The bytes for source to set. + * @return This builder for chaining. + */ + public Builder setSourceBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + source_ = value; + onChanged(); + return this; + } + + private Object destination_ = ""; + + /** + * string destination = 3; + * + * @return The destination. + */ + public String getDestination() { + Object ref = destination_; + if (!(ref instanceof String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + destination_ = s; + return s; + } else { + return (String) ref; + } + } + + /** + * string destination = 3; + * + * @return The bytes for destination. + */ + public com.google.protobuf.ByteString getDestinationBytes() { + Object ref = destination_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + destination_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + /** + * string destination = 3; + * + * @param value The destination to set. + * @return This builder for chaining. + */ + public Builder setDestination(String value) { + if (value == null) { + throw new NullPointerException(); + } + + destination_ = value; + onChanged(); + return this; + } + + /** + * string destination = 3; + * + * @return This builder for chaining. 
+ */ + public Builder clearDestination() { + + destination_ = getDefaultInstance().getDestination(); + onChanged(); + return this; + } + + /** + * string destination = 3; + * + * @param value The bytes for destination to set. + * @return This builder for chaining. + */ + public Builder setDestinationBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + destination_ = value; + onChanged(); + return this; + } + + private Object uuid_ = ""; + + /** + * string uuid = 4; + * + * @return The uuid. + */ + public String getUuid() { + Object ref = uuid_; + if (!(ref instanceof String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + uuid_ = s; + return s; + } else { + return (String) ref; + } + } + + /** + * string uuid = 4; + * + * @return The bytes for uuid. + */ + public com.google.protobuf.ByteString getUuidBytes() { + Object ref = uuid_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + uuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + /** + * string uuid = 4; + * + * @param value The uuid to set. + * @return This builder for chaining. + */ + public Builder setUuid(String value) { + if (value == null) { + throw new NullPointerException(); + } + + uuid_ = value; + onChanged(); + return this; + } + + /** + * string uuid = 4; + * + * @return This builder for chaining. + */ + public Builder clearUuid() { + + uuid_ = getDefaultInstance().getUuid(); + onChanged(); + return this; + } + + /** + * string uuid = 4; + * + * @param value The bytes for uuid to set. + * @return This builder for chaining. 
+ */ + public Builder setUuidBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + uuid_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList sourceUuids_ = com.google.protobuf.LazyStringArrayList.EMPTY; + + private void ensureSourceUuidsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + sourceUuids_ = new com.google.protobuf.LazyStringArrayList(sourceUuids_); + bitField0_ |= 0x00000001; + } + } + + /** + * repeated string source_uuids = 5; + * + * @return A list containing the sourceUuids. + */ + public com.google.protobuf.ProtocolStringList getSourceUuidsList() { + return sourceUuids_.getUnmodifiableView(); + } + + /** + * repeated string source_uuids = 5; + * + * @return The count of sourceUuids. + */ + public int getSourceUuidsCount() { + return sourceUuids_.size(); + } + + /** + * repeated string source_uuids = 5; + * + * @param index The index of the element to return. + * @return The sourceUuids at the given index. + */ + public String getSourceUuids(int index) { + return sourceUuids_.get(index); + } + + /** + * repeated string source_uuids = 5; + * + * @param index The index of the value to return. + * @return The bytes of the sourceUuids at the given index. + */ + public com.google.protobuf.ByteString getSourceUuidsBytes(int index) { + return sourceUuids_.getByteString(index); + } + + /** + * repeated string source_uuids = 5; + * + * @param index The index to set the value at. + * @param value The sourceUuids to set. + * @return This builder for chaining. + */ + public Builder setSourceUuids(int index, String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSourceUuidsIsMutable(); + sourceUuids_.set(index, value); + onChanged(); + return this; + } + + /** + * repeated string source_uuids = 5; + * + * @param value The sourceUuids to add. + * @return This builder for chaining. 
+ */ + public Builder addSourceUuids(String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureSourceUuidsIsMutable(); + sourceUuids_.add(value); + onChanged(); + return this; + } + + /** + * repeated string source_uuids = 5; + * + * @param values The sourceUuids to add. + * @return This builder for chaining. + */ + public Builder addAllSourceUuids(Iterable values) { + ensureSourceUuidsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sourceUuids_); + onChanged(); + return this; + } + + /** + * repeated string source_uuids = 5; + * + * @return This builder for chaining. + */ + public Builder clearSourceUuids() { + sourceUuids_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + /** + * repeated string source_uuids = 5; + * + * @param value The bytes of the sourceUuids to add. + * @return This builder for chaining. + */ + public Builder addSourceUuidsBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureSourceUuidsIsMutable(); + sourceUuids_.add(value); + onChanged(); + return this; + } + + private Object messageTypeId_ = ""; + + /** + * string message_type_id = 6; + * + * @return The messageTypeId. + */ + public String getMessageTypeId() { + Object ref = messageTypeId_; + if (!(ref instanceof String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + messageTypeId_ = s; + return s; + } else { + return (String) ref; + } + } + + /** + * string message_type_id = 6; + * + * @return The bytes for messageTypeId. 
+ */ + public com.google.protobuf.ByteString getMessageTypeIdBytes() { + Object ref = messageTypeId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + messageTypeId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + /** + * string message_type_id = 6; + * + * @param value The messageTypeId to set. + * @return This builder for chaining. + */ + public Builder setMessageTypeId(String value) { + if (value == null) { + throw new NullPointerException(); + } + + messageTypeId_ = value; + onChanged(); + return this; + } + + /** + * string message_type_id = 6; + * + * @return This builder for chaining. + */ + public Builder clearMessageTypeId() { + + messageTypeId_ = getDefaultInstance().getMessageTypeId(); + onChanged(); + return this; + } + + /** + * string message_type_id = 6; + * + * @param value The bytes for messageTypeId to set. + * @return This builder for chaining. + */ + public Builder setMessageTypeIdBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + messageTypeId_ = value; + onChanged(); + return this; + } + + private Object rawMessage_ = ""; + + /** + * string raw_message = 7; + * + * @return The rawMessage. + */ + public String getRawMessage() { + Object ref = rawMessage_; + if (!(ref instanceof String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + rawMessage_ = s; + return s; + } else { + return (String) ref; + } + } + + /** + * string raw_message = 7; + * + * @return The bytes for rawMessage. 
+ */ + public com.google.protobuf.ByteString getRawMessageBytes() { + Object ref = rawMessage_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + rawMessage_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + /** + * string raw_message = 7; + * + * @param value The rawMessage to set. + * @return This builder for chaining. + */ + public Builder setRawMessage(String value) { + if (value == null) { + throw new NullPointerException(); + } + + rawMessage_ = value; + onChanged(); + return this; + } + + /** + * string raw_message = 7; + * + * @return This builder for chaining. + */ + public Builder clearRawMessage() { + + rawMessage_ = getDefaultInstance().getRawMessage(); + onChanged(); + return this; + } + + /** + * string raw_message = 7; + * + * @param value The bytes for rawMessage to set. + * @return This builder for chaining. + */ + public Builder setRawMessageBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + rawMessage_ = value; + onChanged(); + return this; + } + + @Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @Override + public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:io.apicurio.tests.protobuf.Header) + } + + // @@protoc_insertion_point(class_scope:io.apicurio.tests.protobuf.Header) + private static final Header DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new Header(); + } + + public static Header getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser
PARSER = new com.google.protobuf.AbstractParser
() { + @Override + public Header parsePartialFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Header(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser
parser() { + return PARSER; + } + + @Override + public com.google.protobuf.Parser
getParserForType() { + return PARSER; + } + + @Override + public Header getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} diff --git a/integration-tests/src/test/java/io/apicurio/tests/protobuf/HeaderOrBuilder.java b/integration-tests/src/test/java/io/apicurio/tests/protobuf/HeaderOrBuilder.java index b1944ceac2..6c7bb77ae6 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/protobuf/HeaderOrBuilder.java +++ b/integration-tests/src/test/java/io/apicurio/tests/protobuf/HeaderOrBuilder.java @@ -4,106 +4,125 @@ package io.apicurio.tests.protobuf; public interface HeaderOrBuilder extends - // @@protoc_insertion_point(interface_extends:io.apicurio.tests.protobuf.Header) - com.google.protobuf.MessageOrBuilder { - - /** - * .google.protobuf.Timestamp time = 1; - * @return Whether the time field is set. - */ - boolean hasTime(); - /** - * .google.protobuf.Timestamp time = 1; - * @return The time. - */ - com.google.protobuf.Timestamp getTime(); - /** - * .google.protobuf.Timestamp time = 1; - */ - com.google.protobuf.TimestampOrBuilder getTimeOrBuilder(); - - /** - * string source = 2; - * @return The source. - */ - String getSource(); - /** - * string source = 2; - * @return The bytes for source. - */ - com.google.protobuf.ByteString - getSourceBytes(); - - /** - * string destination = 3; - * @return The destination. - */ - String getDestination(); - /** - * string destination = 3; - * @return The bytes for destination. - */ - com.google.protobuf.ByteString - getDestinationBytes(); - - /** - * string uuid = 4; - * @return The uuid. - */ - String getUuid(); - /** - * string uuid = 4; - * @return The bytes for uuid. - */ - com.google.protobuf.ByteString - getUuidBytes(); - - /** - * repeated string source_uuids = 5; - * @return A list containing the sourceUuids. - */ - java.util.List - getSourceUuidsList(); - /** - * repeated string source_uuids = 5; - * @return The count of sourceUuids. 
- */ - int getSourceUuidsCount(); - /** - * repeated string source_uuids = 5; - * @param index The index of the element to return. - * @return The sourceUuids at the given index. - */ - String getSourceUuids(int index); - /** - * repeated string source_uuids = 5; - * @param index The index of the value to return. - * @return The bytes of the sourceUuids at the given index. - */ - com.google.protobuf.ByteString - getSourceUuidsBytes(int index); - - /** - * string message_type_id = 6; - * @return The messageTypeId. - */ - String getMessageTypeId(); - /** - * string message_type_id = 6; - * @return The bytes for messageTypeId. - */ - com.google.protobuf.ByteString - getMessageTypeIdBytes(); - - /** - * string raw_message = 7; - * @return The rawMessage. - */ - String getRawMessage(); - /** - * string raw_message = 7; - * @return The bytes for rawMessage. - */ - com.google.protobuf.ByteString - getRawMessageBytes(); + // @@protoc_insertion_point(interface_extends:io.apicurio.tests.protobuf.Header) + com.google.protobuf.MessageOrBuilder { + + /** + * .google.protobuf.Timestamp time = 1; + * + * @return Whether the time field is set. + */ + boolean hasTime(); + + /** + * .google.protobuf.Timestamp time = 1; + * + * @return The time. + */ + com.google.protobuf.Timestamp getTime(); + + /** + * .google.protobuf.Timestamp time = 1; + */ + com.google.protobuf.TimestampOrBuilder getTimeOrBuilder(); + + /** + * string source = 2; + * + * @return The source. + */ + String getSource(); + + /** + * string source = 2; + * + * @return The bytes for source. + */ + com.google.protobuf.ByteString getSourceBytes(); + + /** + * string destination = 3; + * + * @return The destination. + */ + String getDestination(); + + /** + * string destination = 3; + * + * @return The bytes for destination. + */ + com.google.protobuf.ByteString getDestinationBytes(); + + /** + * string uuid = 4; + * + * @return The uuid. 
+ */ + String getUuid(); + + /** + * string uuid = 4; + * + * @return The bytes for uuid. + */ + com.google.protobuf.ByteString getUuidBytes(); + + /** + * repeated string source_uuids = 5; + * + * @return A list containing the sourceUuids. + */ + java.util.List getSourceUuidsList(); + + /** + * repeated string source_uuids = 5; + * + * @return The count of sourceUuids. + */ + int getSourceUuidsCount(); + + /** + * repeated string source_uuids = 5; + * + * @param index The index of the element to return. + * @return The sourceUuids at the given index. + */ + String getSourceUuids(int index); + + /** + * repeated string source_uuids = 5; + * + * @param index The index of the value to return. + * @return The bytes of the sourceUuids at the given index. + */ + com.google.protobuf.ByteString getSourceUuidsBytes(int index); + + /** + * string message_type_id = 6; + * + * @return The messageTypeId. + */ + String getMessageTypeId(); + + /** + * string message_type_id = 6; + * + * @return The bytes for messageTypeId. + */ + com.google.protobuf.ByteString getMessageTypeIdBytes(); + + /** + * string raw_message = 7; + * + * @return The rawMessage. + */ + String getRawMessage(); + + /** + * string raw_message = 7; + * + * @return The bytes for rawMessage. 
+ */ + com.google.protobuf.ByteString getRawMessageBytes(); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/protobuf/Point.java b/integration-tests/src/test/java/io/apicurio/tests/protobuf/Point.java index 00f9b0e26c..ebc395cbf7 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/protobuf/Point.java +++ b/integration-tests/src/test/java/io/apicurio/tests/protobuf/Point.java @@ -6,906 +6,920 @@ /** * Protobuf type {@code io.apicurio.tests.protobuf.Point} */ -public final class Point extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:io.apicurio.tests.protobuf.Point) +public final class Point extends com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.apicurio.tests.protobuf.Point) PointOrBuilder { -private static final long serialVersionUID = 0L; - // Use Point.newBuilder() to construct. - private Point(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private Point() { - } - - @Override - @SuppressWarnings({"unused"}) - protected Object newInstance( - UnusedPrivateParameter unused) { - return new Point(); - } - - @Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private Point( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 9: { - longitudeOneofCase_ = 1; - longitudeOneof_ = input.readDouble(); - break; - } - case 17: { - latitudeOneofCase_ = 2; - latitudeOneof_ = 
input.readDouble(); - break; - } - case 25: { - altitudeOneofCase_ = 3; - altitudeOneof_ = input.readDouble(); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_descriptor; - } - - @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_fieldAccessorTable - .ensureFieldAccessorsInitialized( - Point.class, Builder.class); - } - - private int longitudeOneofCase_ = 0; - private Object longitudeOneof_; - public enum LongitudeOneofCase - implements com.google.protobuf.Internal.EnumLite, - InternalOneOfEnum { - LONGITUDE(1), - LONGITUDEONEOF_NOT_SET(0); - private final int value; - private LongitudeOneofCase(int value) { - this.value = value; - } - /** - * @param value The number of the enum to look for. - * @return The enum associated with the given number. - * @deprecated Use {@link #forNumber(int)} instead. 
- */ - @Deprecated - public static LongitudeOneofCase valueOf(int value) { - return forNumber(value); - } - - public static LongitudeOneofCase forNumber(int value) { - switch (value) { - case 1: return LONGITUDE; - case 0: return LONGITUDEONEOF_NOT_SET; - default: return null; - } - } - public int getNumber() { - return this.value; - } - }; - - public LongitudeOneofCase - getLongitudeOneofCase() { - return LongitudeOneofCase.forNumber( - longitudeOneofCase_); - } - - private int latitudeOneofCase_ = 0; - private Object latitudeOneof_; - public enum LatitudeOneofCase - implements com.google.protobuf.Internal.EnumLite, - InternalOneOfEnum { - LATITUDE(2), - LATITUDEONEOF_NOT_SET(0); - private final int value; - private LatitudeOneofCase(int value) { - this.value = value; - } - /** - * @param value The number of the enum to look for. - * @return The enum associated with the given number. - * @deprecated Use {@link #forNumber(int)} instead. - */ - @Deprecated - public static LatitudeOneofCase valueOf(int value) { - return forNumber(value); - } - - public static LatitudeOneofCase forNumber(int value) { - switch (value) { - case 2: return LATITUDE; - case 0: return LATITUDEONEOF_NOT_SET; - default: return null; - } - } - public int getNumber() { - return this.value; - } - }; - - public LatitudeOneofCase - getLatitudeOneofCase() { - return LatitudeOneofCase.forNumber( - latitudeOneofCase_); - } - - private int altitudeOneofCase_ = 0; - private Object altitudeOneof_; - public enum AltitudeOneofCase - implements com.google.protobuf.Internal.EnumLite, - InternalOneOfEnum { - ALTITUDE(3), - ALTITUDEONEOF_NOT_SET(0); - private final int value; - private AltitudeOneofCase(int value) { - this.value = value; - } - /** - * @param value The number of the enum to look for. - * @return The enum associated with the given number. - * @deprecated Use {@link #forNumber(int)} instead. 
- */ - @Deprecated - public static AltitudeOneofCase valueOf(int value) { - return forNumber(value); - } - - public static AltitudeOneofCase forNumber(int value) { - switch (value) { - case 3: return ALTITUDE; - case 0: return ALTITUDEONEOF_NOT_SET; - default: return null; - } - } - public int getNumber() { - return this.value; - } - }; - - public AltitudeOneofCase - getAltitudeOneofCase() { - return AltitudeOneofCase.forNumber( - altitudeOneofCase_); - } - - public static final int LONGITUDE_FIELD_NUMBER = 1; - /** - * double longitude = 1; - * @return Whether the longitude field is set. - */ - @Override - public boolean hasLongitude() { - return longitudeOneofCase_ == 1; - } - /** - * double longitude = 1; - * @return The longitude. - */ - @Override - public double getLongitude() { - if (longitudeOneofCase_ == 1) { - return (Double) longitudeOneof_; - } - return 0D; - } - - public static final int LATITUDE_FIELD_NUMBER = 2; - /** - * double latitude = 2; - * @return Whether the latitude field is set. - */ - @Override - public boolean hasLatitude() { - return latitudeOneofCase_ == 2; - } - /** - * double latitude = 2; - * @return The latitude. - */ - @Override - public double getLatitude() { - if (latitudeOneofCase_ == 2) { - return (Double) latitudeOneof_; - } - return 0D; - } - - public static final int ALTITUDE_FIELD_NUMBER = 3; - /** - * double altitude = 3; - * @return Whether the altitude field is set. - */ - @Override - public boolean hasAltitude() { - return altitudeOneofCase_ == 3; - } - /** - * double altitude = 3; - * @return The altitude. 
- */ - @Override - public double getAltitude() { - if (altitudeOneofCase_ == 3) { - return (Double) altitudeOneof_; - } - return 0D; - } - - private byte memoizedIsInitialized = -1; - @Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (longitudeOneofCase_ == 1) { - output.writeDouble( - 1, (double)((Double) longitudeOneof_)); - } - if (latitudeOneofCase_ == 2) { - output.writeDouble( - 2, (double)((Double) latitudeOneof_)); - } - if (altitudeOneofCase_ == 3) { - output.writeDouble( - 3, (double)((Double) altitudeOneof_)); - } - unknownFields.writeTo(output); - } - - @Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (longitudeOneofCase_ == 1) { - size += com.google.protobuf.CodedOutputStream - .computeDoubleSize( - 1, (double)((Double) longitudeOneof_)); - } - if (latitudeOneofCase_ == 2) { - size += com.google.protobuf.CodedOutputStream - .computeDoubleSize( - 2, (double)((Double) latitudeOneof_)); - } - if (altitudeOneofCase_ == 3) { - size += com.google.protobuf.CodedOutputStream - .computeDoubleSize( - 3, (double)((Double) altitudeOneof_)); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @Override - public boolean equals(final Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof Point)) { - return super.equals(obj); - } - Point other = (Point) obj; - - if (!getLongitudeOneofCase().equals(other.getLongitudeOneofCase())) return false; - switch (longitudeOneofCase_) { - case 1: - if (Double.doubleToLongBits(getLongitude()) - != Double.doubleToLongBits( - other.getLongitude())) return false; - break; - case 0: - default: - } - if 
(!getLatitudeOneofCase().equals(other.getLatitudeOneofCase())) return false; - switch (latitudeOneofCase_) { - case 2: - if (Double.doubleToLongBits(getLatitude()) - != Double.doubleToLongBits( - other.getLatitude())) return false; - break; - case 0: - default: - } - if (!getAltitudeOneofCase().equals(other.getAltitudeOneofCase())) return false; - switch (altitudeOneofCase_) { - case 3: - if (Double.doubleToLongBits(getAltitude()) - != Double.doubleToLongBits( - other.getAltitude())) return false; - break; - case 0: - default: - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - switch (longitudeOneofCase_) { - case 1: - hash = (37 * hash) + LONGITUDE_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - Double.doubleToLongBits(getLongitude())); - break; - case 0: - default: - } - switch (latitudeOneofCase_) { - case 2: - hash = (37 * hash) + LATITUDE_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - Double.doubleToLongBits(getLatitude())); - break; - case 0: - default: - } - switch (altitudeOneofCase_) { - case 3: - hash = (37 * hash) + ALTITUDE_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - Double.doubleToLongBits(getAltitude())); - break; - case 0: - default: - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static Point parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static Point parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static Point 
parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static Point parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static Point parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static Point parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static Point parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static Point parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static Point parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static Point parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static Point parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static Point parseFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(Point prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @Override - protected Builder newBuilderForType( - BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code io.apicurio.tests.protobuf.Point} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:io.apicurio.tests.protobuf.Point) - PointOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_descriptor; - } + private static final long serialVersionUID = 0L; - @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_fieldAccessorTable - .ensureFieldAccessorsInitialized( - Point.class, Builder.class); + // Use Point.newBuilder() to construct. 
+ private Point(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); } - // Construct using io.apicurio.tests.protobuf.Point.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); + private Point() { } - private Builder( - BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } @Override - public Builder clear() { - super.clear(); - longitudeOneofCase_ = 0; - longitudeOneof_ = null; - latitudeOneofCase_ = 0; - latitudeOneof_ = null; - altitudeOneofCase_ = 0; - altitudeOneof_ = null; - return this; + @SuppressWarnings({ "unused" }) + protected Object newInstance(UnusedPrivateParameter unused) { + return new Point(); } @Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_descriptor; + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; } - @Override - public Point getDefaultInstanceForType() { - return Point.getDefaultInstance(); + private Point(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet + .newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 9: { + longitudeOneofCase_ = 1; + longitudeOneof_ = input.readDouble(); + break; + } + case 17: { + latitudeOneofCase_ = 2; + latitudeOneof_ = input.readDouble(); + break; + } + case 25: { + altitudeOneofCase_ = 3; + altitudeOneof_ = input.readDouble(); + break; + } + default: { + if 
(!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } } - @Override - public Point build() { - Point result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_descriptor; } @Override - public Point buildPartial() { - Point result = new Point(this); - if (longitudeOneofCase_ == 1) { - result.longitudeOneof_ = longitudeOneof_; - } - if (latitudeOneofCase_ == 2) { - result.latitudeOneof_ = latitudeOneof_; - } - if (altitudeOneofCase_ == 3) { - result.altitudeOneof_ = altitudeOneof_; - } - result.longitudeOneofCase_ = longitudeOneofCase_; - result.latitudeOneofCase_ = latitudeOneofCase_; - result.altitudeOneofCase_ = altitudeOneofCase_; - onBuilt(); - return result; + protected FieldAccessorTable internalGetFieldAccessorTable() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_fieldAccessorTable + .ensureFieldAccessorsInitialized(Point.class, Builder.class); } - @Override - public Builder clone() { - return super.clone(); - } - @Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return super.setField(field, value); - } - @Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - 
@Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return super.setRepeatedField(field, index, value); - } - @Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return super.addRepeatedField(field, value); - } - @Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof Point) { - return mergeFrom((Point)other); - } else { - super.mergeFrom(other); - return this; - } - } + private int longitudeOneofCase_ = 0; + private Object longitudeOneof_; - public Builder mergeFrom(Point other) { - if (other == Point.getDefaultInstance()) return this; - switch (other.getLongitudeOneofCase()) { - case LONGITUDE: { - setLongitude(other.getLongitude()); - break; - } - case LONGITUDEONEOF_NOT_SET: { - break; - } - } - switch (other.getLatitudeOneofCase()) { - case LATITUDE: { - setLatitude(other.getLatitude()); - break; - } - case LATITUDEONEOF_NOT_SET: { - break; - } - } - switch (other.getAltitudeOneofCase()) { - case ALTITUDE: { - setAltitude(other.getAltitude()); - break; + public enum LongitudeOneofCase implements com.google.protobuf.Internal.EnumLite, InternalOneOfEnum { + LONGITUDE(1), LONGITUDEONEOF_NOT_SET(0); + + private final int value; + + private LongitudeOneofCase(int value) { + this.value = value; } - case ALTITUDEONEOF_NOT_SET: { - break; + + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @Deprecated + public static LongitudeOneofCase valueOf(int value) { + return forNumber(value); } - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - @Override - public final boolean isInitialized() { - return true; - } + public static LongitudeOneofCase forNumber(int value) { + switch (value) { + case 1: + return LONGITUDE; + case 0: + return LONGITUDEONEOF_NOT_SET; + default: + return null; + } + } - @Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Point parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (Point) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int longitudeOneofCase_ = 0; - private Object longitudeOneof_; - public LongitudeOneofCase - getLongitudeOneofCase() { - return LongitudeOneofCase.forNumber( - longitudeOneofCase_); - } + public int getNumber() { + return this.value; + } + }; - public Builder clearLongitudeOneof() { - longitudeOneofCase_ = 0; - longitudeOneof_ = null; - onChanged(); - return this; + public LongitudeOneofCase getLongitudeOneofCase() { + return LongitudeOneofCase.forNumber(longitudeOneofCase_); } private int latitudeOneofCase_ = 0; private Object latitudeOneof_; - public LatitudeOneofCase - getLatitudeOneofCase() { - return LatitudeOneofCase.forNumber( - latitudeOneofCase_); - } - public Builder clearLatitudeOneof() { - latitudeOneofCase_ = 0; - latitudeOneof_ = null; - onChanged(); - return this; + public enum LatitudeOneofCase implements com.google.protobuf.Internal.EnumLite, InternalOneOfEnum { + LATITUDE(2), LATITUDEONEOF_NOT_SET(0); + + private final int value; + + private LatitudeOneofCase(int value) { + 
this.value = value; + } + + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @Deprecated + public static LatitudeOneofCase valueOf(int value) { + return forNumber(value); + } + + public static LatitudeOneofCase forNumber(int value) { + switch (value) { + case 2: + return LATITUDE; + case 0: + return LATITUDEONEOF_NOT_SET; + default: + return null; + } + } + + public int getNumber() { + return this.value; + } + }; + + public LatitudeOneofCase getLatitudeOneofCase() { + return LatitudeOneofCase.forNumber(latitudeOneofCase_); } private int altitudeOneofCase_ = 0; private Object altitudeOneof_; - public AltitudeOneofCase - getAltitudeOneofCase() { - return AltitudeOneofCase.forNumber( - altitudeOneofCase_); - } - public Builder clearAltitudeOneof() { - altitudeOneofCase_ = 0; - altitudeOneof_ = null; - onChanged(); - return this; + public enum AltitudeOneofCase implements com.google.protobuf.Internal.EnumLite, InternalOneOfEnum { + ALTITUDE(3), ALTITUDEONEOF_NOT_SET(0); + + private final int value; + + private AltitudeOneofCase(int value) { + this.value = value; + } + + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @Deprecated + public static AltitudeOneofCase valueOf(int value) { + return forNumber(value); + } + + public static AltitudeOneofCase forNumber(int value) { + switch (value) { + case 3: + return ALTITUDE; + case 0: + return ALTITUDEONEOF_NOT_SET; + default: + return null; + } + } + + public int getNumber() { + return this.value; + } + }; + + public AltitudeOneofCase getAltitudeOneofCase() { + return AltitudeOneofCase.forNumber(altitudeOneofCase_); } + public static final int LONGITUDE_FIELD_NUMBER = 1; /** * double longitude = 1; + * * @return Whether the longitude field is set. 
*/ + @Override public boolean hasLongitude() { - return longitudeOneofCase_ == 1; + return longitudeOneofCase_ == 1; } + /** * double longitude = 1; + * * @return The longitude. */ + @Override public double getLongitude() { - if (longitudeOneofCase_ == 1) { - return (Double) longitudeOneof_; - } - return 0D; - } - /** - * double longitude = 1; - * @param value The longitude to set. - * @return This builder for chaining. - */ - public Builder setLongitude(double value) { - longitudeOneofCase_ = 1; - longitudeOneof_ = value; - onChanged(); - return this; - } - /** - * double longitude = 1; - * @return This builder for chaining. - */ - public Builder clearLongitude() { - if (longitudeOneofCase_ == 1) { - longitudeOneofCase_ = 0; - longitudeOneof_ = null; - onChanged(); - } - return this; + if (longitudeOneofCase_ == 1) { + return (Double) longitudeOneof_; + } + return 0D; } + public static final int LATITUDE_FIELD_NUMBER = 2; + /** * double latitude = 2; + * * @return Whether the latitude field is set. */ + @Override public boolean hasLatitude() { - return latitudeOneofCase_ == 2; + return latitudeOneofCase_ == 2; } + /** * double latitude = 2; + * * @return The latitude. */ + @Override public double getLatitude() { - if (latitudeOneofCase_ == 2) { - return (Double) latitudeOneof_; - } - return 0D; - } - /** - * double latitude = 2; - * @param value The latitude to set. - * @return This builder for chaining. - */ - public Builder setLatitude(double value) { - latitudeOneofCase_ = 2; - latitudeOneof_ = value; - onChanged(); - return this; - } - /** - * double latitude = 2; - * @return This builder for chaining. 
- */ - public Builder clearLatitude() { - if (latitudeOneofCase_ == 2) { - latitudeOneofCase_ = 0; - latitudeOneof_ = null; - onChanged(); - } - return this; + if (latitudeOneofCase_ == 2) { + return (Double) latitudeOneof_; + } + return 0D; } + public static final int ALTITUDE_FIELD_NUMBER = 3; + /** * double altitude = 3; + * * @return Whether the altitude field is set. */ + @Override public boolean hasAltitude() { - return altitudeOneofCase_ == 3; + return altitudeOneofCase_ == 3; } + /** * double altitude = 3; + * * @return The altitude. */ + @Override public double getAltitude() { - if (altitudeOneofCase_ == 3) { - return (Double) altitudeOneof_; - } - return 0D; + if (altitudeOneofCase_ == 3) { + return (Double) altitudeOneof_; + } + return 0D; } - /** - * double altitude = 3; - * @param value The altitude to set. - * @return This builder for chaining. - */ - public Builder setAltitude(double value) { - altitudeOneofCase_ = 3; - altitudeOneof_ = value; - onChanged(); - return this; + + private byte memoizedIsInitialized = -1; + + @Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) + return true; + if (isInitialized == 0) + return false; + + memoizedIsInitialized = 1; + return true; } - /** - * double altitude = 3; - * @return This builder for chaining. 
- */ - public Builder clearAltitude() { - if (altitudeOneofCase_ == 3) { - altitudeOneofCase_ = 0; - altitudeOneof_ = null; - onChanged(); - } - return this; + + @Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (longitudeOneofCase_ == 1) { + output.writeDouble(1, (double) ((Double) longitudeOneof_)); + } + if (latitudeOneofCase_ == 2) { + output.writeDouble(2, (double) ((Double) latitudeOneof_)); + } + if (altitudeOneofCase_ == 3) { + output.writeDouble(3, (double) ((Double) altitudeOneof_)); + } + unknownFields.writeTo(output); } + @Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) + return size; + + size = 0; + if (longitudeOneofCase_ == 1) { + size += com.google.protobuf.CodedOutputStream.computeDoubleSize(1, + (double) ((Double) longitudeOneof_)); + } + if (latitudeOneofCase_ == 2) { + size += com.google.protobuf.CodedOutputStream.computeDoubleSize(2, + (double) ((Double) latitudeOneof_)); + } + if (altitudeOneofCase_ == 3) { + size += com.google.protobuf.CodedOutputStream.computeDoubleSize(3, + (double) ((Double) altitudeOneof_)); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; } @Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); + public boolean equals(final Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof Point)) { + return super.equals(obj); + } + Point other = (Point) obj; + + if (!getLongitudeOneofCase().equals(other.getLongitudeOneofCase())) + return false; + switch (longitudeOneofCase_) { + case 1: + if (Double.doubleToLongBits(getLongitude()) != Double.doubleToLongBits(other.getLongitude())) + return false; + break; + case 0: + 
default: + } + if (!getLatitudeOneofCase().equals(other.getLatitudeOneofCase())) + return false; + switch (latitudeOneofCase_) { + case 2: + if (Double.doubleToLongBits(getLatitude()) != Double.doubleToLongBits(other.getLatitude())) + return false; + break; + case 0: + default: + } + if (!getAltitudeOneofCase().equals(other.getAltitudeOneofCase())) + return false; + switch (altitudeOneofCase_) { + case 3: + if (Double.doubleToLongBits(getAltitude()) != Double.doubleToLongBits(other.getAltitude())) + return false; + break; + case 0: + default: + } + if (!unknownFields.equals(other.unknownFields)) + return false; + return true; } + @Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + switch (longitudeOneofCase_) { + case 1: + hash = (37 * hash) + LONGITUDE_FIELD_NUMBER; + hash = (53 * hash) + + com.google.protobuf.Internal.hashLong(Double.doubleToLongBits(getLongitude())); + break; + case 0: + default: + } + switch (latitudeOneofCase_) { + case 2: + hash = (37 * hash) + LATITUDE_FIELD_NUMBER; + hash = (53 * hash) + + com.google.protobuf.Internal.hashLong(Double.doubleToLongBits(getLatitude())); + break; + case 0: + default: + } + switch (altitudeOneofCase_) { + case 3: + hash = (37 * hash) + ALTITUDE_FIELD_NUMBER; + hash = (53 * hash) + + com.google.protobuf.Internal.hashLong(Double.doubleToLongBits(getAltitude())); + break; + case 0: + default: + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } - // @@protoc_insertion_point(builder_scope:io.apicurio.tests.protobuf.Point) - } + public static Point parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } - // @@protoc_insertion_point(class_scope:io.apicurio.tests.protobuf.Point) - private static final Point DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new Point(); - } + public 
static Point parseFrom(java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } - public static Point getDefaultInstance() { - return DEFAULT_INSTANCE; - } + public static Point parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static Point parseFrom(com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static Point parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static Point parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static Point parseFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static Point parseFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + } + + public static Point parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static Point parseDelimitedFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, + 
extensionRegistry); + } + + public static Point parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static Point parseFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + } - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { @Override - public Point parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Point(input, extensionRegistry); + public Builder newBuilderForType() { + return newBuilder(); } - }; - public static com.google.protobuf.Parser parser() { - return PARSER; - } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } - @Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } + public static Builder newBuilder(Point prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } - @Override - public Point getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } + @Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); + } -} + @Override + protected Builder newBuilderForType(BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + + /** + * Protobuf type {@code io.apicurio.tests.protobuf.Point} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:io.apicurio.tests.protobuf.Point) + PointOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_descriptor; + } + + @Override + protected FieldAccessorTable internalGetFieldAccessorTable() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_fieldAccessorTable + .ensureFieldAccessorsInitialized(Point.class, Builder.class); + } + + // Construct using io.apicurio.tests.protobuf.Point.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + } + } + + @Override + public Builder clear() { + super.clear(); + longitudeOneofCase_ = 0; + longitudeOneof_ = null; + latitudeOneofCase_ = 0; + latitudeOneof_ = null; + altitudeOneofCase_ = 0; + altitudeOneof_ = null; + return this; + } + + @Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_Point_descriptor; + } + + @Override + public Point getDefaultInstanceForType() { + return Point.getDefaultInstance(); + } + + @Override + public Point build() { + Point result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @Override + public Point buildPartial() { + Point result = new 
Point(this); + if (longitudeOneofCase_ == 1) { + result.longitudeOneof_ = longitudeOneof_; + } + if (latitudeOneofCase_ == 2) { + result.latitudeOneof_ = latitudeOneof_; + } + if (altitudeOneofCase_ == 3) { + result.altitudeOneof_ = altitudeOneof_; + } + result.longitudeOneofCase_ = longitudeOneofCase_; + result.latitudeOneofCase_ = latitudeOneofCase_; + result.altitudeOneofCase_ = altitudeOneofCase_; + onBuilt(); + return result; + } + + @Override + public Builder clone() { + return super.clone(); + } + + @Override + public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { + return super.setField(field, value); + } + + @Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @Override + public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, + Object value) { + return super.setRepeatedField(field, index, value); + } + + @Override + public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { + return super.addRepeatedField(field, value); + } + + @Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof Point) { + return mergeFrom((Point) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(Point other) { + if (other == Point.getDefaultInstance()) + return this; + switch (other.getLongitudeOneofCase()) { + case LONGITUDE: { + setLongitude(other.getLongitude()); + break; + } + case LONGITUDEONEOF_NOT_SET: { + break; + } + } + switch (other.getLatitudeOneofCase()) { + case LATITUDE: { + setLatitude(other.getLatitude()); + break; + } + case LATITUDEONEOF_NOT_SET: { + break; + } + } + switch (other.getAltitudeOneofCase()) { + case ALTITUDE: { + 
setAltitude(other.getAltitude()); + break; + } + case ALTITUDEONEOF_NOT_SET: { + break; + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @Override + public final boolean isInitialized() { + return true; + } + @Override + public Builder mergeFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + Point parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (Point) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private int longitudeOneofCase_ = 0; + private Object longitudeOneof_; + + public LongitudeOneofCase getLongitudeOneofCase() { + return LongitudeOneofCase.forNumber(longitudeOneofCase_); + } + + public Builder clearLongitudeOneof() { + longitudeOneofCase_ = 0; + longitudeOneof_ = null; + onChanged(); + return this; + } + + private int latitudeOneofCase_ = 0; + private Object latitudeOneof_; + + public LatitudeOneofCase getLatitudeOneofCase() { + return LatitudeOneofCase.forNumber(latitudeOneofCase_); + } + + public Builder clearLatitudeOneof() { + latitudeOneofCase_ = 0; + latitudeOneof_ = null; + onChanged(); + return this; + } + + private int altitudeOneofCase_ = 0; + private Object altitudeOneof_; + + public AltitudeOneofCase getAltitudeOneofCase() { + return AltitudeOneofCase.forNumber(altitudeOneofCase_); + } + + public Builder clearAltitudeOneof() { + altitudeOneofCase_ = 0; + altitudeOneof_ = null; + onChanged(); + return this; + } + + /** + * double longitude = 1; + * + * @return Whether the longitude field is set. + */ + public boolean hasLongitude() { + return longitudeOneofCase_ == 1; + } + + /** + * double longitude = 1; + * + * @return The longitude. 
+ */ + public double getLongitude() { + if (longitudeOneofCase_ == 1) { + return (Double) longitudeOneof_; + } + return 0D; + } + + /** + * double longitude = 1; + * + * @param value The longitude to set. + * @return This builder for chaining. + */ + public Builder setLongitude(double value) { + longitudeOneofCase_ = 1; + longitudeOneof_ = value; + onChanged(); + return this; + } + + /** + * double longitude = 1; + * + * @return This builder for chaining. + */ + public Builder clearLongitude() { + if (longitudeOneofCase_ == 1) { + longitudeOneofCase_ = 0; + longitudeOneof_ = null; + onChanged(); + } + return this; + } + + /** + * double latitude = 2; + * + * @return Whether the latitude field is set. + */ + public boolean hasLatitude() { + return latitudeOneofCase_ == 2; + } + + /** + * double latitude = 2; + * + * @return The latitude. + */ + public double getLatitude() { + if (latitudeOneofCase_ == 2) { + return (Double) latitudeOneof_; + } + return 0D; + } + + /** + * double latitude = 2; + * + * @param value The latitude to set. + * @return This builder for chaining. + */ + public Builder setLatitude(double value) { + latitudeOneofCase_ = 2; + latitudeOneof_ = value; + onChanged(); + return this; + } + + /** + * double latitude = 2; + * + * @return This builder for chaining. + */ + public Builder clearLatitude() { + if (latitudeOneofCase_ == 2) { + latitudeOneofCase_ = 0; + latitudeOneof_ = null; + onChanged(); + } + return this; + } + + /** + * double altitude = 3; + * + * @return Whether the altitude field is set. + */ + public boolean hasAltitude() { + return altitudeOneofCase_ == 3; + } + + /** + * double altitude = 3; + * + * @return The altitude. + */ + public double getAltitude() { + if (altitudeOneofCase_ == 3) { + return (Double) altitudeOneof_; + } + return 0D; + } + + /** + * double altitude = 3; + * + * @param value The altitude to set. + * @return This builder for chaining. 
+ */ + public Builder setAltitude(double value) { + altitudeOneofCase_ = 3; + altitudeOneof_ = value; + onChanged(); + return this; + } + + /** + * double altitude = 3; + * + * @return This builder for chaining. + */ + public Builder clearAltitude() { + if (altitudeOneofCase_ == 3) { + altitudeOneofCase_ = 0; + altitudeOneof_ = null; + onChanged(); + } + return this; + } + + @Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @Override + public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:io.apicurio.tests.protobuf.Point) + } + + // @@protoc_insertion_point(class_scope:io.apicurio.tests.protobuf.Point) + private static final Point DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new Point(); + } + + public static Point getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { + @Override + public Point parsePartialFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Point(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @Override + public Point getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} diff --git a/integration-tests/src/test/java/io/apicurio/tests/protobuf/PointOrBuilder.java b/integration-tests/src/test/java/io/apicurio/tests/protobuf/PointOrBuilder.java index 9e766f511e..73568f908b 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/protobuf/PointOrBuilder.java +++ 
b/integration-tests/src/test/java/io/apicurio/tests/protobuf/PointOrBuilder.java @@ -4,45 +4,54 @@ package io.apicurio.tests.protobuf; public interface PointOrBuilder extends - // @@protoc_insertion_point(interface_extends:io.apicurio.tests.protobuf.Point) - com.google.protobuf.MessageOrBuilder { - - /** - * double longitude = 1; - * @return Whether the longitude field is set. - */ - boolean hasLongitude(); - /** - * double longitude = 1; - * @return The longitude. - */ - double getLongitude(); - - /** - * double latitude = 2; - * @return Whether the latitude field is set. - */ - boolean hasLatitude(); - /** - * double latitude = 2; - * @return The latitude. - */ - double getLatitude(); - - /** - * double altitude = 3; - * @return Whether the altitude field is set. - */ - boolean hasAltitude(); - /** - * double altitude = 3; - * @return The altitude. - */ - double getAltitude(); - - public Point.LongitudeOneofCase getLongitudeOneofCase(); - - public Point.LatitudeOneofCase getLatitudeOneofCase(); - - public Point.AltitudeOneofCase getAltitudeOneofCase(); + // @@protoc_insertion_point(interface_extends:io.apicurio.tests.protobuf.Point) + com.google.protobuf.MessageOrBuilder { + + /** + * double longitude = 1; + * + * @return Whether the longitude field is set. + */ + boolean hasLongitude(); + + /** + * double longitude = 1; + * + * @return The longitude. + */ + double getLongitude(); + + /** + * double latitude = 2; + * + * @return Whether the latitude field is set. + */ + boolean hasLatitude(); + + /** + * double latitude = 2; + * + * @return The latitude. + */ + double getLatitude(); + + /** + * double altitude = 3; + * + * @return Whether the altitude field is set. + */ + boolean hasAltitude(); + + /** + * double altitude = 3; + * + * @return The altitude. 
+ */ + double getAltitude(); + + public Point.LongitudeOneofCase getLongitudeOneofCase(); + + public Point.LatitudeOneofCase getLatitudeOneofCase(); + + public Point.AltitudeOneofCase getAltitudeOneofCase(); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/protobuf/ProtobufTestMessage.java b/integration-tests/src/test/java/io/apicurio/tests/protobuf/ProtobufTestMessage.java index c42a24e044..eabd01f42e 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/protobuf/ProtobufTestMessage.java +++ b/integration-tests/src/test/java/io/apicurio/tests/protobuf/ProtobufTestMessage.java @@ -6,1311 +6,1343 @@ /** * Protobuf type {@code io.apicurio.tests.protobuf.ProtobufTestMessage} */ -public final class ProtobufTestMessage extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:io.apicurio.tests.protobuf.ProtobufTestMessage) +public final class ProtobufTestMessage extends com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:io.apicurio.tests.protobuf.ProtobufTestMessage) ProtobufTestMessageOrBuilder { -private static final long serialVersionUID = 0L; - // Use ProtobufTestMessage.newBuilder() to construct. 
- private ProtobufTestMessage(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private ProtobufTestMessage() { - s1_ = ""; - } - - @Override - @SuppressWarnings({"unused"}) - protected Object newInstance( - UnusedPrivateParameter unused) { - return new ProtobufTestMessage(); - } - - @Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ProtobufTestMessage( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - Header.Builder subBuilder = null; - if (header_ != null) { - subBuilder = header_.toBuilder(); - } - header_ = input.readMessage(Header.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(header_); - header_ = subBuilder.buildPartial(); - } - - break; - } - case 17: { - - d1_ = input.readDouble(); - break; - } - case 34: { - String s = input.readStringRequireUtf8(); - - s1_ = s; - break; - } - case 56: { - - i1_ = input.readInt32(); - break; - } - case 96: { - - bi1_ = input.readInt64(); - break; - } - case 178: { - com.google.protobuf.Timestamp.Builder subBuilder = null; - if (stateTime_ != null) { - subBuilder = stateTime_.toBuilder(); - } - stateTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(stateTime_); - stateTime_ = subBuilder.buildPartial(); - } - - break; - } - case 242: { - io.apicurio.tests.protobuf.Point.Builder subBuilder = null; - if (point_ != null) { - subBuilder = 
point_.toBuilder(); - } - point_ = input.readMessage(io.apicurio.tests.protobuf.Point.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(point_); - point_ = subBuilder.buildPartial(); - } + private static final long serialVersionUID = 0L; - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor; - } - - @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_fieldAccessorTable - .ensureFieldAccessorsInitialized( - ProtobufTestMessage.class, Builder.class); - } - - public static final int HEADER_FIELD_NUMBER = 1; - private Header header_; - /** - * .io.apicurio.tests.protobuf.Header header = 1; - * @return Whether the header field is set. - */ - @Override - public boolean hasHeader() { - return header_ != null; - } - /** - * .io.apicurio.tests.protobuf.Header header = 1; - * @return The header. - */ - @Override - public Header getHeader() { - return header_ == null ? Header.getDefaultInstance() : header_; - } - /** - * .io.apicurio.tests.protobuf.Header header = 1; - */ - @Override - public io.apicurio.tests.protobuf.HeaderOrBuilder getHeaderOrBuilder() { - return getHeader(); - } - - public static final int D1_FIELD_NUMBER = 2; - private double d1_; - /** - * double d1 = 2; - * @return The d1. 
- */ - @Override - public double getD1() { - return d1_; - } - - public static final int S1_FIELD_NUMBER = 4; - private volatile Object s1_; - /** - * string s1 = 4; - * @return The s1. - */ - @Override - public String getS1() { - Object ref = s1_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - s1_ = s; - return s; - } - } - /** - * string s1 = 4; - * @return The bytes for s1. - */ - @Override - public com.google.protobuf.ByteString - getS1Bytes() { - Object ref = s1_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - s1_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int I1_FIELD_NUMBER = 7; - private int i1_; - /** - * int32 i1 = 7; - * @return The i1. - */ - @Override - public int getI1() { - return i1_; - } - - public static final int BI1_FIELD_NUMBER = 12; - private long bi1_; - /** - * int64 bi1 = 12; - * @return The bi1. - */ - @Override - public long getBi1() { - return bi1_; - } - - public static final int STATE_TIME_FIELD_NUMBER = 22; - private com.google.protobuf.Timestamp stateTime_; - /** - * .google.protobuf.Timestamp state_time = 22; - * @return Whether the stateTime field is set. - */ - @Override - public boolean hasStateTime() { - return stateTime_ != null; - } - /** - * .google.protobuf.Timestamp state_time = 22; - * @return The stateTime. - */ - @Override - public com.google.protobuf.Timestamp getStateTime() { - return stateTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : stateTime_; - } - /** - * .google.protobuf.Timestamp state_time = 22; - */ - @Override - public com.google.protobuf.TimestampOrBuilder getStateTimeOrBuilder() { - return getStateTime(); - } - - public static final int POINT_FIELD_NUMBER = 30; - private io.apicurio.tests.protobuf.Point point_; - /** - * .io.apicurio.tests.protobuf.Point point = 30; - * @return Whether the point field is set. - */ - @Override - public boolean hasPoint() { - return point_ != null; - } - /** - * .io.apicurio.tests.protobuf.Point point = 30; - * @return The point. - */ - @Override - public io.apicurio.tests.protobuf.Point getPoint() { - return point_ == null ? io.apicurio.tests.protobuf.Point.getDefaultInstance() : point_; - } - /** - * .io.apicurio.tests.protobuf.Point point = 30; - */ - @Override - public io.apicurio.tests.protobuf.PointOrBuilder getPointOrBuilder() { - return getPoint(); - } - - private byte memoizedIsInitialized = -1; - @Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (header_ != null) { - output.writeMessage(1, getHeader()); + // Use ProtobufTestMessage.newBuilder() to construct. 
+ private ProtobufTestMessage(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); } - if (d1_ != 0D) { - output.writeDouble(2, d1_); - } - if (!getS1Bytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 4, s1_); - } - if (i1_ != 0) { - output.writeInt32(7, i1_); - } - if (bi1_ != 0L) { - output.writeInt64(12, bi1_); - } - if (stateTime_ != null) { - output.writeMessage(22, getStateTime()); - } - if (point_ != null) { - output.writeMessage(30, getPoint()); - } - unknownFields.writeTo(output); - } - - @Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (header_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getHeader()); - } - if (d1_ != 0D) { - size += com.google.protobuf.CodedOutputStream - .computeDoubleSize(2, d1_); - } - if (!getS1Bytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, s1_); - } - if (i1_ != 0) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(7, i1_); - } - if (bi1_ != 0L) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(12, bi1_); - } - if (stateTime_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(22, getStateTime()); - } - if (point_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(30, getPoint()); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @Override - public boolean equals(final Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof ProtobufTestMessage)) { - return super.equals(obj); - } - ProtobufTestMessage other = (ProtobufTestMessage) obj; - if (hasHeader() != other.hasHeader()) return false; - if (hasHeader()) { - if (!getHeader() - .equals(other.getHeader())) return false; - } - if (Double.doubleToLongBits(getD1()) - != Double.doubleToLongBits( - other.getD1())) 
return false; - if (!getS1() - .equals(other.getS1())) return false; - if (getI1() - != other.getI1()) return false; - if (getBi1() - != other.getBi1()) return false; - if (hasStateTime() != other.hasStateTime()) return false; - if (hasStateTime()) { - if (!getStateTime() - .equals(other.getStateTime())) return false; - } - if (hasPoint() != other.hasPoint()) return false; - if (hasPoint()) { - if (!getPoint() - .equals(other.getPoint())) return false; - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasHeader()) { - hash = (37 * hash) + HEADER_FIELD_NUMBER; - hash = (53 * hash) + getHeader().hashCode(); - } - hash = (37 * hash) + D1_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - Double.doubleToLongBits(getD1())); - hash = (37 * hash) + S1_FIELD_NUMBER; - hash = (53 * hash) + getS1().hashCode(); - hash = (37 * hash) + I1_FIELD_NUMBER; - hash = (53 * hash) + getI1(); - hash = (37 * hash) + BI1_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - getBi1()); - if (hasStateTime()) { - hash = (37 * hash) + STATE_TIME_FIELD_NUMBER; - hash = (53 * hash) + getStateTime().hashCode(); - } - if (hasPoint()) { - hash = (37 * hash) + POINT_FIELD_NUMBER; - hash = (53 * hash) + getPoint().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static ProtobufTestMessage parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static ProtobufTestMessage parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, 
extensionRegistry); - } - public static ProtobufTestMessage parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static ProtobufTestMessage parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static ProtobufTestMessage parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static ProtobufTestMessage parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static ProtobufTestMessage parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static ProtobufTestMessage parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static ProtobufTestMessage parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static ProtobufTestMessage parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static ProtobufTestMessage parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return 
com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static ProtobufTestMessage parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(ProtobufTestMessage prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @Override - protected Builder newBuilderForType( - BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code io.apicurio.tests.protobuf.ProtobufTestMessage} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:io.apicurio.tests.protobuf.ProtobufTestMessage) - ProtobufTestMessageOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor; - } - - @Override - protected FieldAccessorTable - internalGetFieldAccessorTable() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_fieldAccessorTable - .ensureFieldAccessorsInitialized( - ProtobufTestMessage.class, Builder.class); - } - - // Construct using io.apicurio.tests.protobuf.ProtobufTestMessage.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); + private ProtobufTestMessage() { + s1_ = ""; } - private Builder( - BuilderParent parent) { - super(parent); - 
maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } @Override - public Builder clear() { - super.clear(); - if (headerBuilder_ == null) { - header_ = null; - } else { - header_ = null; - headerBuilder_ = null; - } - d1_ = 0D; - - s1_ = ""; - - i1_ = 0; - - bi1_ = 0L; - - if (stateTimeBuilder_ == null) { - stateTime_ = null; - } else { - stateTime_ = null; - stateTimeBuilder_ = null; - } - if (pointBuilder_ == null) { - point_ = null; - } else { - point_ = null; - pointBuilder_ = null; - } - return this; + @SuppressWarnings({ "unused" }) + protected Object newInstance(UnusedPrivateParameter unused) { + return new ProtobufTestMessage(); } @Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor; + public final com.google.protobuf.UnknownFieldSet getUnknownFields() { + return this.unknownFields; } - @Override - public ProtobufTestMessage getDefaultInstanceForType() { - return ProtobufTestMessage.getDefaultInstance(); - } - - @Override - public ProtobufTestMessage build() { - ProtobufTestMessage result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @Override - public ProtobufTestMessage buildPartial() { - ProtobufTestMessage result = new ProtobufTestMessage(this); - if (headerBuilder_ == null) { - result.header_ = header_; - } else { - result.header_ = headerBuilder_.build(); - } - result.d1_ = d1_; - result.s1_ = s1_; - result.i1_ = i1_; - result.bi1_ = bi1_; - if (stateTimeBuilder_ == null) { - result.stateTime_ = stateTime_; - } else { - result.stateTime_ = stateTimeBuilder_.build(); - } - if (pointBuilder_ == null) { - result.point_ = point_; - } else { - result.point_ = pointBuilder_.build(); - } - onBuilt(); - return result; - } - - 
@Override - public Builder clone() { - return super.clone(); - } - @Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return super.setField(field, value); - } - @Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return super.setRepeatedField(field, index, value); - } - @Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return super.addRepeatedField(field, value); - } - @Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof ProtobufTestMessage) { - return mergeFrom((ProtobufTestMessage)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(ProtobufTestMessage other) { - if (other == ProtobufTestMessage.getDefaultInstance()) return this; - if (other.hasHeader()) { - mergeHeader(other.getHeader()); - } - if (other.getD1() != 0D) { - setD1(other.getD1()); - } - if (!other.getS1().isEmpty()) { - s1_ = other.s1_; - onChanged(); - } - if (other.getI1() != 0) { - setI1(other.getI1()); - } - if (other.getBi1() != 0L) { - setBi1(other.getBi1()); - } - if (other.hasStateTime()) { - mergeStateTime(other.getStateTime()); - } - if (other.hasPoint()) { - mergePoint(other.getPoint()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; + private ProtobufTestMessage(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new 
NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet + .newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + Header.Builder subBuilder = null; + if (header_ != null) { + subBuilder = header_.toBuilder(); + } + header_ = input.readMessage(Header.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(header_); + header_ = subBuilder.buildPartial(); + } + + break; + } + case 17: { + + d1_ = input.readDouble(); + break; + } + case 34: { + String s = input.readStringRequireUtf8(); + + s1_ = s; + break; + } + case 56: { + + i1_ = input.readInt32(); + break; + } + case 96: { + + bi1_ = input.readInt64(); + break; + } + case 178: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (stateTime_ != null) { + subBuilder = stateTime_.toBuilder(); + } + stateTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(stateTime_); + stateTime_ = subBuilder.buildPartial(); + } + + break; + } + case 242: { + io.apicurio.tests.protobuf.Point.Builder subBuilder = null; + if (point_ != null) { + subBuilder = point_.toBuilder(); + } + point_ = input.readMessage(io.apicurio.tests.protobuf.Point.parser(), + extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(point_); + point_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } } - 
@Override - public final boolean isInitialized() { - return true; + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor; } @Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - ProtobufTestMessage parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (ProtobufTestMessage) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; + protected FieldAccessorTable internalGetFieldAccessorTable() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized(ProtobufTestMessage.class, Builder.class); } + public static final int HEADER_FIELD_NUMBER = 1; private Header header_; - private com.google.protobuf.SingleFieldBuilderV3< - Header, Header.Builder, io.apicurio.tests.protobuf.HeaderOrBuilder> headerBuilder_; + /** * .io.apicurio.tests.protobuf.Header header = 1; + * * @return Whether the header field is set. */ + @Override public boolean hasHeader() { - return headerBuilder_ != null || header_ != null; + return header_ != null; } + /** * .io.apicurio.tests.protobuf.Header header = 1; + * * @return The header. */ + @Override public Header getHeader() { - if (headerBuilder_ == null) { return header_ == null ? 
Header.getDefaultInstance() : header_; - } else { - return headerBuilder_.getMessage(); - } } - /** - * .io.apicurio.tests.protobuf.Header header = 1; - */ - public Builder setHeader(Header value) { - if (headerBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - header_ = value; - onChanged(); - } else { - headerBuilder_.setMessage(value); - } - - return this; - } - /** - * .io.apicurio.tests.protobuf.Header header = 1; - */ - public Builder setHeader( - Header.Builder builderForValue) { - if (headerBuilder_ == null) { - header_ = builderForValue.build(); - onChanged(); - } else { - headerBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .io.apicurio.tests.protobuf.Header header = 1; - */ - public Builder mergeHeader(Header value) { - if (headerBuilder_ == null) { - if (header_ != null) { - header_ = - Header.newBuilder(header_).mergeFrom(value).buildPartial(); - } else { - header_ = value; - } - onChanged(); - } else { - headerBuilder_.mergeFrom(value); - } - return this; - } - /** - * .io.apicurio.tests.protobuf.Header header = 1; - */ - public Builder clearHeader() { - if (headerBuilder_ == null) { - header_ = null; - onChanged(); - } else { - header_ = null; - headerBuilder_ = null; - } - - return this; - } - /** - * .io.apicurio.tests.protobuf.Header header = 1; - */ - public Header.Builder getHeaderBuilder() { - - onChanged(); - return getHeaderFieldBuilder().getBuilder(); - } /** * .io.apicurio.tests.protobuf.Header header = 1; */ + @Override public io.apicurio.tests.protobuf.HeaderOrBuilder getHeaderOrBuilder() { - if (headerBuilder_ != null) { - return headerBuilder_.getMessageOrBuilder(); - } else { - return header_ == null ? 
- Header.getDefaultInstance() : header_; - } - } - /** - * .io.apicurio.tests.protobuf.Header header = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - Header, Header.Builder, io.apicurio.tests.protobuf.HeaderOrBuilder> - getHeaderFieldBuilder() { - if (headerBuilder_ == null) { - headerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - Header, Header.Builder, io.apicurio.tests.protobuf.HeaderOrBuilder>( - getHeader(), - getParentForChildren(), - isClean()); - header_ = null; - } - return headerBuilder_; + return getHeader(); } - private double d1_ ; + public static final int D1_FIELD_NUMBER = 2; + private double d1_; + /** * double d1 = 2; + * * @return The d1. */ @Override public double getD1() { - return d1_; - } - /** - * double d1 = 2; - * @param value The d1 to set. - * @return This builder for chaining. - */ - public Builder setD1(double value) { - - d1_ = value; - onChanged(); - return this; - } - /** - * double d1 = 2; - * @return This builder for chaining. - */ - public Builder clearD1() { - - d1_ = 0D; - onChanged(); - return this; + return d1_; } - private Object s1_ = ""; + public static final int S1_FIELD_NUMBER = 4; + private volatile Object s1_; + /** * string s1 = 4; + * * @return The s1. */ + @Override public String getS1() { - Object ref = s1_; - if (!(ref instanceof String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - s1_ = s; - return s; - } else { - return (String) ref; - } + Object ref = s1_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + s1_ = s; + return s; + } } + /** * string s1 = 4; + * * @return The bytes for s1. 
*/ - public com.google.protobuf.ByteString - getS1Bytes() { - Object ref = s1_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (String) ref); - s1_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string s1 = 4; - * @param value The s1 to set. - * @return This builder for chaining. - */ - public Builder setS1( - String value) { - if (value == null) { - throw new NullPointerException(); - } - - s1_ = value; - onChanged(); - return this; - } - /** - * string s1 = 4; - * @return This builder for chaining. - */ - public Builder clearS1() { - - s1_ = getDefaultInstance().getS1(); - onChanged(); - return this; - } - /** - * string s1 = 4; - * @param value The bytes for s1 to set. - * @return This builder for chaining. - */ - public Builder setS1Bytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - s1_ = value; - onChanged(); - return this; + @Override + public com.google.protobuf.ByteString getS1Bytes() { + Object ref = s1_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + s1_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - private int i1_ ; + public static final int I1_FIELD_NUMBER = 7; + private int i1_; + /** * int32 i1 = 7; + * * @return The i1. */ @Override public int getI1() { - return i1_; - } - /** - * int32 i1 = 7; - * @param value The i1 to set. - * @return This builder for chaining. - */ - public Builder setI1(int value) { - - i1_ = value; - onChanged(); - return this; - } - /** - * int32 i1 = 7; - * @return This builder for chaining. 
- */ - public Builder clearI1() { - - i1_ = 0; - onChanged(); - return this; + return i1_; } - private long bi1_ ; + public static final int BI1_FIELD_NUMBER = 12; + private long bi1_; + /** * int64 bi1 = 12; + * * @return The bi1. */ @Override public long getBi1() { - return bi1_; - } - /** - * int64 bi1 = 12; - * @param value The bi1 to set. - * @return This builder for chaining. - */ - public Builder setBi1(long value) { - - bi1_ = value; - onChanged(); - return this; - } - /** - * int64 bi1 = 12; - * @return This builder for chaining. - */ - public Builder clearBi1() { - - bi1_ = 0L; - onChanged(); - return this; + return bi1_; } + public static final int STATE_TIME_FIELD_NUMBER = 22; private com.google.protobuf.Timestamp stateTime_; - private com.google.protobuf.SingleFieldBuilderV3< - com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> stateTimeBuilder_; + /** * .google.protobuf.Timestamp state_time = 22; + * * @return Whether the stateTime field is set. */ + @Override public boolean hasStateTime() { - return stateTimeBuilder_ != null || stateTime_ != null; + return stateTime_ != null; } + /** * .google.protobuf.Timestamp state_time = 22; + * * @return The stateTime. */ + @Override public com.google.protobuf.Timestamp getStateTime() { - if (stateTimeBuilder_ == null) { return stateTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : stateTime_; - } else { - return stateTimeBuilder_.getMessage(); - } - } - /** - * .google.protobuf.Timestamp state_time = 22; - */ - public Builder setStateTime(com.google.protobuf.Timestamp value) { - if (stateTimeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - stateTime_ = value; - onChanged(); - } else { - stateTimeBuilder_.setMessage(value); - } - - return this; - } - /** - * .google.protobuf.Timestamp state_time = 22; - */ - public Builder setStateTime( - com.google.protobuf.Timestamp.Builder builderForValue) { - if (stateTimeBuilder_ == null) { - stateTime_ = builderForValue.build(); - onChanged(); - } else { - stateTimeBuilder_.setMessage(builderForValue.build()); - } - - return this; } - /** - * .google.protobuf.Timestamp state_time = 22; - */ - public Builder mergeStateTime(com.google.protobuf.Timestamp value) { - if (stateTimeBuilder_ == null) { - if (stateTime_ != null) { - stateTime_ = - com.google.protobuf.Timestamp.newBuilder(stateTime_).mergeFrom(value).buildPartial(); - } else { - stateTime_ = value; - } - onChanged(); - } else { - stateTimeBuilder_.mergeFrom(value); - } - return this; - } - /** - * .google.protobuf.Timestamp state_time = 22; - */ - public Builder clearStateTime() { - if (stateTimeBuilder_ == null) { - stateTime_ = null; - onChanged(); - } else { - stateTime_ = null; - stateTimeBuilder_ = null; - } - - return this; - } - /** - * .google.protobuf.Timestamp state_time = 22; - */ - public com.google.protobuf.Timestamp.Builder getStateTimeBuilder() { - - onChanged(); - return getStateTimeFieldBuilder().getBuilder(); - } /** * .google.protobuf.Timestamp state_time = 22; */ + @Override public com.google.protobuf.TimestampOrBuilder getStateTimeOrBuilder() { - if (stateTimeBuilder_ != null) { - return stateTimeBuilder_.getMessageOrBuilder(); - } else { - return stateTime_ == null ? 
- com.google.protobuf.Timestamp.getDefaultInstance() : stateTime_; - } - } - /** - * .google.protobuf.Timestamp state_time = 22; - */ - private com.google.protobuf.SingleFieldBuilderV3< - com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> - getStateTimeFieldBuilder() { - if (stateTimeBuilder_ == null) { - stateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( - getStateTime(), - getParentForChildren(), - isClean()); - stateTime_ = null; - } - return stateTimeBuilder_; + return getStateTime(); } + public static final int POINT_FIELD_NUMBER = 30; private io.apicurio.tests.protobuf.Point point_; - private com.google.protobuf.SingleFieldBuilderV3< - io.apicurio.tests.protobuf.Point, io.apicurio.tests.protobuf.Point.Builder, io.apicurio.tests.protobuf.PointOrBuilder> pointBuilder_; + /** * .io.apicurio.tests.protobuf.Point point = 30; + * * @return Whether the point field is set. */ + @Override public boolean hasPoint() { - return pointBuilder_ != null || point_ != null; + return point_ != null; } + /** * .io.apicurio.tests.protobuf.Point point = 30; + * * @return The point. */ + @Override public io.apicurio.tests.protobuf.Point getPoint() { - if (pointBuilder_ == null) { return point_ == null ? 
io.apicurio.tests.protobuf.Point.getDefaultInstance() : point_; - } else { - return pointBuilder_.getMessage(); - } } + /** * .io.apicurio.tests.protobuf.Point point = 30; */ - public Builder setPoint(io.apicurio.tests.protobuf.Point value) { - if (pointBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - point_ = value; - onChanged(); - } else { - pointBuilder_.setMessage(value); - } - - return this; + @Override + public io.apicurio.tests.protobuf.PointOrBuilder getPointOrBuilder() { + return getPoint(); } - /** - * .io.apicurio.tests.protobuf.Point point = 30; - */ - public Builder setPoint( - io.apicurio.tests.protobuf.Point.Builder builderForValue) { - if (pointBuilder_ == null) { - point_ = builderForValue.build(); - onChanged(); - } else { - pointBuilder_.setMessage(builderForValue.build()); - } - - return this; + + private byte memoizedIsInitialized = -1; + + @Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) + return true; + if (isInitialized == 0) + return false; + + memoizedIsInitialized = 1; + return true; } - /** - * .io.apicurio.tests.protobuf.Point point = 30; - */ - public Builder mergePoint(io.apicurio.tests.protobuf.Point value) { - if (pointBuilder_ == null) { + + @Override + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { + if (header_ != null) { + output.writeMessage(1, getHeader()); + } + if (d1_ != 0D) { + output.writeDouble(2, d1_); + } + if (!getS1Bytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, s1_); + } + if (i1_ != 0) { + output.writeInt32(7, i1_); + } + if (bi1_ != 0L) { + output.writeInt64(12, bi1_); + } + if (stateTime_ != null) { + output.writeMessage(22, getStateTime()); + } if (point_ != null) { - point_ = - io.apicurio.tests.protobuf.Point.newBuilder(point_).mergeFrom(value).buildPartial(); - } else { - point_ = value; + output.writeMessage(30, 
getPoint()); } - onChanged(); - } else { - pointBuilder_.mergeFrom(value); - } + unknownFields.writeTo(output); + } + + @Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) + return size; - return this; + size = 0; + if (header_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getHeader()); + } + if (d1_ != 0D) { + size += com.google.protobuf.CodedOutputStream.computeDoubleSize(2, d1_); + } + if (!getS1Bytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, s1_); + } + if (i1_ != 0) { + size += com.google.protobuf.CodedOutputStream.computeInt32Size(7, i1_); + } + if (bi1_ != 0L) { + size += com.google.protobuf.CodedOutputStream.computeInt64Size(12, bi1_); + } + if (stateTime_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(22, getStateTime()); + } + if (point_ != null) { + size += com.google.protobuf.CodedOutputStream.computeMessageSize(30, getPoint()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; } - /** - * .io.apicurio.tests.protobuf.Point point = 30; - */ - public Builder clearPoint() { - if (pointBuilder_ == null) { - point_ = null; - onChanged(); - } else { - point_ = null; - pointBuilder_ = null; - } - - return this; + + @Override + public boolean equals(final Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof ProtobufTestMessage)) { + return super.equals(obj); + } + ProtobufTestMessage other = (ProtobufTestMessage) obj; + + if (hasHeader() != other.hasHeader()) + return false; + if (hasHeader()) { + if (!getHeader().equals(other.getHeader())) + return false; + } + if (Double.doubleToLongBits(getD1()) != Double.doubleToLongBits(other.getD1())) + return false; + if (!getS1().equals(other.getS1())) + return false; + if (getI1() != other.getI1()) + return false; + if (getBi1() != other.getBi1()) + return false; + if (hasStateTime() != other.hasStateTime()) + 
return false; + if (hasStateTime()) { + if (!getStateTime().equals(other.getStateTime())) + return false; + } + if (hasPoint() != other.hasPoint()) + return false; + if (hasPoint()) { + if (!getPoint().equals(other.getPoint())) + return false; + } + if (!unknownFields.equals(other.unknownFields)) + return false; + return true; } - /** - * .io.apicurio.tests.protobuf.Point point = 30; - */ - public io.apicurio.tests.protobuf.Point.Builder getPointBuilder() { - - onChanged(); - return getPointFieldBuilder().getBuilder(); + + @Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasHeader()) { + hash = (37 * hash) + HEADER_FIELD_NUMBER; + hash = (53 * hash) + getHeader().hashCode(); + } + hash = (37 * hash) + D1_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong(Double.doubleToLongBits(getD1())); + hash = (37 * hash) + S1_FIELD_NUMBER; + hash = (53 * hash) + getS1().hashCode(); + hash = (37 * hash) + I1_FIELD_NUMBER; + hash = (53 * hash) + getI1(); + hash = (37 * hash) + BI1_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getBi1()); + if (hasStateTime()) { + hash = (37 * hash) + STATE_TIME_FIELD_NUMBER; + hash = (53 * hash) + getStateTime().hashCode(); + } + if (hasPoint()) { + hash = (37 * hash) + POINT_FIELD_NUMBER; + hash = (53 * hash) + getPoint().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; } - /** - * .io.apicurio.tests.protobuf.Point point = 30; - */ - public io.apicurio.tests.protobuf.PointOrBuilder getPointOrBuilder() { - if (pointBuilder_ != null) { - return pointBuilder_.getMessageOrBuilder(); - } else { - return point_ == null ? 
- io.apicurio.tests.protobuf.Point.getDefaultInstance() : point_; - } + + public static ProtobufTestMessage parseFrom(java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); } - /** - * .io.apicurio.tests.protobuf.Point point = 30; - */ - private com.google.protobuf.SingleFieldBuilderV3< - io.apicurio.tests.protobuf.Point, io.apicurio.tests.protobuf.Point.Builder, io.apicurio.tests.protobuf.PointOrBuilder> - getPointFieldBuilder() { - if (pointBuilder_ == null) { - pointBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - io.apicurio.tests.protobuf.Point, io.apicurio.tests.protobuf.Point.Builder, io.apicurio.tests.protobuf.PointOrBuilder>( - getPoint(), - getParentForChildren(), - isClean()); - point_ = null; - } - return pointBuilder_; + + public static ProtobufTestMessage parseFrom(java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static ProtobufTestMessage parseFrom(com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static ProtobufTestMessage parseFrom(com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static ProtobufTestMessage parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + + public static ProtobufTestMessage parseFrom(byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + + public static ProtobufTestMessage parseFrom(java.io.InputStream input) 
throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static ProtobufTestMessage parseFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + } + + public static ProtobufTestMessage parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); + } + + public static ProtobufTestMessage parseDelimitedFrom(java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, + extensionRegistry); + } + + public static ProtobufTestMessage parseFrom(com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); + } + + public static ProtobufTestMessage parseFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry); + } + + @Override + public Builder newBuilderForType() { + return newBuilder(); + } + + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + + public static Builder newBuilder(ProtobufTestMessage prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } + @Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); + public Builder toBuilder() { + return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); + protected Builder newBuilderForType(BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; } + /** + * Protobuf type {@code io.apicurio.tests.protobuf.ProtobufTestMessage} + */ + public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder + implements + // @@protoc_insertion_point(builder_implements:io.apicurio.tests.protobuf.ProtobufTestMessage) + ProtobufTestMessageOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor; + } + + @Override + protected FieldAccessorTable internalGetFieldAccessorTable() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized(ProtobufTestMessage.class, Builder.class); + } - // @@protoc_insertion_point(builder_scope:io.apicurio.tests.protobuf.ProtobufTestMessage) - } + // Construct using io.apicurio.tests.protobuf.ProtobufTestMessage.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } - // @@protoc_insertion_point(class_scope:io.apicurio.tests.protobuf.ProtobufTestMessage) - private static final ProtobufTestMessage DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new ProtobufTestMessage(); - } + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } - public static ProtobufTestMessage getDefaultInstance() { - return DEFAULT_INSTANCE; - } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { + } + } - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @Override - public 
ProtobufTestMessage parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ProtobufTestMessage(input, extensionRegistry); + @Override + public Builder clear() { + super.clear(); + if (headerBuilder_ == null) { + header_ = null; + } else { + header_ = null; + headerBuilder_ = null; + } + d1_ = 0D; + + s1_ = ""; + + i1_ = 0; + + bi1_ = 0L; + + if (stateTimeBuilder_ == null) { + stateTime_ = null; + } else { + stateTime_ = null; + stateTimeBuilder_ = null; + } + if (pointBuilder_ == null) { + point_ = null; + } else { + point_ = null; + pointBuilder_ = null; + } + return this; + } + + @Override + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { + return Testmessage.internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor; + } + + @Override + public ProtobufTestMessage getDefaultInstanceForType() { + return ProtobufTestMessage.getDefaultInstance(); + } + + @Override + public ProtobufTestMessage build() { + ProtobufTestMessage result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @Override + public ProtobufTestMessage buildPartial() { + ProtobufTestMessage result = new ProtobufTestMessage(this); + if (headerBuilder_ == null) { + result.header_ = header_; + } else { + result.header_ = headerBuilder_.build(); + } + result.d1_ = d1_; + result.s1_ = s1_; + result.i1_ = i1_; + result.bi1_ = bi1_; + if (stateTimeBuilder_ == null) { + result.stateTime_ = stateTime_; + } else { + result.stateTime_ = stateTimeBuilder_.build(); + } + if (pointBuilder_ == null) { + result.point_ = point_; + } else { + result.point_ = pointBuilder_.build(); + } + onBuilt(); + return result; + } + + @Override + public Builder clone() { + return super.clone(); + } + + @Override + public Builder 
setField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { + return super.setField(field, value); + } + + @Override + public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + + @Override + public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + + @Override + public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, + Object value) { + return super.setRepeatedField(field, index, value); + } + + @Override + public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { + return super.addRepeatedField(field, value); + } + + @Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof ProtobufTestMessage) { + return mergeFrom((ProtobufTestMessage) other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(ProtobufTestMessage other) { + if (other == ProtobufTestMessage.getDefaultInstance()) + return this; + if (other.hasHeader()) { + mergeHeader(other.getHeader()); + } + if (other.getD1() != 0D) { + setD1(other.getD1()); + } + if (!other.getS1().isEmpty()) { + s1_ = other.s1_; + onChanged(); + } + if (other.getI1() != 0) { + setI1(other.getI1()); + } + if (other.getBi1() != 0L) { + setBi1(other.getBi1()); + } + if (other.hasStateTime()) { + mergeStateTime(other.getStateTime()); + } + if (other.hasPoint()) { + mergePoint(other.getPoint()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @Override + public final boolean isInitialized() { + return true; + } + + @Override + public Builder mergeFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { + ProtobufTestMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, 
extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (ProtobufTestMessage) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private Header header_; + private com.google.protobuf.SingleFieldBuilderV3 headerBuilder_; + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + * + * @return Whether the header field is set. + */ + public boolean hasHeader() { + return headerBuilder_ != null || header_ != null; + } + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + * + * @return The header. + */ + public Header getHeader() { + if (headerBuilder_ == null) { + return header_ == null ? Header.getDefaultInstance() : header_; + } else { + return headerBuilder_.getMessage(); + } + } + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + */ + public Builder setHeader(Header value) { + if (headerBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + header_ = value; + onChanged(); + } else { + headerBuilder_.setMessage(value); + } + + return this; + } + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + */ + public Builder setHeader(Header.Builder builderForValue) { + if (headerBuilder_ == null) { + header_ = builderForValue.build(); + onChanged(); + } else { + headerBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + */ + public Builder mergeHeader(Header value) { + if (headerBuilder_ == null) { + if (header_ != null) { + header_ = Header.newBuilder(header_).mergeFrom(value).buildPartial(); + } else { + header_ = value; + } + onChanged(); + } else { + headerBuilder_.mergeFrom(value); + } + + return this; + } + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + */ + public Builder clearHeader() { + if (headerBuilder_ == null) { + header_ = null; + onChanged(); + } else { + 
header_ = null; + headerBuilder_ = null; + } + + return this; + } + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + */ + public Header.Builder getHeaderBuilder() { + + onChanged(); + return getHeaderFieldBuilder().getBuilder(); + } + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + */ + public io.apicurio.tests.protobuf.HeaderOrBuilder getHeaderOrBuilder() { + if (headerBuilder_ != null) { + return headerBuilder_.getMessageOrBuilder(); + } else { + return header_ == null ? Header.getDefaultInstance() : header_; + } + } + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3 getHeaderFieldBuilder() { + if (headerBuilder_ == null) { + headerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3( + getHeader(), getParentForChildren(), isClean()); + header_ = null; + } + return headerBuilder_; + } + + private double d1_; + + /** + * double d1 = 2; + * + * @return The d1. + */ + @Override + public double getD1() { + return d1_; + } + + /** + * double d1 = 2; + * + * @param value The d1 to set. + * @return This builder for chaining. + */ + public Builder setD1(double value) { + + d1_ = value; + onChanged(); + return this; + } + + /** + * double d1 = 2; + * + * @return This builder for chaining. + */ + public Builder clearD1() { + + d1_ = 0D; + onChanged(); + return this; + } + + private Object s1_ = ""; + + /** + * string s1 = 4; + * + * @return The s1. + */ + public String getS1() { + Object ref = s1_; + if (!(ref instanceof String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + s1_ = s; + return s; + } else { + return (String) ref; + } + } + + /** + * string s1 = 4; + * + * @return The bytes for s1. 
+ */ + public com.google.protobuf.ByteString getS1Bytes() { + Object ref = s1_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); + s1_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + /** + * string s1 = 4; + * + * @param value The s1 to set. + * @return This builder for chaining. + */ + public Builder setS1(String value) { + if (value == null) { + throw new NullPointerException(); + } + + s1_ = value; + onChanged(); + return this; + } + + /** + * string s1 = 4; + * + * @return This builder for chaining. + */ + public Builder clearS1() { + + s1_ = getDefaultInstance().getS1(); + onChanged(); + return this; + } + + /** + * string s1 = 4; + * + * @param value The bytes for s1 to set. + * @return This builder for chaining. + */ + public Builder setS1Bytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + s1_ = value; + onChanged(); + return this; + } + + private int i1_; + + /** + * int32 i1 = 7; + * + * @return The i1. + */ + @Override + public int getI1() { + return i1_; + } + + /** + * int32 i1 = 7; + * + * @param value The i1 to set. + * @return This builder for chaining. + */ + public Builder setI1(int value) { + + i1_ = value; + onChanged(); + return this; + } + + /** + * int32 i1 = 7; + * + * @return This builder for chaining. + */ + public Builder clearI1() { + + i1_ = 0; + onChanged(); + return this; + } + + private long bi1_; + + /** + * int64 bi1 = 12; + * + * @return The bi1. + */ + @Override + public long getBi1() { + return bi1_; + } + + /** + * int64 bi1 = 12; + * + * @param value The bi1 to set. + * @return This builder for chaining. + */ + public Builder setBi1(long value) { + + bi1_ = value; + onChanged(); + return this; + } + + /** + * int64 bi1 = 12; + * + * @return This builder for chaining. 
+ */ + public Builder clearBi1() { + + bi1_ = 0L; + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp stateTime_; + private com.google.protobuf.SingleFieldBuilderV3 stateTimeBuilder_; + + /** + * .google.protobuf.Timestamp state_time = 22; + * + * @return Whether the stateTime field is set. + */ + public boolean hasStateTime() { + return stateTimeBuilder_ != null || stateTime_ != null; + } + + /** + * .google.protobuf.Timestamp state_time = 22; + * + * @return The stateTime. + */ + public com.google.protobuf.Timestamp getStateTime() { + if (stateTimeBuilder_ == null) { + return stateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : stateTime_; + } else { + return stateTimeBuilder_.getMessage(); + } + } + + /** + * .google.protobuf.Timestamp state_time = 22; + */ + public Builder setStateTime(com.google.protobuf.Timestamp value) { + if (stateTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + stateTime_ = value; + onChanged(); + } else { + stateTimeBuilder_.setMessage(value); + } + + return this; + } + + /** + * .google.protobuf.Timestamp state_time = 22; + */ + public Builder setStateTime(com.google.protobuf.Timestamp.Builder builderForValue) { + if (stateTimeBuilder_ == null) { + stateTime_ = builderForValue.build(); + onChanged(); + } else { + stateTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + + /** + * .google.protobuf.Timestamp state_time = 22; + */ + public Builder mergeStateTime(com.google.protobuf.Timestamp value) { + if (stateTimeBuilder_ == null) { + if (stateTime_ != null) { + stateTime_ = com.google.protobuf.Timestamp.newBuilder(stateTime_).mergeFrom(value) + .buildPartial(); + } else { + stateTime_ = value; + } + onChanged(); + } else { + stateTimeBuilder_.mergeFrom(value); + } + + return this; + } + + /** + * .google.protobuf.Timestamp state_time = 22; + */ + public Builder clearStateTime() { + if (stateTimeBuilder_ == null) { + stateTime_ = 
null; + onChanged(); + } else { + stateTime_ = null; + stateTimeBuilder_ = null; + } + + return this; + } + + /** + * .google.protobuf.Timestamp state_time = 22; + */ + public com.google.protobuf.Timestamp.Builder getStateTimeBuilder() { + + onChanged(); + return getStateTimeFieldBuilder().getBuilder(); + } + + /** + * .google.protobuf.Timestamp state_time = 22; + */ + public com.google.protobuf.TimestampOrBuilder getStateTimeOrBuilder() { + if (stateTimeBuilder_ != null) { + return stateTimeBuilder_.getMessageOrBuilder(); + } else { + return stateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : stateTime_; + } + } + + /** + * .google.protobuf.Timestamp state_time = 22; + */ + private com.google.protobuf.SingleFieldBuilderV3 getStateTimeFieldBuilder() { + if (stateTimeBuilder_ == null) { + stateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3( + getStateTime(), getParentForChildren(), isClean()); + stateTime_ = null; + } + return stateTimeBuilder_; + } + + private io.apicurio.tests.protobuf.Point point_; + private com.google.protobuf.SingleFieldBuilderV3 pointBuilder_; + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + * + * @return Whether the point field is set. + */ + public boolean hasPoint() { + return pointBuilder_ != null || point_ != null; + } + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + * + * @return The point. + */ + public io.apicurio.tests.protobuf.Point getPoint() { + if (pointBuilder_ == null) { + return point_ == null ? 
io.apicurio.tests.protobuf.Point.getDefaultInstance() : point_; + } else { + return pointBuilder_.getMessage(); + } + } + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + */ + public Builder setPoint(io.apicurio.tests.protobuf.Point value) { + if (pointBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + point_ = value; + onChanged(); + } else { + pointBuilder_.setMessage(value); + } + + return this; + } + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + */ + public Builder setPoint(io.apicurio.tests.protobuf.Point.Builder builderForValue) { + if (pointBuilder_ == null) { + point_ = builderForValue.build(); + onChanged(); + } else { + pointBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + */ + public Builder mergePoint(io.apicurio.tests.protobuf.Point value) { + if (pointBuilder_ == null) { + if (point_ != null) { + point_ = io.apicurio.tests.protobuf.Point.newBuilder(point_).mergeFrom(value) + .buildPartial(); + } else { + point_ = value; + } + onChanged(); + } else { + pointBuilder_.mergeFrom(value); + } + + return this; + } + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + */ + public Builder clearPoint() { + if (pointBuilder_ == null) { + point_ = null; + onChanged(); + } else { + point_ = null; + pointBuilder_ = null; + } + + return this; + } + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + */ + public io.apicurio.tests.protobuf.Point.Builder getPointBuilder() { + + onChanged(); + return getPointFieldBuilder().getBuilder(); + } + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + */ + public io.apicurio.tests.protobuf.PointOrBuilder getPointOrBuilder() { + if (pointBuilder_ != null) { + return pointBuilder_.getMessageOrBuilder(); + } else { + return point_ == null ? 
io.apicurio.tests.protobuf.Point.getDefaultInstance() : point_; + } + } + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + */ + private com.google.protobuf.SingleFieldBuilderV3 getPointFieldBuilder() { + if (pointBuilder_ == null) { + pointBuilder_ = new com.google.protobuf.SingleFieldBuilderV3( + getPoint(), getParentForChildren(), isClean()); + point_ = null; + } + return pointBuilder_; + } + + @Override + public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @Override + public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + // @@protoc_insertion_point(builder_scope:io.apicurio.tests.protobuf.ProtobufTestMessage) } - }; - public static com.google.protobuf.Parser parser() { - return PARSER; - } + // @@protoc_insertion_point(class_scope:io.apicurio.tests.protobuf.ProtobufTestMessage) + private static final ProtobufTestMessage DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new ProtobufTestMessage(); + } - @Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } + public static ProtobufTestMessage getDefaultInstance() { + return DEFAULT_INSTANCE; + } - @Override - public ProtobufTestMessage getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } + private static final com.google.protobuf.Parser PARSER = new com.google.protobuf.AbstractParser() { + @Override + public ProtobufTestMessage parsePartialFrom(com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ProtobufTestMessage(input, extensionRegistry); + } + }; -} + public static com.google.protobuf.Parser parser() { + return PARSER; + } + @Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @Override + public 
ProtobufTestMessage getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} diff --git a/integration-tests/src/test/java/io/apicurio/tests/protobuf/ProtobufTestMessageOrBuilder.java b/integration-tests/src/test/java/io/apicurio/tests/protobuf/ProtobufTestMessageOrBuilder.java index 3e447168f9..76c9bcc6a4 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/protobuf/ProtobufTestMessageOrBuilder.java +++ b/integration-tests/src/test/java/io/apicurio/tests/protobuf/ProtobufTestMessageOrBuilder.java @@ -4,81 +4,98 @@ package io.apicurio.tests.protobuf; public interface ProtobufTestMessageOrBuilder extends - // @@protoc_insertion_point(interface_extends:io.apicurio.tests.protobuf.ProtobufTestMessage) - com.google.protobuf.MessageOrBuilder { - - /** - * .io.apicurio.tests.protobuf.Header header = 1; - * @return Whether the header field is set. - */ - boolean hasHeader(); - /** - * .io.apicurio.tests.protobuf.Header header = 1; - * @return The header. - */ - Header getHeader(); - /** - * .io.apicurio.tests.protobuf.Header header = 1; - */ - HeaderOrBuilder getHeaderOrBuilder(); - - /** - * double d1 = 2; - * @return The d1. - */ - double getD1(); - - /** - * string s1 = 4; - * @return The s1. - */ - String getS1(); - /** - * string s1 = 4; - * @return The bytes for s1. - */ - com.google.protobuf.ByteString - getS1Bytes(); - - /** - * int32 i1 = 7; - * @return The i1. - */ - int getI1(); - - /** - * int64 bi1 = 12; - * @return The bi1. - */ - long getBi1(); - - /** - * .google.protobuf.Timestamp state_time = 22; - * @return Whether the stateTime field is set. - */ - boolean hasStateTime(); - /** - * .google.protobuf.Timestamp state_time = 22; - * @return The stateTime. - */ - com.google.protobuf.Timestamp getStateTime(); - /** - * .google.protobuf.Timestamp state_time = 22; - */ - com.google.protobuf.TimestampOrBuilder getStateTimeOrBuilder(); - - /** - * .io.apicurio.tests.protobuf.Point point = 30; - * @return Whether the point field is set. 
- */ - boolean hasPoint(); - /** - * .io.apicurio.tests.protobuf.Point point = 30; - * @return The point. - */ - Point getPoint(); - /** - * .io.apicurio.tests.protobuf.Point point = 30; - */ - io.apicurio.tests.protobuf.PointOrBuilder getPointOrBuilder(); + // @@protoc_insertion_point(interface_extends:io.apicurio.tests.protobuf.ProtobufTestMessage) + com.google.protobuf.MessageOrBuilder { + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + * + * @return Whether the header field is set. + */ + boolean hasHeader(); + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + * + * @return The header. + */ + Header getHeader(); + + /** + * .io.apicurio.tests.protobuf.Header header = 1; + */ + HeaderOrBuilder getHeaderOrBuilder(); + + /** + * double d1 = 2; + * + * @return The d1. + */ + double getD1(); + + /** + * string s1 = 4; + * + * @return The s1. + */ + String getS1(); + + /** + * string s1 = 4; + * + * @return The bytes for s1. + */ + com.google.protobuf.ByteString getS1Bytes(); + + /** + * int32 i1 = 7; + * + * @return The i1. + */ + int getI1(); + + /** + * int64 bi1 = 12; + * + * @return The bi1. + */ + long getBi1(); + + /** + * .google.protobuf.Timestamp state_time = 22; + * + * @return Whether the stateTime field is set. + */ + boolean hasStateTime(); + + /** + * .google.protobuf.Timestamp state_time = 22; + * + * @return The stateTime. + */ + com.google.protobuf.Timestamp getStateTime(); + + /** + * .google.protobuf.Timestamp state_time = 22; + */ + com.google.protobuf.TimestampOrBuilder getStateTimeOrBuilder(); + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + * + * @return Whether the point field is set. + */ + boolean hasPoint(); + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + * + * @return The point. 
+ */ + Point getPoint(); + + /** + * .io.apicurio.tests.protobuf.Point point = 30; + */ + io.apicurio.tests.protobuf.PointOrBuilder getPointOrBuilder(); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/protobuf/Testmessage.java b/integration-tests/src/test/java/io/apicurio/tests/protobuf/Testmessage.java index 641164c889..4b34a92b43 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/protobuf/Testmessage.java +++ b/integration-tests/src/test/java/io/apicurio/tests/protobuf/Testmessage.java @@ -4,83 +4,66 @@ package io.apicurio.tests.protobuf; public final class Testmessage { - private Testmessage() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistryLite registry) { - } + private Testmessage() { + } - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - registerAllExtensions( - (com.google.protobuf.ExtensionRegistryLite) registry); - } - static final com.google.protobuf.Descriptors.Descriptor - internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor; - static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_fieldAccessorTable; - static final com.google.protobuf.Descriptors.Descriptor - internal_static_io_apicurio_tests_protobuf_Header_descriptor; - static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_io_apicurio_tests_protobuf_Header_fieldAccessorTable; - static final com.google.protobuf.Descriptors.Descriptor - internal_static_io_apicurio_tests_protobuf_Point_descriptor; - static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_io_apicurio_tests_protobuf_Point_fieldAccessorTable; + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) { + } - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - 
private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - String[] descriptorData = { - "\nGintegration-tests/testsuite/src/test/r" + - "esources/serdes/testmessage.proto\022\032io.ap" + - "icurio.tests.protobuf\032\037google/protobuf/t" + - "imestamp.proto\"\334\001\n\023ProtobufTestMessage\0222" + - "\n\006header\030\001 \001(\0132\".io.apicurio.tests.proto" + - "buf.Header\022\n\n\002d1\030\002 \001(\001\022\n\n\002s1\030\004 \001(\t\022\n\n\002i1" + - "\030\007 \001(\005\022\013\n\003bi1\030\014 \001(\003\022.\n\nstate_time\030\026 \001(\0132" + - "\032.google.protobuf.Timestamp\0220\n\005point\030\036 \001" + - "(\0132!.io.apicurio.tests.protobuf.Point\"\251\001" + - "\n\006Header\022(\n\004time\030\001 \001(\0132\032.google.protobuf" + - ".Timestamp\022\016\n\006source\030\002 \001(\t\022\023\n\013destinatio" + - "n\030\003 \001(\t\022\014\n\004uuid\030\004 \001(\t\022\024\n\014source_uuids\030\005 " + - "\003(\t\022\027\n\017message_type_id\030\006 \001(\t\022\023\n\013raw_mess" + - "age\030\007 \001(\t\"{\n\005Point\022\023\n\tlongitude\030\001 \001(\001H\000\022" + - "\022\n\010latitude\030\002 \001(\001H\001\022\022\n\010altitude\030\003 \001(\001H\002B" + - "\021\n\017longitude_oneofB\020\n\016latitude_oneofB\020\n\016" + - "altitude_oneofB\002P\001b\006proto3" - }; - descriptor = com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - com.google.protobuf.TimestampProto.getDescriptor(), - }); - internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor, - new String[] { "Header", "D1", "S1", "I1", "Bi1", "StateTime", "Point", }); - 
internal_static_io_apicurio_tests_protobuf_Header_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_io_apicurio_tests_protobuf_Header_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_io_apicurio_tests_protobuf_Header_descriptor, - new String[] { "Time", "Source", "Destination", "Uuid", "SourceUuids", "MessageTypeId", "RawMessage", }); - internal_static_io_apicurio_tests_protobuf_Point_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_io_apicurio_tests_protobuf_Point_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_io_apicurio_tests_protobuf_Point_descriptor, - new String[] { "Longitude", "Latitude", "Altitude", "LongitudeOneof", "LatitudeOneof", "AltitudeOneof", }); - com.google.protobuf.TimestampProto.getDescriptor(); - } + public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); + } - // @@protoc_insertion_point(outer_class_scope) + static final com.google.protobuf.Descriptors.Descriptor internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor internal_static_io_apicurio_tests_protobuf_Header_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_io_apicurio_tests_protobuf_Header_fieldAccessorTable; + static final com.google.protobuf.Descriptors.Descriptor internal_static_io_apicurio_tests_protobuf_Point_descriptor; + static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_io_apicurio_tests_protobuf_Point_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor 
getDescriptor() { + return descriptor; + } + + private static com.google.protobuf.Descriptors.FileDescriptor descriptor; + static { + String[] descriptorData = { "\nGintegration-tests/testsuite/src/test/r" + + "esources/serdes/testmessage.proto\022\032io.ap" + + "icurio.tests.protobuf\032\037google/protobuf/t" + + "imestamp.proto\"\334\001\n\023ProtobufTestMessage\0222" + + "\n\006header\030\001 \001(\0132\".io.apicurio.tests.proto" + + "buf.Header\022\n\n\002d1\030\002 \001(\001\022\n\n\002s1\030\004 \001(\t\022\n\n\002i1" + + "\030\007 \001(\005\022\013\n\003bi1\030\014 \001(\003\022.\n\nstate_time\030\026 \001(\0132" + + "\032.google.protobuf.Timestamp\0220\n\005point\030\036 \001" + + "(\0132!.io.apicurio.tests.protobuf.Point\"\251\001" + + "\n\006Header\022(\n\004time\030\001 \001(\0132\032.google.protobuf" + + ".Timestamp\022\016\n\006source\030\002 \001(\t\022\023\n\013destinatio" + + "n\030\003 \001(\t\022\014\n\004uuid\030\004 \001(\t\022\024\n\014source_uuids\030\005 " + + "\003(\t\022\027\n\017message_type_id\030\006 \001(\t\022\023\n\013raw_mess" + + "age\030\007 \001(\t\"{\n\005Point\022\023\n\tlongitude\030\001 \001(\001H\000\022" + + "\022\n\010latitude\030\002 \001(\001H\001\022\022\n\010altitude\030\003 \001(\001H\002B" + + "\021\n\017longitude_oneofB\020\n\016latitude_oneofB\020\n\016" + + "altitude_oneofB\002P\001b\006proto3" }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( + descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.protobuf.TimestampProto.getDescriptor(), }); + internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor = getDescriptor() + .getMessageTypes().get(0); + internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_apicurio_tests_protobuf_ProtobufTestMessage_descriptor, + new String[] { "Header", "D1", "S1", "I1", "Bi1", 
"StateTime", "Point", }); + internal_static_io_apicurio_tests_protobuf_Header_descriptor = getDescriptor().getMessageTypes() + .get(1); + internal_static_io_apicurio_tests_protobuf_Header_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_apicurio_tests_protobuf_Header_descriptor, new String[] { "Time", "Source", + "Destination", "Uuid", "SourceUuids", "MessageTypeId", "RawMessage", }); + internal_static_io_apicurio_tests_protobuf_Point_descriptor = getDescriptor().getMessageTypes() + .get(2); + internal_static_io_apicurio_tests_protobuf_Point_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_io_apicurio_tests_protobuf_Point_descriptor, new String[] { "Longitude", + "Latitude", "Altitude", "LongitudeOneof", "LatitudeOneof", "AltitudeOneof", }); + com.google.protobuf.TimestampProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) } diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/AvroGenericRecordSchemaFactory.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/AvroGenericRecordSchemaFactory.java index 8bdc03afcb..acde0845af 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/AvroGenericRecordSchemaFactory.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/AvroGenericRecordSchemaFactory.java @@ -49,22 +49,12 @@ public AvroGenericRecordSchemaFactory(List schemaKeys) { public Schema generateSchema() { if (schema == null) { - StringBuilder builder = new StringBuilder() - .append("{\"type\":\"record\"") - .append(",") - .append("\"name\":") - .append("\"") - .append(recordName) - .append("\""); + StringBuilder builder = new StringBuilder().append("{\"type\":\"record\"").append(",") + .append("\"name\":").append("\"").append(recordName).append("\""); if (this.namespace != null) { - builder.append(",") - .append("\"namespace\":") - 
.append("\"") - .append(this.namespace) - .append("\""); + builder.append(",").append("\"namespace\":").append("\"").append(this.namespace).append("\""); } - builder.append(",") - .append("\"fields\":["); + builder.append(",").append("\"fields\":["); boolean first = true; for (String schemaKey : schemaKeys) { if (!first) { diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/AvroSerdeIT.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/AvroSerdeIT.java index 96059c2d14..a4dca32f85 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/AvroSerdeIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/AvroSerdeIT.java @@ -50,7 +50,7 @@ public class AvroSerdeIT extends ApicurioRegistryBaseIT { @Override public void cleanArtifacts() throws Exception { - //Don't clean up + // Don't clean up } @BeforeAll @@ -70,19 +70,16 @@ void testTopicIdStrategyFindLatest() throws Exception { String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", + List.of("key1")); - createArtifact("default", artifactId, ArtifactType.AVRO, avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); + createArtifact("default", artifactId, ArtifactType.AVRO, avroSchema.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) - .build() - .test(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + 
.withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(avroSchema::generateRecord).withDataValidator(avroSchema::validateRecord) + .build().test(); } @Test @@ -92,20 +89,17 @@ void testSimpleTopicIdStrategyFindLatest() throws Exception { String artifactId = topicName; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", + List.of("key1")); - createArtifact(topicName, artifactId, ArtifactType.AVRO, avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); + createArtifact(topicName, artifactId, ArtifactType.AVRO, avroSchema.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(SimpleTopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) - .withProducerProperty(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, topicName) - .build() - .test(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(SimpleTopicIdStrategy.class).withDataGenerator(avroSchema::generateRecord) + .withDataValidator(avroSchema::validateRecord) + .withProducerProperty(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, topicName).build().test(); } @Test @@ -115,19 +109,16 @@ void testRecordIdStrategydFindLatest() throws Exception { String groupId = TestUtils.generateSubject(); String artifactId = TestUtils.generateSubject(); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(groupId, artifactId, List.of("key1")); - - createArtifact(groupId, artifactId, ArtifactType.AVRO, 
avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(RecordIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) - .build() - .test(); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(groupId, artifactId, + List.of("key1")); + + createArtifact(groupId, artifactId, ArtifactType.AVRO, avroSchema.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); + + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(RecordIdStrategy.class).withDataGenerator(avroSchema::generateRecord) + .withDataValidator(avroSchema::validateRecord).build().test(); } @Test @@ -137,53 +128,48 @@ void testTopicRecordIdStrategydFindLatest() throws Exception { String groupId = TestUtils.generateSubject(); String recordName = TestUtils.generateSubject(); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(groupId, recordName, List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(groupId, recordName, + List.of("key1")); String artifactId = topicName + "-" + recordName; - createArtifact(groupId, artifactId, ArtifactType.AVRO, avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicRecordIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) - .build() - .test(); + createArtifact(groupId, artifactId, ArtifactType.AVRO, avroSchema.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); + + new 
SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(TopicRecordIdStrategy.class).withDataGenerator(avroSchema::generateRecord) + .withDataValidator(avroSchema::validateRecord).build().test(); } @Test @Tag(Constants.ACCEPTANCE) void testTopicIdStrategyAutoRegister() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", List.of("key1")); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) - .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") - .withAfterProduceValidator(() -> { - return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); - registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); - return true; - }); - }) - .build() - .test(); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", + List.of("key1")); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(avroSchema::generateRecord).withDataValidator(avroSchema::validateRecord) + .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") + .withAfterProduceValidator(() -> { + return TestUtils.retry(() -> { + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + 
.byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); + registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); + return true; + }); + }).build().test(); - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); - byte[] rawSchema = IoUtil.toBytes(registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get()); + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + byte[] rawSchema = IoUtil + .toBytes(registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get()); assertEquals(new String(avroSchema.generateSchemaBytes()), new String(rawSchema)); @@ -196,19 +182,18 @@ void testAvroSerDesFailDifferentSchemaByContent() throws Exception { String groupId = TestUtils.generateSubject(); String artifactId = TestUtils.generateSubject(); - AvroGenericRecordSchemaFactory avroSchemaA = new AvroGenericRecordSchemaFactory(groupId, artifactId, List.of("keyA")); - AvroGenericRecordSchemaFactory avroSchemaB = new AvroGenericRecordSchemaFactory(groupId, artifactId, List.of("keyB")); - - createArtifact(groupId, artifactId, ArtifactType.AVRO, avroSchemaA.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); - - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withStrategy(RecordIdStrategy.class) - //note, we use an incorrect wrong data generator in purpose - .withDataGenerator(avroSchemaB::generateRecord) - .build() - .test(); + AvroGenericRecordSchemaFactory avroSchemaA = new AvroGenericRecordSchemaFactory(groupId, artifactId, + List.of("keyA")); + AvroGenericRecordSchemaFactory avroSchemaB = new AvroGenericRecordSchemaFactory(groupId, artifactId, + List.of("keyB")); + + createArtifact(groupId, artifactId, ArtifactType.AVRO, 
avroSchemaA.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); + + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withStrategy(RecordIdStrategy.class) + // note, we use an incorrect wrong data generator in purpose + .withDataGenerator(avroSchemaB::generateRecord).build().test(); } @Test @@ -218,20 +203,19 @@ void testAvroSerDesFailDifferentSchemaByRecordName() throws Exception { String groupId = TestUtils.generateSubject(); String artifactId = TestUtils.generateSubject(); - AvroGenericRecordSchemaFactory avroSchemaA = new AvroGenericRecordSchemaFactory(groupId, artifactId, List.of("keyA")); - AvroGenericRecordSchemaFactory avroSchemaB = new AvroGenericRecordSchemaFactory(groupId, "notexistent", List.of("keyB")); - - createArtifact(groupId, artifactId, ArtifactType.AVRO, avroSchemaA.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); - - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withStrategy(RecordIdStrategy.class) - //note, we use an incorrect wrong data generator in purpose - .withDataGenerator(avroSchemaB::generateRecord) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .build() - .test(); + AvroGenericRecordSchemaFactory avroSchemaA = new AvroGenericRecordSchemaFactory(groupId, artifactId, + List.of("keyA")); + AvroGenericRecordSchemaFactory avroSchemaB = new AvroGenericRecordSchemaFactory(groupId, + "notexistent", List.of("keyB")); + + createArtifact(groupId, artifactId, ArtifactType.AVRO, avroSchemaA.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); + + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withStrategy(RecordIdStrategy.class) + // note, we use an incorrect wrong data generator in purpose + .withDataGenerator(avroSchemaB::generateRecord) + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true").build().test(); } 
@Test @@ -241,19 +225,18 @@ void testWrongSchema() throws Exception { String groupId = TestUtils.generateSubject(); String artifactId = topicName + "-value"; - AvroGenericRecordSchemaFactory avroSchemaA = new AvroGenericRecordSchemaFactory(groupId, "myrecord", List.of("keyA")); - AvroGenericRecordSchemaFactory avroSchemaB = new AvroGenericRecordSchemaFactory(groupId, "myrecord", List.of("keyB")); - - createArtifact(groupId, artifactId, ArtifactType.AVRO, avroSchemaA.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); - - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withStrategy(TopicIdStrategy.class) - //note, we use an incorrect wrong data generator in purpose - .withDataGenerator(avroSchemaB::generateRecord) - .build() - .test(); + AvroGenericRecordSchemaFactory avroSchemaA = new AvroGenericRecordSchemaFactory(groupId, "myrecord", + List.of("keyA")); + AvroGenericRecordSchemaFactory avroSchemaB = new AvroGenericRecordSchemaFactory(groupId, "myrecord", + List.of("keyB")); + + createArtifact(groupId, artifactId, ArtifactType.AVRO, avroSchemaA.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); + + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withStrategy(TopicIdStrategy.class) + // note, we use an incorrect wrong data generator in purpose + .withDataGenerator(avroSchemaB::generateRecord).build().test(); } @Test @@ -261,17 +244,14 @@ void testArtifactNotFound() throws Exception { String topicName = TestUtils.generateSubject(); kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("mygroup", "myrecord", List.of("keyB")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("mygroup", "myrecord", + List.of("keyB")); - //note, we don't create any artifact + // note, we don't create any artifact - new 
WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .build() - .test(); + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(avroSchema::generateRecord).build().test(); } @Test @@ -286,7 +266,7 @@ void testEvolveAvroApicurioReusingClient() throws Exception { } void evolveSchemaTest(boolean reuseClients) throws Exception { - //using TopicRecordIdStrategy + // using TopicRecordIdStrategy Class strategy = TopicRecordIdStrategy.class; @@ -296,10 +276,12 @@ void evolveSchemaTest(boolean reuseClients) throws Exception { String recordNamespace = TestUtils.generateGroupId(); String recordName = TestUtils.generateSubject(); String schemaKey = "key1"; - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(recordNamespace, recordName, List.of(schemaKey)); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(recordNamespace, + recordName, List.of(schemaKey)); String artifactId = topicName + "-" + recordName; - createArtifact(recordNamespace, artifactId, ArtifactType.AVRO, avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); + createArtifact(recordNamespace, artifactId, ArtifactType.AVRO, avroSchema.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); SerdesTester tester = new SerdesTester<>(); if (reuseClients) { @@ -308,28 +290,35 @@ void evolveSchemaTest(boolean reuseClients) throws Exception { int messageCount = 10; - Producer producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, strategy); - Consumer consumer = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, topicName); + Producer producer = tester.createProducer(StringSerializer.class, + AvroKafkaSerializer.class, topicName, 
strategy); + Consumer consumer = tester.createConsumer(StringDeserializer.class, + AvroKafkaDeserializer.class, topicName); tester.produceMessages(producer, topicName, avroSchema::generateRecord, messageCount); tester.consumeMessages(consumer, topicName, messageCount, avroSchema::validateRecord); String schemaKey2 = "key2"; - AvroGenericRecordSchemaFactory avroSchema2 = new AvroGenericRecordSchemaFactory(recordNamespace, recordName, List.of(schemaKey, schemaKey2)); - createArtifactVersion(recordNamespace, artifactId, avroSchema2.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null); + AvroGenericRecordSchemaFactory avroSchema2 = new AvroGenericRecordSchemaFactory(recordNamespace, + recordName, List.of(schemaKey, schemaKey2)); + createArtifactVersion(recordNamespace, artifactId, avroSchema2.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null); if (!reuseClients) { - producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, + strategy); } tester.produceMessages(producer, topicName, avroSchema2::generateRecord, messageCount); if (!reuseClients) { - producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, + strategy); } tester.produceMessages(producer, topicName, avroSchema::generateRecord, messageCount); if (!reuseClients) { - consumer = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, topicName); + consumer = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, + topicName); } { AtomicInteger schema1Counter = new AtomicInteger(0); @@ -349,26 +338,32 @@ void evolveSchemaTest(boolean reuseClients) throws Exception { } String schemaKey3 = "key3"; - AvroGenericRecordSchemaFactory avroSchema3 = new 
AvroGenericRecordSchemaFactory(recordNamespace, recordName, List.of(schemaKey, schemaKey2, schemaKey3)); - createArtifactVersion(recordNamespace, artifactId, avroSchema3.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null); + AvroGenericRecordSchemaFactory avroSchema3 = new AvroGenericRecordSchemaFactory(recordNamespace, + recordName, List.of(schemaKey, schemaKey2, schemaKey3)); + createArtifactVersion(recordNamespace, artifactId, avroSchema3.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null); if (!reuseClients) { - producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, + strategy); } tester.produceMessages(producer, topicName, avroSchema3::generateRecord, messageCount); if (!reuseClients) { - producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, + strategy); } tester.produceMessages(producer, topicName, avroSchema2::generateRecord, messageCount); if (!reuseClients) { - producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName, + strategy); } tester.produceMessages(producer, topicName, avroSchema::generateRecord, messageCount); if (!reuseClients) { - consumer = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, topicName); + consumer = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, + topicName); } { AtomicInteger schema1Counter = new AtomicInteger(0); @@ -412,20 +407,27 @@ void testAvroConfluentForMultipleTopics() throws Exception { kafkaCluster.createTopic(topicName2, 1, 1); kafkaCluster.createTopic(topicName3, 1, 1); - 
AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(subjectName, List.of(schemaKey)); - createArtifact("default", subjectName, ArtifactType.AVRO, avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(subjectName, + List.of(schemaKey)); + createArtifact("default", subjectName, ArtifactType.AVRO, avroSchema.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); SerdesTester tester = new SerdesTester<>(); int messageCount = 10; - Producer producer1 = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName1, strategy); - Producer producer2 = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName2, strategy); - Producer producer3 = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName3, strategy); - Consumer consumer1 = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, topicName1); - Consumer consumer2 = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, topicName2); - Consumer consumer3 = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, topicName3); - + Producer producer1 = tester.createProducer(StringSerializer.class, + AvroKafkaSerializer.class, topicName1, strategy); + Producer producer2 = tester.createProducer(StringSerializer.class, + AvroKafkaSerializer.class, topicName2, strategy); + Producer producer3 = tester.createProducer(StringSerializer.class, + AvroKafkaSerializer.class, topicName3, strategy); + Consumer consumer1 = tester.createConsumer(StringDeserializer.class, + AvroKafkaDeserializer.class, topicName1); + Consumer consumer2 = tester.createConsumer(StringDeserializer.class, + AvroKafkaDeserializer.class, topicName2); + Consumer consumer3 = tester.createConsumer(StringDeserializer.class, + AvroKafkaDeserializer.class, topicName3); 
tester.produceMessages(producer1, topicName1, avroSchema::generateRecord, messageCount); tester.produceMessages(producer2, topicName2, avroSchema::generateRecord, messageCount); @@ -440,96 +442,90 @@ void testAvroConfluentForMultipleTopics() throws Exception { @Test public void testAvroJSON() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecord3", List.of("bar")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecord3", + List.of("bar")); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(avroSchema::generateRecord).withDataValidator(avroSchema::validateRecord) .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") .withProducerProperty(SerdeConfig.ENABLE_HEADERS, "false") - .withProducerProperty(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON) - .withConsumerProperty(AvroKafkaSerdeConfig.AVRO_ENCODING, AvroKafkaSerdeConfig.AVRO_ENCODING_JSON) + .withProducerProperty(AvroKafkaSerdeConfig.AVRO_ENCODING, + AvroKafkaSerdeConfig.AVRO_ENCODING_JSON) + .withConsumerProperty(AvroKafkaSerdeConfig.AVRO_ENCODING, + AvroKafkaSerdeConfig.AVRO_ENCODING_JSON) .withAfterProduceValidator(() -> { return TestUtils.retry(() -> { - VersionMetaData meta = 
registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); return true; }); - }) - .build() - .test(); + }).build().test(); } - //TODO TEST avro specific record + // TODO TEST avro specific record @Test @Tag(ACCEPTANCE) public void testReflectAutoRegister() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withStrategy(TopicIdStrategy.class) - .withSerializer(serializer) - .withDeserializer(deserializer) + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withStrategy(TopicIdStrategy.class).withSerializer(serializer).withDeserializer(deserializer) .withDataGenerator(i -> new TestObject("Apicurio")) .withDataValidator(o -> "Apicurio".equals(o.getName())) - .withProducerProperty(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()) + .withProducerProperty(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, + ReflectAvroDatumProvider.class.getName()) .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") - .withConsumerProperty(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, ReflectAvroDatumProvider.class.getName()) + .withConsumerProperty(AvroKafkaSerdeConfig.AVRO_DATUM_PROVIDER, + ReflectAvroDatumProvider.class.getName()) .withAfterProduceValidator(() -> { return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData meta = 
registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); return true; }); - }) - .build() - .test(); + }).build().test(); } - // test use contentId headers @Test void testContentIdInHeaders() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", + List.of("key1")); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(avroSchema::generateRecord).withDataValidator(avroSchema::validateRecord) .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") .withProducerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) .withConsumerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) .withAfterProduceValidator(() -> { return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); return 
true; }); - }) - .build() - .test(); + }).build().test(); } @@ -537,126 +533,118 @@ void testContentIdInHeaders() throws Exception { @Test void testContentIdInBody() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurio1", + List.of("key1")); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(avroSchema::generateRecord).withDataValidator(avroSchema::validateRecord) .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") .withProducerProperty(SerdeConfig.ENABLE_HEADERS, "false") .withProducerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) .withConsumerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) .withAfterProduceValidator(() -> { return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); return true; }); - }) - .build() - .test(); + }).build().test(); } - //disabled because the setup process to have an artifact with 
different globalId/contentId is not reliable + // disabled because the setup process to have an artifact with different globalId/contentId is not + // reliable @Disabled // test producer use contentId, consumer default @Test void testProducerUsesContentIdConsumerUsesDefault() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - //create several artifacts before to ensure the globalId and contentId are not the same - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurioz", List.of("keyz")); - //create a duplicated artifact beforehand with the same content to force the contentId and globalId sequences to return different ids - createArtifact("default", TestUtils.generateArtifactId(), ArtifactType.AVRO, avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); - - new WrongConfiguredConsumerTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) + // create several artifacts before to ensure the globalId and contentId are not the same + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurioz", + List.of("keyz")); + // create a duplicated artifact beforehand with the same content to force the contentId and globalId + // sequences to return different ids + createArtifact("default", TestUtils.generateArtifactId(), ArtifactType.AVRO, + avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); + + new WrongConfiguredConsumerTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + 
.withDataGenerator(avroSchema::generateRecord).withDataValidator(avroSchema::validateRecord) .withProducerProperty(SerdeConfig.ENABLE_HEADERS, "false") .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") .withProducerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) .withAfterProduceValidator(() -> { return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); return true; }); - }) - .build() - .test(); + }).build().test(); } - //disabled because the setup process to have an artifact with different globalId/contentId is not reliable + // disabled because the setup process to have an artifact with different globalId/contentId is not + // reliable @Disabled // test producer use default, consumer use contentId @Test void testProducerUsesDefaultConsumerUsesContentId() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - //create artifact before to ensure the globalId and contentId are not the same - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurioz", List.of("keyz")); - //create a duplicated artifact beforehand with the same content to force the contentId and globalId sequences to return different ids - createArtifact("default", TestUtils.generateArtifactId(), ArtifactType.AVRO, avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); - - new WrongConfiguredConsumerTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - 
.withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) + // create artifact before to ensure the globalId and contentId are not the same + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordapicurioz", + List.of("keyz")); + // create a duplicated artifact beforehand with the same content to force the contentId and globalId + // sequences to return different ids + createArtifact("default", TestUtils.generateArtifactId(), ArtifactType.AVRO, + avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); + + new WrongConfiguredConsumerTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(avroSchema::generateRecord).withDataValidator(avroSchema::validateRecord) .withProducerProperty(SerdeConfig.ENABLE_HEADERS, "false") .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") .withConsumerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) .withAfterProduceValidator(() -> { return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); return true; }); - }) - .build() - .test(); + }).build().test(); } /** * From issue https://github.com/Apicurio/apicurio-registry/issues/1479 + * * @throws Exception */ @Test void testFirstEmptyFieldConfusedAsMagicByte() throws Exception { - String s = "{\n" - + " \"type\": \"record\",\n" - + " \"name\": \"userInfo\",\n" - + " \"namespace\": \"my.example\",\n" - + " \"fields\": [\n" - + " {\n" + 
String s = "{\n" + " \"type\": \"record\",\n" + " \"name\": \"userInfo\",\n" + + " \"namespace\": \"my.example\",\n" + " \"fields\": [\n" + " {\n" + " \"name\": \"username\",\n" - + " \"type\": [\"null\", { \"type\": \"string\"} ]\n" - + " }" - + " ]\n" + + " \"type\": [\"null\", { \"type\": \"string\"} ]\n" + " }" + " ]\n" + "} "; String topicName = TestUtils.generateTopic(); @@ -665,31 +653,26 @@ void testFirstEmptyFieldConfusedAsMagicByte() throws Exception { Schema schema = new Schema.Parser().parse(s); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(SimpleTopicIdStrategy.class) - .withDataGenerator((c) -> { + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(SimpleTopicIdStrategy.class).withDataGenerator((c) -> { GenericRecord record = new GenericData.Record(schema); - if ( c != 0 && (c % 2) == 0 ) { + if (c != 0 && (c % 2) == 0) { record.put("username", "value-" + c); } return record; - }) - .withDataValidator((record) -> { + }).withDataValidator((record) -> { return schema.equals(record.getSchema()); - }) - .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") + }).withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") .withAfterProduceValidator(() -> { return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); return true; }); - }) - .build() - .test(); + }).build().test(); } @@ -703,19 +686,16 @@ void testFirstRequestFailsRateLimited() throws Exception { String topicName = TestUtils.generateSubject(); 
kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("mygroup", "myrecord", List.of("keyB")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("mygroup", + "myrecord", List.of("keyB")); - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) - //mock url that will return 429 status always - .withProducerProperty(SerdeConfig.REGISTRY_URL, mock.getMockUrl()) + // mock url that will return 429 status always + .withProducerProperty(SerdeConfig.REGISTRY_URL, mock.getMockUrl()) - .withSerializer(AvroKafkaSerializer.class) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .build() - .test(); + .withSerializer(AvroKafkaSerializer.class).withStrategy(TopicIdStrategy.class) + .withDataGenerator(avroSchema::generateRecord).build().test(); } finally { mock.stop(); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/JsonSchemaMsgFactory.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/JsonSchemaMsgFactory.java index 2eb8bd7207..453a5a2ef8 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/JsonSchemaMsgFactory.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/JsonSchemaMsgFactory.java @@ -2,9 +2,9 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import io.apicurio.registry.utils.IoUtil; import io.apicurio.tests.common.serdes.json.Msg; import io.apicurio.tests.common.serdes.json.ValidMessage; -import io.apicurio.registry.utils.IoUtil; import java.io.InputStream; import java.util.Date; @@ -12,25 +12,13 @@ public class JsonSchemaMsgFactory { - private String jsonSchema = "{" + - " \"$id\": \"https://example.com/message.schema.json\"," + - " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + - " 
\"required\": [" + - " \"message\"," + - " \"time\"" + - " ]," + - " \"type\": \"object\"," + - " \"properties\": {" + - " \"message\": {" + - " \"description\": \"\"," + - " \"type\": \"string\"" + - " }," + - " \"time\": {" + - " \"description\": \"\"," + - " \"type\": \"number\"" + - " }" + - " }" + - "}"; + private String jsonSchema = "{" + " \"$id\": \"https://example.com/message.schema.json\"," + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\"," + " \"required\": [" + + " \"message\"," + " \"time\"" + " ]," + " \"type\": \"object\"," + + " \"properties\": {" + " \"message\": {" + " \"description\": \"\"," + + " \"type\": \"string\"" + " }," + " \"time\": {" + + " \"description\": \"\"," + " \"type\": \"number\"" + " }" + + " }" + "}"; public ValidMessage generateMessage(int count) { ValidMessage msg = new ValidMessage(); diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/JsonSchemaSerdeIT.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/JsonSchemaSerdeIT.java index 2a27c9267a..d37e626de6 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/JsonSchemaSerdeIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/JsonSchemaSerdeIT.java @@ -1,19 +1,19 @@ package io.apicurio.tests.serdes.apicurio; import com.fasterxml.jackson.databind.JsonNode; -import io.apicurio.registry.types.ContentTypes; -import io.apicurio.tests.ApicurioRegistryBaseIT; -import io.apicurio.tests.common.serdes.json.InvalidMessage; -import io.apicurio.tests.common.serdes.json.ValidMessage; -import io.apicurio.tests.utils.Constants; -import io.apicurio.tests.utils.KafkaFacade; import io.apicurio.registry.serde.SerdeConfig; import io.apicurio.registry.serde.jsonschema.JsonSchemaKafkaDeserializer; import io.apicurio.registry.serde.jsonschema.JsonSchemaKafkaSerializer; import io.apicurio.registry.serde.strategy.SimpleTopicIdStrategy; import 
io.apicurio.registry.serde.strategy.TopicIdStrategy; import io.apicurio.registry.types.ArtifactType; +import io.apicurio.registry.types.ContentTypes; import io.apicurio.registry.utils.tests.TestUtils; +import io.apicurio.tests.ApicurioRegistryBaseIT; +import io.apicurio.tests.common.serdes.json.InvalidMessage; +import io.apicurio.tests.common.serdes.json.ValidMessage; +import io.apicurio.tests.utils.Constants; +import io.apicurio.tests.utils.KafkaFacade; import io.quarkus.test.junit.QuarkusIntegrationTest; import org.apache.kafka.connect.json.JsonSerializer; import org.junit.jupiter.api.AfterAll; @@ -34,7 +34,7 @@ public class JsonSchemaSerdeIT extends ApicurioRegistryBaseIT { @Override public void cleanArtifacts() throws Exception { - //Don't clean up + // Don't clean up } @BeforeAll @@ -56,17 +56,13 @@ void testTopicIdStrategyFindLatest() throws Exception { JsonSchemaMsgFactory schema = new JsonSchemaMsgFactory(); - createArtifact("default", artifactId, ArtifactType.JSON, schema.getSchemaString(), ContentTypes.APPLICATION_JSON, null, null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.JSON, schema.getSchemaString(), + ContentTypes.APPLICATION_JSON, null, null); + + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateMessage).build() + .test(); } @Test @@ -77,51 +73,47 @@ void testSimpleTopicIdStrategyFindLatest() throws Exception { JsonSchemaMsgFactory schema = new JsonSchemaMsgFactory(); - createArtifact("default", artifactId, ArtifactType.JSON, schema.getSchemaString(), ContentTypes.APPLICATION_JSON, null, 
null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(SimpleTopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.JSON, schema.getSchemaString(), + ContentTypes.APPLICATION_JSON, null, null); + + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(SimpleTopicIdStrategy.class).withDataGenerator(schema::generateMessage) + .withDataValidator(schema::validateMessage).build().test(); } - //there is no mechanism for json serdes to auto register a schema, yet -// @Test -// @Tag(Constants.ACCEPTANCE) -// void testTopicIdStrategyAutoRegister() throws Exception { -// String topicName = TestUtils.generateTopic(); -// //because of using TopicIdStrategy -// String artifactId = topicName + "-value"; -// kafkaCluster.createTopic(topicName, 1, 1); -// -// JsonSchemaMsgFactory schema = new JsonSchemaMsgFactory(); -// -// new SimpleSerdesTesterBuilder() -// .withTopic(topicName) -// .withSerializer(serializer) -// .withDeserializer(deserializer) -// .withStrategy(TopicIdStrategy.class) -// .withDataGenerator(schema::generateMessage) -// .withDataValidator(schema::validateMessage) -// .withProducerProperty(SerdeConfigKeys.AUTO_REGISTER_ARTIFACT, "true") -// .withAfterProduceValidator(() -> { -// return TestUtils.retry(() -> registryClient.getArtifactMetaData(topicName, artifactId) != null); -// }) -// .build() -// .test(); -// -// -// ArtifactMetaData meta = registryClient.getArtifactMetaData(topicName, artifactId); -// byte[] rawSchema = IoUtil.toBytes(registryClient.getContentByGlobalId(meta.getGlobalId())); -// -// assertEquals(new String(schema.getSchemaBytes()), new String(rawSchema)); -// -// } + // there is no mechanism for json serdes to auto register a schema, yet + // 
@Test + // @Tag(Constants.ACCEPTANCE) + // void testTopicIdStrategyAutoRegister() throws Exception { + // String topicName = TestUtils.generateTopic(); + // //because of using TopicIdStrategy + // String artifactId = topicName + "-value"; + // kafkaCluster.createTopic(topicName, 1, 1); + // + // JsonSchemaMsgFactory schema = new JsonSchemaMsgFactory(); + // + // new SimpleSerdesTesterBuilder() + // .withTopic(topicName) + // .withSerializer(serializer) + // .withDeserializer(deserializer) + // .withStrategy(TopicIdStrategy.class) + // .withDataGenerator(schema::generateMessage) + // .withDataValidator(schema::validateMessage) + // .withProducerProperty(SerdeConfigKeys.AUTO_REGISTER_ARTIFACT, "true") + // .withAfterProduceValidator(() -> { + // return TestUtils.retry(() -> registryClient.getArtifactMetaData(topicName, artifactId) != null); + // }) + // .build() + // .test(); + // + // + // ArtifactMetaData meta = registryClient.getArtifactMetaData(topicName, artifactId); + // byte[] rawSchema = IoUtil.toBytes(registryClient.getContentByGlobalId(meta.getGlobalId())); + // + // assertEquals(new String(schema.getSchemaBytes()), new String(rawSchema)); + // + // } @Test void testConsumeReturnSpecificClass() throws Exception { @@ -131,18 +123,16 @@ void testConsumeReturnSpecificClass() throws Exception { JsonSchemaMsgFactory schema = new JsonSchemaMsgFactory(); - createArtifact("default", artifactId, ArtifactType.JSON, schema.getSchemaString(), ContentTypes.APPLICATION_JSON, null, null); - - new SimpleSerdesTesterBuilder>() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(SimpleTopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateAsMap) - .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, Map.class.getName()) - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.JSON, schema.getSchemaString(), + 
ContentTypes.APPLICATION_JSON, null, null); + + new SimpleSerdesTesterBuilder>().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(SimpleTopicIdStrategy.class).withDataGenerator(schema::generateMessage) + .withDataValidator(schema::validateAsMap) + .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, + Map.class.getName()) + .build().test(); } @Test @@ -154,23 +144,20 @@ void testWrongSchema() throws Exception { String artifactId = topicName + "-value"; JsonSchemaMsgFactory schema = new JsonSchemaMsgFactory(); -// ProtobufUUIDTestMessage schemaB = new ProtobufUUIDTestMessage(); - - createArtifact(groupId, artifactId, ArtifactType.JSON, schema.getSchemaString(), ContentTypes.APPLICATION_JSON, null, null); - - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withStrategy(TopicIdStrategy.class) - //note, we use an incorrect wrong data generator in purpose - .withDataGenerator(count -> { - InvalidMessage msg = new InvalidMessage(); - msg.setBar("aa"); - msg.setFoo("ss"); - return msg; - }) - .build() - .test(); + // ProtobufUUIDTestMessage schemaB = new ProtobufUUIDTestMessage(); + + createArtifact(groupId, artifactId, ArtifactType.JSON, schema.getSchemaString(), + ContentTypes.APPLICATION_JSON, null, null); + + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withStrategy(TopicIdStrategy.class) + // note, we use an incorrect wrong data generator in purpose + .withDataGenerator(count -> { + InvalidMessage msg = new InvalidMessage(); + msg.setBar("aa"); + msg.setFoo("ss"); + return msg; + }).build().test(); } @Test @@ -180,15 +167,11 @@ void testArtifactNotFound() throws Exception { JsonSchemaMsgFactory schema = new JsonSchemaMsgFactory(); - //note, we don't create any artifact + // note, we don't create any artifact - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - 
.withSerializer(serializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .build() - .test(); + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName).withSerializer(serializer) + .withStrategy(TopicIdStrategy.class).withDataGenerator(schema::generateMessage).build() + .test(); } @Test @@ -196,29 +179,28 @@ void testDefaultFallback() throws Exception { String topicName = TestUtils.generateTopic(); kafkaCluster.createTopic(topicName, 1, 1); - String groupId = TestUtils.generateSubject(); String artifactId = TestUtils.generateSubject(); JsonSchemaMsgFactory schema = new JsonSchemaMsgFactory(); - createArtifact(groupId, artifactId, ArtifactType.JSON, schema.getSchemaString(), ContentTypes.APPLICATION_JSON, null, null); - - //this test will produce messages using JsonSerializer, which does nothing with the registry and just serializes as json - //the produced messages won't have the id of the artifact - //the consumer will read the messages and because there is no id information in the messages the resolver will fail - //the default fallback will kick in and use the artifact from the configured properties - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(JsonSerializer.class) - .withDeserializer(deserializer) - .withStrategy(SimpleTopicIdStrategy.class) - .withDataGenerator(schema::generateMessageJsonNode) - .withDataValidator(schema::validateMessage) - .withConsumerProperty(SerdeConfig.FALLBACK_ARTIFACT_GROUP_ID, groupId) - .withConsumerProperty(SerdeConfig.FALLBACK_ARTIFACT_ID, artifactId) - .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, ValidMessage.class.getName()) - .build() - .test(); + createArtifact(groupId, artifactId, ArtifactType.JSON, schema.getSchemaString(), + ContentTypes.APPLICATION_JSON, null, null); + + // this test will produce messages using JsonSerializer, which does nothing with the registry and just + // serializes as json + // the produced 
messages won't have the id of the artifact + // the consumer will read the messages and because there is no id information in the messages the + // resolver will fail + // the default fallback will kick in and use the artifact from the configured properties + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(JsonSerializer.class).withDeserializer(deserializer) + .withStrategy(SimpleTopicIdStrategy.class).withDataGenerator(schema::generateMessageJsonNode) + .withDataValidator(schema::validateMessage) + .withConsumerProperty(SerdeConfig.FALLBACK_ARTIFACT_GROUP_ID, groupId) + .withConsumerProperty(SerdeConfig.FALLBACK_ARTIFACT_ID, artifactId) + .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, + ValidMessage.class.getName()) + .build().test(); } } diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufSerdeIT.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufSerdeIT.java index 38c3f3a1bc..a67390101d 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufSerdeIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufSerdeIT.java @@ -45,7 +45,7 @@ void teardownEnvironment() throws Exception { @Override public void cleanArtifacts() throws Exception { - //Don't clean up + // Don't clean up } @Test @@ -57,18 +57,13 @@ void testTopicIdStrategyFindLatest() throws Exception { ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - 
.build() - .test(); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null, null); + + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateMessage) + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true").build().test(); } @Test @@ -79,18 +74,14 @@ void testSimpleTopicIdStrategyFindLatest() throws Exception { ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(SimpleTopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null, null); + + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(SimpleTopicIdStrategy.class).withDataGenerator(schema::generateMessage) + .withDataValidator(schema::validateMessage) + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true").build().test(); } @Test @@ -103,16 +94,13 @@ void testWrongSchema() throws Exception { ProtobufTestMessageFactory schemaA = new ProtobufTestMessageFactory(); ProtobufUUIDTestMessage schemaB = new ProtobufUUIDTestMessage(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schemaA.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); + createArtifact("default", 
artifactId, ArtifactType.PROTOBUF, schemaA.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null, null); - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withStrategy(TopicIdStrategy.class) - //note, we use an incorrect wrong data generator in purpose - .withDataGenerator(schemaB::generateMessage) - .build() - .test(); + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withStrategy(TopicIdStrategy.class) + // note, we use an incorrect wrong data generator in purpose + .withDataGenerator(schemaB::generateMessage).build().test(); } @Test @@ -125,17 +113,14 @@ void testWrongSchemaFindLatest() throws Exception { ProtobufTestMessageFactory schemaA = new ProtobufTestMessageFactory(); ProtobufUUIDTestMessage schemaB = new ProtobufUUIDTestMessage(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schemaA.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); - - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withStrategy(TopicIdStrategy.class) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - //note, we use an incorrect wrong data generator in purpose - .withDataGenerator(schemaB::generateMessage) - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, schemaA.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null, null); + + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withStrategy(TopicIdStrategy.class) + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") + // note, we use an incorrect wrong data generator in purpose + .withDataGenerator(schemaB::generateMessage).build().test(); } @Test @@ -145,22 +130,19 @@ void testArtifactNotFound() throws Exception { ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - //note, we don't create any artifact + // 
note, we don't create any artifact - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .build() - .test(); + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).build().test(); } /** - * This test creates one artifact with two versions, v1 and v2 each one incompatile with the other. - * This test verifies the ability of the protobuf serdes to find a specific version of the artifact, to find the latest or to find the artifact by content. - * At the same time the test verifies the serdes perform validation before serializing and they fail - * when the serdes is configured to use one schema but the data passed does not correspond to that schema + * This test creates one artifact with two versions, v1 and v2 each one incompatile with the other. This + * test verifies the ability of the protobuf serdes to find a specific version of the artifact, to find + * the latest or to find the artifact by content. 
At the same time the test verifies the serdes perform + * validation before serializing and they fail when the serdes is configured to use one schema but the + * data passed does not correspond to that schema */ @Test void testValidation() throws Exception { @@ -172,72 +154,46 @@ void testValidation() throws Exception { ProtobufTestMessageFactory schemaV1 = new ProtobufTestMessageFactory(); ProtobufUUIDTestMessage schemaV2 = new ProtobufUUIDTestMessage(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schemaV1.generateArtificialSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); - createArtifactVersion("default", artifactId, schemaV2.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null); - - //by default the artifact is found by content so this should work by finding the version 1 of the artifact - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withProducerProperty(SerdeConfig.EXPLICIT_ARTIFACT_VERSION, "1") - .withDataGenerator(schemaV1::generateMessage) - .withDataValidator(schemaV1::validateMessage) - .build() - .test(); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schemaV1::generateMessage) - .withDataValidator(schemaV1::validateMessage) - .build() - .test(); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schemaV2::generateMessage) - .withDataValidator(schemaV2::validateTypeMessage) - .build() - .test(); - - //if find latest is enabled and we use the v1 schema it should fail. 
Validation is enabled by default - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withStrategy(TopicIdStrategy.class) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - //note, we use an incorrect wrong data generator in purpose - //find latest will find the v2 artifact but we try to send with v1 artifact, this should fail - .withDataGenerator(schemaV1::generateMessage) - .build() - .test(); - - //if find latest is enabled and we use the v2 schema it should work. Validation is enabled by default - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .withDataGenerator(schemaV2::generateMessage) - .withDataValidator(schemaV2::validateTypeMessage) - .build() - .test(); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withProducerProperty(SerdeConfig.EXPLICIT_ARTIFACT_VERSION, "2") - .withDataGenerator(schemaV2::generateMessage) - .withDataValidator(schemaV2::validateTypeMessage) - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, + schemaV1.generateArtificialSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); + createArtifactVersion("default", artifactId, schemaV2.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null); + + // by default the artifact is found by content so this should work by finding the version 1 of the + // artifact + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withProducerProperty(SerdeConfig.EXPLICIT_ARTIFACT_VERSION, "1") + .withDataGenerator(schemaV1::generateMessage).withDataValidator(schemaV1::validateMessage) + .build().test(); + 
new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schemaV1::generateMessage).withDataValidator(schemaV1::validateMessage) + .build().test(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schemaV2::generateMessage).withDataValidator(schemaV2::validateTypeMessage) + .build().test(); + + // if find latest is enabled and we use the v1 schema it should fail. Validation is enabled by default + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withStrategy(TopicIdStrategy.class) + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") + // note, we use an incorrect wrong data generator in purpose + // find latest will find the v2 artifact but we try to send with v1 artifact, this should fail + .withDataGenerator(schemaV1::generateMessage).build().test(); + + // if find latest is enabled and we use the v2 schema it should work. 
Validation is enabled by default + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") + .withDataGenerator(schemaV2::generateMessage).withDataValidator(schemaV2::validateTypeMessage) + .build().test(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withProducerProperty(SerdeConfig.EXPLICIT_ARTIFACT_VERSION, "2") + .withDataGenerator(schemaV2::generateMessage).withDataValidator(schemaV2::validateTypeMessage) + .build().test(); } @Test @@ -248,19 +204,15 @@ void testConsumeDynamicMessage() throws Exception { ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateDynamicMessage) - .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, DynamicMessage.class.getName()) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null, null); + + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateDynamicMessage) + .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, + DynamicMessage.class.getName()) + 
.withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true").build().test(); } @Test @@ -271,19 +223,15 @@ void testConsumeReturnSpecificClass() throws Exception { ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) - .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, ProtobufTestMessage.class.getName()) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null, null); + + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateMessage) + .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, + ProtobufTestMessage.class.getName()) + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true").build().test(); } @Test @@ -294,19 +242,14 @@ void testFindLatestDeriveClassProtobufTypeTopicIdStrategy() throws Exception { ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) 
- .withConsumerProperty(ProtobufKafkaDeserializerConfig.DERIVE_CLASS_FROM_SCHEMA, "true") - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null, null); + + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateMessage) + .withConsumerProperty(ProtobufKafkaDeserializerConfig.DERIVE_CLASS_FROM_SCHEMA, "true") + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true").build().test(); } @Test @@ -317,19 +260,15 @@ public void testFindLatestDeriveClassProtobufTypeSimpleTopicIdStrategy() throws ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); - - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(SimpleTopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) - .withConsumerProperty(ProtobufKafkaDeserializerConfig.DERIVE_CLASS_FROM_SCHEMA, "true") - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .build() - .test(); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null, null); + + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(SimpleTopicIdStrategy.class).withDataGenerator(schema::generateMessage) + .withDataValidator(schema::validateMessage) + .withConsumerProperty(ProtobufKafkaDeserializerConfig.DERIVE_CLASS_FROM_SCHEMA, "true") + 
.withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true").build().test(); } @Test @@ -341,21 +280,17 @@ public void testFindLatestSpecificProtobufType() throws Exception { String schemaContent = resourceToString("serdes/testmessage.proto"); - createArtifact(topicName, artifactId, ArtifactType.PROTOBUF, schemaContent, ContentTypes.APPLICATION_PROTOBUF, null, null); + createArtifact(topicName, artifactId, ArtifactType.PROTOBUF, schemaContent, + ContentTypes.APPLICATION_PROTOBUF, null, null); ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(SimpleTopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .withProducerProperty(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, topicName) - .build() - .test(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(SimpleTopicIdStrategy.class).withDataGenerator(schema::generateMessage) + .withDataValidator(schema::validateMessage) + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") + .withProducerProperty(SerdeConfig.EXPLICIT_ARTIFACT_GROUP_ID, topicName).build().test(); } @@ -368,55 +303,50 @@ public void testFindLatestDynamicMessageProtobufType() throws Exception { String schemaContent = resourceToString("serdes/testmessage.proto"); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schemaContent, ContentTypes.APPLICATION_PROTOBUF, null, null); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, schemaContent, + ContentTypes.APPLICATION_PROTOBUF, null, null); ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - 
.withDeserializer(deserializer) - .withStrategy(SimpleTopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateDynamicMessage) - .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, DynamicMessage.class.getName()) - .build() - .test(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer) + .withStrategy(SimpleTopicIdStrategy.class).withDataGenerator(schema::generateMessage) + .withDataValidator(schema::validateDynamicMessage) + .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") + .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, + DynamicMessage.class.getName()) + .build().test(); } @Test @Tag(Constants.ACCEPTANCE) void testTopicIdStrategyAutoRegister() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) - .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") - .withAfterProduceValidator(() -> { - return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); - registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); - return true; - }); - }) - .build() - .test(); - - int versions = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().get(config -> { 
- config.queryParameters.offset = 0; - config.queryParameters.limit = 10; - }).getCount(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateMessage) + .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") + .withAfterProduceValidator(() -> { + return TestUtils.retry(() -> { + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); + registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); + return true; + }); + }).build().test(); + + int versions = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId) + .versions().get(config -> { + config.queryParameters.offset = 0; + config.queryParameters.limit = 10; + }).getCount(); assertEquals(1, versions); } @@ -424,125 +354,111 @@ void testTopicIdStrategyAutoRegister() throws Exception { @Test public void testAutoRegisterDynamicMessageProtobufType() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateDynamicMessage) - .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") - .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, DynamicMessage.class.getName()) - .withAfterProduceValidator(() -> { - return TestUtils.retry(() -> { - VersionMetaData meta = 
registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); - registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); - return true; - }); - }) - .build() - .test(); - - int versions = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().get(config -> { - config.queryParameters.offset = 0; - config.queryParameters.limit = 10; - }).getCount(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateDynamicMessage) + .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") + .withConsumerProperty(SerdeConfig.DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, + DynamicMessage.class.getName()) + .withAfterProduceValidator(() -> { + return TestUtils.retry(() -> { + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); + registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); + return true; + }); + }).build().test(); + + int versions = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId) + .versions().get(config -> { + config.queryParameters.offset = 0; + config.queryParameters.limit = 10; + }).getCount(); assertEquals(1, versions); } @Test public void testAutoRegisterDeriveClassProtobufType() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - 
.withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) - .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") - .withConsumerProperty(ProtobufKafkaDeserializerConfig.DERIVE_CLASS_FROM_SCHEMA, "true") - .withAfterProduceValidator(() -> { - return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); - registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); - return true; - }); - }) - .build() - .test(); - - int versions = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().get(config -> { - config.queryParameters.offset = 0; - config.queryParameters.limit = 10; - }).getCount(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateMessage) + .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") + .withConsumerProperty(ProtobufKafkaDeserializerConfig.DERIVE_CLASS_FROM_SCHEMA, "true") + .withAfterProduceValidator(() -> { + return TestUtils.retry(() -> { + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); + registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); + return true; + }); + }).build().test(); + + int versions = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId) + .versions().get(config -> { + config.queryParameters.offset = 0; + config.queryParameters.limit = 10; + }).getCount(); assertEquals(1, versions); } @Test public void testAutoRegisterAndUseBody() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using 
TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateMessage) + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateMessage) .withProducerProperty(SerdeConfig.AUTO_REGISTER_ARTIFACT, "true") .withProducerProperty(SerdeConfig.ENABLE_HEADERS, "false") .withConsumerProperty(ProtobufKafkaDeserializerConfig.DERIVE_CLASS_FROM_SCHEMA, "true") .withAfterProduceValidator(() -> { return TestUtils.retry(() -> { - VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData meta = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("branch=latest") + .get(); registryClient.ids().globalIds().byGlobalId(meta.getGlobalId()).get(); return true; }); - }) - .build() - .test(); + }).build().test(); } @Test public void testFindLatestAndUseBody() throws Exception { String topicName = TestUtils.generateTopic(); - //because of using TopicIdStrategy + // because of using TopicIdStrategy String artifactId = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); ProtobufTestMessageFactory schema = new ProtobufTestMessageFactory(); - createArtifact("default", artifactId, ArtifactType.PROTOBUF, schema.generateSchemaString(), ContentTypes.APPLICATION_PROTOBUF, null, null); + createArtifact("default", artifactId, ArtifactType.PROTOBUF, 
schema.generateSchemaString(), + ContentTypes.APPLICATION_PROTOBUF, null, null); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(serializer) - .withDeserializer(deserializer) - .withStrategy(TopicIdStrategy.class) - .withDataGenerator(schema::generateMessage) - .withDataValidator(schema::validateDynamicMessage) + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(serializer).withDeserializer(deserializer).withStrategy(TopicIdStrategy.class) + .withDataGenerator(schema::generateMessage).withDataValidator(schema::validateDynamicMessage) .withProducerProperty(SerdeConfig.FIND_LATEST_ARTIFACT, "true") - .withProducerProperty(SerdeConfig.ENABLE_HEADERS, "false") - .build() - .test(); + .withProducerProperty(SerdeConfig.ENABLE_HEADERS, "false").build().test(); } } diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufTestMessageFactory.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufTestMessageFactory.java index b86776d6e9..6db6db7314 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufTestMessageFactory.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufTestMessageFactory.java @@ -18,21 +18,16 @@ public class ProtobufTestMessageFactory { public ProtobufTestMessage generateMessage(int count) { Date now = new Date(); - return ProtobufTestMessage.newBuilder() - .setBi1(1) - .setD1(now.getTime()) - .setI1(123) - .setS1("a") + return ProtobufTestMessage.newBuilder().setBi1(1).setD1(now.getTime()).setI1(123).setS1("a") .setHeader(Header.newBuilder().setTime(Timestamp.getDefaultInstance()).build()) - .setPoint(Point.newBuilder().setAltitude(1).setLatitude(22).setLongitude(22).build()) - .build(); + .setPoint(Point.newBuilder().setAltitude(1).setLatitude(22).setLongitude(22).build()).build(); } public boolean validateDynamicMessage(DynamicMessage dm) { Descriptors.Descriptor descriptor = 
dm.getDescriptorForType(); Descriptors.FieldDescriptor fieldI1 = descriptor.findFieldByName("i1"); Object i1 = dm.getField(fieldI1); - return i1 != null && ((Integer)i1).intValue() == 123; + return i1 != null && ((Integer) i1).intValue() == 123; } public boolean validateMessage(ProtobufTestMessage msg) { @@ -52,7 +47,8 @@ public byte[] generateSchemaBytes() { } public InputStream generateArtificialSchemaStream() { - ProtoFileElement element = FileDescriptorUtils.fileDescriptorToProtoFile(ProtobufTestMessage.newBuilder().build().getDescriptorForType().getFile().toProto()); + ProtoFileElement element = FileDescriptorUtils.fileDescriptorToProtoFile( + ProtobufTestMessage.newBuilder().build().getDescriptorForType().getFile().toProto()); return IoUtil.toStream(element.toSchema()); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufUUIDTestMessage.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufUUIDTestMessage.java index 1b9fb82014..833df157c4 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufUUIDTestMessage.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/ProtobufUUIDTestMessage.java @@ -3,9 +3,9 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.DynamicMessage; import com.squareup.wire.schema.internal.parser.ProtoFileElement; -import io.apicurio.tests.common.serdes.proto.TestCmmn; import io.apicurio.registry.utils.IoUtil; import io.apicurio.registry.utils.protobuf.schema.FileDescriptorUtils; +import io.apicurio.tests.common.serdes.proto.TestCmmn; import java.io.InputStream; import java.util.Date; @@ -30,7 +30,8 @@ public boolean validateTypeMessage(TestCmmn.UUID message) { } public ProtoFileElement generateSchema() { - return FileDescriptorUtils.fileDescriptorToProtoFile(TestCmmn.UUID.newBuilder().build().getDescriptorForType().getFile().toProto()); + return FileDescriptorUtils.fileDescriptorToProtoFile( + 
TestCmmn.UUID.newBuilder().build().getDescriptorForType().getFile().toProto()); } public InputStream generateSchemaStream() { diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/SerdesTester.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/SerdesTester.java index 661ba8642a..577a87483e 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/SerdesTester.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/SerdesTester.java @@ -1,8 +1,8 @@ package io.apicurio.tests.serdes.apicurio; +import io.apicurio.registry.serde.SerdeConfig; import io.apicurio.tests.ApicurioRegistryBaseIT; import io.apicurio.tests.utils.KafkaFacade; -import io.apicurio.registry.serde.SerdeConfig; import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; import io.confluent.kafka.serializers.KafkaAvroSerializerConfig; import org.apache.kafka.clients.CommonClientConfigs; @@ -40,11 +40,10 @@ public class SerdesTester { private static final String BOOTSTRAP_SERVERS = "localhost:9092"; private static final String MAC_OS_BOOTSTRAP_SERVERS = "docker.host.internal:9092"; - private boolean autoClose = true; public SerdesTester() { - //empty + // empty } /** @@ -54,11 +53,14 @@ public void setAutoClose(boolean autoClose) { this.autoClose = autoClose; } - public Producer createProducer(Class keySerializer, Class valueSerializer, String topicName, Class artifactIdStrategy) { - return createProducer(new Properties(), keySerializer, valueSerializer, topicName, artifactIdStrategy); + public Producer createProducer(Class keySerializer, Class valueSerializer, String topicName, + Class artifactIdStrategy) { + return createProducer(new Properties(), keySerializer, valueSerializer, topicName, + artifactIdStrategy); } - public Producer createProducer(Properties props, Class keySerializerClass, Class valueSerializerClass, String topicName, Class artifactIdStrategy) { + public Producer 
createProducer(Properties props, Class keySerializerClass, + Class valueSerializerClass, String topicName, Class artifactIdStrategy) { connectionProperties().forEach((k, v) -> { props.putIfAbsent(k, v); }); @@ -69,9 +71,11 @@ public Producer createProducer(Properties props, Class keySerializerCla props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializerClass.getName()); // Schema Registry location. if (valueSerializerClass.getName().contains("confluent")) { - props.putIfAbsent(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, ApicurioRegistryBaseIT.getRegistryApiUrl() + "/ccompat/v7"); + props.putIfAbsent(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, + ApicurioRegistryBaseIT.getRegistryApiUrl() + "/ccompat/v7"); props.putIfAbsent(AbstractKafkaSchemaSerDeConfig.AUTO_REGISTER_SCHEMAS, "false"); - props.putIfAbsent(KafkaAvroSerializerConfig.VALUE_SUBJECT_NAME_STRATEGY, artifactIdStrategy.getName()); + props.putIfAbsent(KafkaAvroSerializerConfig.VALUE_SUBJECT_NAME_STRATEGY, + artifactIdStrategy.getName()); } else { props.putIfAbsent(SerdeConfig.REGISTRY_URL, ApicurioRegistryBaseIT.getRegistryV3ApiUrl()); props.putIfAbsent(SerdeConfig.ARTIFACT_RESOLVER_STRATEGY, artifactIdStrategy.getName()); @@ -80,11 +84,13 @@ public Producer createProducer(Properties props, Class keySerializerCla return new KafkaProducer<>(props); } - public Consumer createConsumer(Class keyDeserializer, Class valueDeserializer, String topicName) { + public Consumer createConsumer(Class keyDeserializer, Class valueDeserializer, + String topicName) { return createConsumer(new Properties(), keyDeserializer, valueDeserializer, topicName); } - public Consumer createConsumer(Properties props, Class keyDeserializer, Class valueDeserializer, String topicName) { + public Consumer createConsumer(Properties props, Class keyDeserializer, + Class valueDeserializer, String topicName) { connectionProperties().forEach((k, v) -> { props.putIfAbsent(k, v); }); @@ -95,9 +101,10 @@ 
public Consumer createConsumer(Properties props, Class keyDeserializer, props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer.getName()); props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer.getName()); - //Schema registry location. + // Schema registry location. if (valueDeserializer.getName().contains("confluent")) { - props.putIfAbsent(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, ApicurioRegistryBaseIT.getRegistryApiUrl() + "/ccompat/v7"); + props.putIfAbsent(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, + ApicurioRegistryBaseIT.getRegistryApiUrl() + "/ccompat/v7"); } else { props.putIfAbsent(SerdeConfig.REGISTRY_URL, ApicurioRegistryBaseIT.getRegistryV3ApiUrl()); } @@ -105,7 +112,8 @@ public Consumer createConsumer(Properties props, Class keyDeserializer, return new KafkaConsumer<>(props); } - public void produceMessages(Producer producer, String topicName, DataGenerator

dataGenerator, int messageCount) throws Exception { + public void produceMessages(Producer producer, String topicName, DataGenerator

dataGenerator, + int messageCount) throws Exception { CompletableFuture resultPromise = CompletableFuture.supplyAsync(() -> { int producedMessages = 0; @@ -137,14 +145,16 @@ public void produceMessages(Producer producer, String topicName, DataGener }); try { - Integer messagesSent = resultPromise.get((MILLIS_PER_MESSAGE * messageCount) + 2000, TimeUnit.MILLISECONDS); + Integer messagesSent = resultPromise.get((MILLIS_PER_MESSAGE * messageCount) + 2000, + TimeUnit.MILLISECONDS); assertEquals(messageCount, messagesSent.intValue()); } catch (Exception e) { throw e; } } - public void consumeMessages(Consumer consumer, String topicName, int messageCount, Predicate dataValidator) throws Exception { + public void consumeMessages(Consumer consumer, String topicName, int messageCount, + Predicate dataValidator) throws Exception { CompletableFuture resultPromise = CompletableFuture.supplyAsync(() -> { consumer.subscribe(Collections.singletonList(topicName)); @@ -157,16 +167,18 @@ public void consumeMessages(Consumer consumer, String topicName, int messa final ConsumerRecords records = consumer.poll(Duration.ofSeconds(1)); if (records.count() == 0) { LOGGER.info("None found"); - } else records.forEach(record -> { - - if (dataValidator != null) { - assertTrue(dataValidator.test(record.value()), "Consumed record validation failed"); - } - - consumedMessages.getAndIncrement(); - LOGGER.info("{} {} {} {}", record.topic(), - record.partition(), record.offset(), record.value()); - }); + } else + records.forEach(record -> { + + if (dataValidator != null) { + assertTrue(dataValidator.test(record.value()), + "Consumed record validation failed"); + } + + consumedMessages.getAndIncrement(); + LOGGER.info("{} {} {} {}", record.topic(), record.partition(), record.offset(), + record.value()); + }); } LOGGER.info("Consumed {} messages", consumedMessages.get()); @@ -180,7 +192,8 @@ public void consumeMessages(Consumer consumer, String topicName, int messa }); try { - Integer messagesConsumed 
= resultPromise.get((MILLIS_PER_MESSAGE * messageCount) + 2000, TimeUnit.MILLISECONDS); + Integer messagesConsumed = resultPromise.get((MILLIS_PER_MESSAGE * messageCount) + 2000, + TimeUnit.MILLISECONDS); assertEquals(messageCount, messagesConsumed.intValue()); } catch (Exception e) { throw e; diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/SimpleSerdesTesterBuilder.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/SimpleSerdesTesterBuilder.java index ec3ae7d3fd..36798536ec 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/SimpleSerdesTesterBuilder.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/SimpleSerdesTesterBuilder.java @@ -16,7 +16,6 @@ import static io.apicurio.tests.serdes.apicurio.Tester.*; import static org.junit.jupiter.api.Assertions.assertTrue; - public class SimpleSerdesTesterBuilder implements TesterBuilder { protected int batchCount = 1; @@ -68,7 +67,8 @@ public > SimpleSerdesTesterBuilder withSerializer( return this; } - public > SimpleSerdesTesterBuilder withDeserializer(Class deserializer) { + public > SimpleSerdesTesterBuilder withDeserializer( + Class deserializer) { this.deserializer = deserializer; return this; } @@ -114,7 +114,6 @@ public Tester build() { return new SimpleSerdesTester(); } - private class SimpleSerdesTester extends SerdesTester implements Tester { /** @@ -122,7 +121,8 @@ private class SimpleSerdesTester extends SerdesTester implements T */ @Override public void test() throws Exception { - Producer producer = this.createProducer(producerProperties, StringSerializer.class, serializer, topic, artifactResolverStrategy); + Producer producer = this.createProducer(producerProperties, StringSerializer.class, + serializer, topic, artifactResolverStrategy); boolean autoCloseByProduceOrConsume = batchCount == 1; setAutoClose(autoCloseByProduceOrConsume); @@ -141,7 +141,8 @@ public void test() throws Exception { 
assertTrue(afterProduceValidator.validate(), "After produce validation failed"); } - Consumer consumer = this.createConsumer(consumerProperties, StringDeserializer.class, deserializer, topic); + Consumer consumer = this.createConsumer(consumerProperties, StringDeserializer.class, + deserializer, topic); int messageCount = batchCount * batchSize; try { @@ -156,5 +157,4 @@ public void test() throws Exception { } - } diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/WrongConfiguredConsumerTesterBuilder.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/WrongConfiguredConsumerTesterBuilder.java index 386a2e97e0..b68a87bfbc 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/WrongConfiguredConsumerTesterBuilder.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/WrongConfiguredConsumerTesterBuilder.java @@ -29,7 +29,6 @@ public Tester build() { return new FailingConsumerTester(); } - private class FailingConsumerTester extends SerdesTester implements Tester { /** @@ -37,7 +36,8 @@ private class FailingConsumerTester extends SerdesTester implement */ @Override public void test() throws Exception { - Producer producer = this.createProducer(producerProperties, StringSerializer.class, serializer, topic, artifactResolverStrategy); + Producer producer = this.createProducer(producerProperties, StringSerializer.class, + serializer, topic, artifactResolverStrategy); int messageCount = 10; this.produceMessages(producer, topic, dataGenerator, messageCount); @@ -46,9 +46,11 @@ public void test() throws Exception { assertTrue(afterProduceValidator.validate(), "After produce validation failed"); } - Consumer consumer = this.createConsumer(consumerProperties, StringDeserializer.class, deserializer, topic); + Consumer consumer = this.createConsumer(consumerProperties, StringDeserializer.class, + deserializer, topic); - assertThrows(ExecutionException.class, () -> 
this.consumeMessages(consumer, topic, messageCount, dataValidator)); + assertThrows(ExecutionException.class, + () -> this.consumeMessages(consumer, topic, messageCount, dataValidator)); } } diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/WrongConfiguredSerdesTesterBuilder.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/WrongConfiguredSerdesTesterBuilder.java index c1920ea425..4fa665f7d1 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/WrongConfiguredSerdesTesterBuilder.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/apicurio/WrongConfiguredSerdesTesterBuilder.java @@ -1,6 +1,5 @@ package io.apicurio.tests.serdes.apicurio; - import io.apicurio.tests.serdes.apicurio.SerdesTester.DataGenerator; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.common.serialization.Serializer; @@ -34,7 +33,8 @@ public WrongConfiguredSerdesTesterBuilder

withProducerProperty(String key, St return this; } - public > WrongConfiguredSerdesTesterBuilder

withSerializer(Class serializer) { + public > WrongConfiguredSerdesTesterBuilder

withSerializer( + Class serializer) { this.serializer = serializer; return this; } @@ -67,7 +67,6 @@ public Tester build() { return new WrongConfiguredSerdesTester(); } - private class WrongConfiguredSerdesTester extends SerdesTester implements Tester { /** @@ -75,9 +74,11 @@ private class WrongConfiguredSerdesTester extends SerdesTester producer = this.createProducer(producerProperties, StringSerializer.class, serializer, topic, artifactResolverStrategy); + Producer producer = this.createProducer(producerProperties, StringSerializer.class, + serializer, topic, artifactResolverStrategy); - assertThrows(ExecutionException.class, () -> this.produceMessages(producer, topic, dataGenerator, 10)); + assertThrows(ExecutionException.class, + () -> this.produceMessages(producer, topic, dataGenerator, 10)); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/serdes/confluent/BasicConfluentSerDesIT.java b/integration-tests/src/test/java/io/apicurio/tests/serdes/confluent/BasicConfluentSerDesIT.java index f4f48eaa6c..ab0e444169 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/serdes/confluent/BasicConfluentSerDesIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/serdes/confluent/BasicConfluentSerDesIT.java @@ -1,22 +1,21 @@ package io.apicurio.tests.serdes.confluent; -import io.apicurio.registry.types.ContentTypes; -import io.apicurio.tests.ConfluentBaseIT; -import io.apicurio.tests.serdes.apicurio.SerdesTester; -import io.apicurio.tests.serdes.apicurio.SimpleSerdesTesterBuilder; -import io.apicurio.tests.serdes.apicurio.WrongConfiguredSerdesTesterBuilder; -import io.apicurio.tests.utils.AvroGenericRecordSchemaFactory; -import io.apicurio.tests.utils.Constants; -import io.apicurio.tests.utils.KafkaFacade; import io.apicurio.registry.serde.SerdeConfig; import io.apicurio.registry.serde.avro.AvroKafkaDeserializer; import io.apicurio.registry.serde.avro.AvroKafkaSerializer; import io.apicurio.registry.serde.avro.strategy.RecordIdStrategy; 
import io.apicurio.registry.serde.config.IdOption; import io.apicurio.registry.types.ArtifactType; +import io.apicurio.registry.types.ContentTypes; import io.apicurio.registry.utils.IoUtil; import io.apicurio.registry.utils.tests.TestUtils; - +import io.apicurio.tests.ConfluentBaseIT; +import io.apicurio.tests.serdes.apicurio.SerdesTester; +import io.apicurio.tests.serdes.apicurio.SimpleSerdesTesterBuilder; +import io.apicurio.tests.serdes.apicurio.WrongConfiguredSerdesTesterBuilder; +import io.apicurio.tests.utils.AvroGenericRecordSchemaFactory; +import io.apicurio.tests.utils.Constants; +import io.apicurio.tests.utils.KafkaFacade; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.confluent.kafka.serializers.KafkaAvroDeserializer; @@ -64,20 +63,16 @@ void testAvroConfluentSerDes() throws Exception { String subjectName = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordconfluent1", List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordconfluent1", + List.of("key1")); ParsedSchema pschema = new AvroSchema(IoUtil.toString(avroSchema.generateSchemaBytes())); createArtifactViaConfluentClient(pschema, subjectName); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(KafkaAvroSerializer.class) - .withDeserializer(KafkaAvroDeserializer.class) - .withStrategy(TopicNameStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) - .build() - .test(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(KafkaAvroSerializer.class).withDeserializer(KafkaAvroDeserializer.class) + .withStrategy(TopicNameStrategy.class).withDataGenerator(avroSchema::generateRecord) + .withDataValidator(avroSchema::validateRecord).build().test(); } @Test @@ -86,22 +81,18 @@ 
void testAvroConfluentApicurio() throws Exception { String subjectName = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordconfluent1", List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordconfluent1", + List.of("key1")); ParsedSchema pschema = new AvroSchema(IoUtil.toString(avroSchema.generateSchemaBytes())); createArtifactViaConfluentClient(pschema, subjectName); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(KafkaAvroSerializer.class) - .withDeserializer(AvroKafkaDeserializer.class) - .withConsumerProperty(SerdeConfig.ENABLE_CONFLUENT_ID_HANDLER, "true") - .withConsumerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) - .withStrategy(TopicNameStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) - .build() - .test(); + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(KafkaAvroSerializer.class).withDeserializer(AvroKafkaDeserializer.class) + .withConsumerProperty(SerdeConfig.ENABLE_CONFLUENT_ID_HANDLER, "true") + .withConsumerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) + .withStrategy(TopicNameStrategy.class).withDataGenerator(avroSchema::generateRecord) + .withDataValidator(avroSchema::validateRecord).build().test(); } @Test @@ -110,25 +101,24 @@ void testAvroApicurioConfluent() throws Exception { String subjectName = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordconfluent1", List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordconfluent1", + List.of("key1")); - createArtifact("default", subjectName, ArtifactType.AVRO, avroSchema.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null, null); + 
createArtifact("default", subjectName, ArtifactType.AVRO, avroSchema.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null, null); - new SimpleSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(AvroKafkaSerializer.class) + new SimpleSerdesTesterBuilder().withTopic(topicName) + .withSerializer(AvroKafkaSerializer.class) - //very important - .withProducerProperty(SerdeConfig.ENABLE_HEADERS, "false") + // very important + .withProducerProperty(SerdeConfig.ENABLE_HEADERS, "false") - .withProducerProperty(SerdeConfig.ENABLE_CONFLUENT_ID_HANDLER, "true") - .withProducerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) - .withDeserializer(KafkaAvroDeserializer.class) - .withStrategy(io.apicurio.registry.serde.strategy.TopicIdStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .withDataValidator(avroSchema::validateRecord) - .build() - .test(); + .withProducerProperty(SerdeConfig.ENABLE_CONFLUENT_ID_HANDLER, "true") + .withProducerProperty(SerdeConfig.USE_ID, IdOption.contentId.name()) + .withDeserializer(KafkaAvroDeserializer.class) + .withStrategy(io.apicurio.registry.serde.strategy.TopicIdStrategy.class) + .withDataGenerator(avroSchema::generateRecord).withDataValidator(avroSchema::validateRecord) + .build().test(); } @Test @@ -138,21 +128,19 @@ void testAvroConfluentSerDesFail() throws Exception { String subjectName = "myrecordconfluent2"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(subjectName, List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(subjectName, + List.of("key1")); ParsedSchema pschema = new AvroSchema(IoUtil.toString(avroSchema.generateSchemaBytes())); createArtifactViaConfluentClient(pschema, subjectName); - AvroGenericRecordSchemaFactory wrongSchema = new AvroGenericRecordSchemaFactory(subjectName, List.of("wrongkey")); + AvroGenericRecordSchemaFactory wrongSchema = new 
AvroGenericRecordSchemaFactory(subjectName, + List.of("wrongkey")); - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(KafkaAvroSerializer.class) - .withStrategy(RecordNameStrategy.class) - //note, we use an incorrect wrong data generator in purpose - .withDataGenerator(wrongSchema::generateRecord) - .build() - .test(); + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(KafkaAvroSerializer.class).withStrategy(RecordNameStrategy.class) + // note, we use an incorrect wrong data generator in purpose + .withDataGenerator(wrongSchema::generateRecord).build().test(); } @@ -162,18 +150,15 @@ void testAvroConfluentSerDesWrongStrategyTopic() throws Exception { String subjectName = "myrecordconfluent3"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(subjectName, List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(subjectName, + List.of("key1")); ParsedSchema pschema = new AvroSchema(IoUtil.toString(avroSchema.generateSchemaBytes())); createArtifactViaConfluentClient(pschema, subjectName); - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(KafkaAvroSerializer.class) - .withStrategy(TopicNameStrategy.class) - .withDataGenerator(avroSchema::generateRecord) - .build() - .test(); + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(KafkaAvroSerializer.class).withStrategy(TopicNameStrategy.class) + .withDataGenerator(avroSchema::generateRecord).build().test(); } @Test @@ -182,21 +167,19 @@ void testAvroConfluentSerDesWrongStrategyRecord() throws Exception { String subjectName = topicName + "-value"; kafkaCluster.createTopic(topicName, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory("myrecordconfluent4", List.of("key1")); + AvroGenericRecordSchemaFactory avroSchema = new 
AvroGenericRecordSchemaFactory("myrecordconfluent4", + List.of("key1")); ParsedSchema pschema = new AvroSchema(IoUtil.toString(avroSchema.generateSchemaBytes())); createArtifactViaConfluentClient(pschema, subjectName); - AvroGenericRecordSchemaFactory wrongSchema = new AvroGenericRecordSchemaFactory("myrecordconfluent4", List.of("wrongkey")); + AvroGenericRecordSchemaFactory wrongSchema = new AvroGenericRecordSchemaFactory("myrecordconfluent4", + List.of("wrongkey")); - new WrongConfiguredSerdesTesterBuilder() - .withTopic(topicName) - .withSerializer(KafkaAvroSerializer.class) - .withStrategy(RecordNameStrategy.class) - //note, we use an incorrect wrong data generator in purpose - .withDataGenerator(wrongSchema::generateRecord) - .build() - .test(); + new WrongConfiguredSerdesTesterBuilder().withTopic(topicName) + .withSerializer(KafkaAvroSerializer.class).withStrategy(RecordNameStrategy.class) + // note, we use an incorrect wrong data generator in purpose + .withDataGenerator(wrongSchema::generateRecord).build().test(); } @Test @@ -210,7 +193,8 @@ void testEvolveAvroConfluent() throws Exception { String recordName = TestUtils.generateSubject(); String subjectName = topicName + "-" + recordName; String schemaKey = "key1"; - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(recordName, List.of(schemaKey)); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(recordName, + List.of(schemaKey)); ParsedSchema pschema = new AvroSchema(IoUtil.toString(avroSchema.generateSchemaBytes())); createArtifactViaConfluentClient(pschema, subjectName); @@ -219,20 +203,26 @@ void testEvolveAvroConfluent() throws Exception { int messageCount = 10; - Producer producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, strategy); - Consumer consumer = tester.createConsumer(StringDeserializer.class, KafkaAvroDeserializer.class, topicName); + Producer producer = 
tester.createProducer(StringSerializer.class, + KafkaAvroSerializer.class, topicName, strategy); + Consumer consumer = tester.createConsumer(StringDeserializer.class, + KafkaAvroDeserializer.class, topicName); tester.produceMessages(producer, topicName, avroSchema::generateRecord, messageCount); tester.consumeMessages(consumer, topicName, messageCount, avroSchema::validateRecord); String schemaKey2 = "key2"; - AvroGenericRecordSchemaFactory avroSchema2 = new AvroGenericRecordSchemaFactory(recordName, List.of(schemaKey, schemaKey2)); - createArtifactVersion("default", subjectName, avroSchema2.generateSchema().toString(), ContentTypes.APPLICATION_JSON, null); + AvroGenericRecordSchemaFactory avroSchema2 = new AvroGenericRecordSchemaFactory(recordName, + List.of(schemaKey, schemaKey2)); + createArtifactVersion("default", subjectName, avroSchema2.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null); - producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, + strategy); tester.produceMessages(producer, topicName, avroSchema2::generateRecord, messageCount); - producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, + strategy); tester.produceMessages(producer, topicName, avroSchema::generateRecord, messageCount); consumer = tester.createConsumer(StringDeserializer.class, KafkaAvroDeserializer.class, topicName); @@ -254,16 +244,21 @@ void testEvolveAvroConfluent() throws Exception { } String schemaKey3 = "key3"; - AvroGenericRecordSchemaFactory avroSchema3 = new AvroGenericRecordSchemaFactory(recordName, List.of(schemaKey, schemaKey2, schemaKey3)); - createArtifactVersion("default", subjectName, avroSchema3.generateSchema().toString(), ContentTypes.APPLICATION_JSON, 
null); + AvroGenericRecordSchemaFactory avroSchema3 = new AvroGenericRecordSchemaFactory(recordName, + List.of(schemaKey, schemaKey2, schemaKey3)); + createArtifactVersion("default", subjectName, avroSchema3.generateSchema().toString(), + ContentTypes.APPLICATION_JSON, null); - producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, + strategy); tester.produceMessages(producer, topicName, avroSchema3::generateRecord, messageCount); - producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, + strategy); tester.produceMessages(producer, topicName, avroSchema2::generateRecord, messageCount); - producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, strategy); + producer = tester.createProducer(StringSerializer.class, KafkaAvroSerializer.class, topicName, + strategy); tester.produceMessages(producer, topicName, avroSchema::generateRecord, messageCount); consumer = tester.createConsumer(StringDeserializer.class, KafkaAvroDeserializer.class, topicName); @@ -308,7 +303,8 @@ void testAvroConfluentForMultipleTopics() throws Exception { kafkaCluster.createTopic(topicName2, 1, 1); kafkaCluster.createTopic(topicName3, 1, 1); - AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(subjectName, List.of(schemaKey)); + AvroGenericRecordSchemaFactory avroSchema = new AvroGenericRecordSchemaFactory(subjectName, + List.of(schemaKey)); ParsedSchema pschema = new AvroSchema(IoUtil.toString(avroSchema.generateSchemaBytes())); createArtifactViaConfluentClient(pschema, subjectName); @@ -316,13 +312,18 @@ void testAvroConfluentForMultipleTopics() throws Exception { int messageCount = 10; - Producer producer1 = 
tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName1, strategy); - Producer producer2 = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName2, strategy); - Producer producer3 = tester.createProducer(StringSerializer.class, AvroKafkaSerializer.class, topicName3, strategy); - Consumer consumer1 = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, topicName1); - Consumer consumer2 = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, topicName2); - Consumer consumer3 = tester.createConsumer(StringDeserializer.class, AvroKafkaDeserializer.class, topicName3); - + Producer producer1 = tester.createProducer(StringSerializer.class, + AvroKafkaSerializer.class, topicName1, strategy); + Producer producer2 = tester.createProducer(StringSerializer.class, + AvroKafkaSerializer.class, topicName2, strategy); + Producer producer3 = tester.createProducer(StringSerializer.class, + AvroKafkaSerializer.class, topicName3, strategy); + Consumer consumer1 = tester.createConsumer(StringDeserializer.class, + AvroKafkaDeserializer.class, topicName1); + Consumer consumer2 = tester.createConsumer(StringDeserializer.class, + AvroKafkaDeserializer.class, topicName2); + Consumer consumer3 = tester.createConsumer(StringDeserializer.class, + AvroKafkaDeserializer.class, topicName3); tester.produceMessages(producer1, topicName1, avroSchema::generateRecord, messageCount); tester.produceMessages(producer2, topicName2, avroSchema::generateRecord, messageCount); @@ -335,4 +336,3 @@ void testAvroConfluentForMultipleTopics() throws Exception { } } - diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/AllArtifactTypesIT.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/AllArtifactTypesIT.java index 9e95bf19c7..cc4825b903 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/AllArtifactTypesIT.java +++ 
b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/AllArtifactTypesIT.java @@ -52,17 +52,19 @@ void doTest(String v1Resource, String v2Resource, String atype, String contentTy // Test create new version (valid content) CreateVersion testCV = TestUtils.clientCreateVersion(v2Content, contentType); - retryOp((rc) -> rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(testCV, config -> { - config.queryParameters.dryRun = true; - })); + retryOp((rc) -> rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(testCV, config -> { + config.queryParameters.dryRun = true; + })); // Test create new version (invalid content) retryAssertClientError("RuleViolationException", 409, (rc) -> { String invalidContent = "{\"This is not a valid content."; CreateVersion tcv = TestUtils.clientCreateVersion(invalidContent, contentType); - rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(tcv, config -> { - config.queryParameters.dryRun = true; - }); + rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(tcv, + config -> { + config.queryParameters.dryRun = true; + }); }, errorCodeExtractor); // Update artifact (valid v2 content) @@ -70,12 +72,13 @@ void doTest(String v1Resource, String v2Resource, String atype, String contentTy // Find artifact by content InputStream contentIS = new ByteArrayInputStream(v1Content.getBytes(StandardCharsets.UTF_8)); - VersionSearchResults results = registryClient.search().versions().post(contentIS, contentType, config -> { - config.queryParameters.groupId = groupId; - config.queryParameters.artifactId = artifactId; - config.queryParameters.orderby = VersionSortBy.GlobalId; - config.queryParameters.order = SortOrder.Desc; - }); + VersionSearchResults results = registryClient.search().versions().post(contentIS, contentType, + config -> { + config.queryParameters.groupId = groupId; + config.queryParameters.artifactId 
= artifactId; + config.queryParameters.orderby = VersionSortBy.GlobalId; + config.queryParameters.order = SortOrder.Desc; + }); assertNotNull(results); assertTrue(results.getCount() > 0); assertNotNull(results.getVersions().get(0).getGlobalId()); @@ -83,19 +86,26 @@ void doTest(String v1Resource, String v2Resource, String atype, String contentTy assertNotNull(results.getVersions().get(0).getVersion()); // Update artifact (invalid content) - CreateVersion createVersion = TestUtils.clientCreateVersion("{\"This is not a valid content.", contentType); - TestUtils.assertClientError("RuleViolationException", 409, () -> - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion), errorCodeExtractor); + CreateVersion createVersion = TestUtils.clientCreateVersion("{\"This is not a valid content.", + contentType); + TestUtils + .assertClientError( + "RuleViolationException", 409, () -> registryClient.groups().byGroupId(groupId) + .artifacts().byArtifactId(artifactId).versions().post(createVersion), + errorCodeExtractor); // Override Validation rule for the artifact createRule.setConfig("NONE"); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().post(createRule); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules() + .post(createRule); // Make sure we have rule - retryOp((rc) -> rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules().byRuleType(createRule.getRuleType().name()).get()); + retryOp((rc) -> rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).rules() + .byRuleType(createRule.getRuleType().name()).get()); // Update artifact (invalid content) - should work now - VersionMetaData amd2 = createArtifactVersion(groupId, artifactId,"{\"This is not a valid content.", ContentTypes.APPLICATION_JSON, null); + VersionMetaData amd2 = createArtifactVersion(groupId, artifactId, "{\"This is not a valid content.", + 
ContentTypes.APPLICATION_JSON, null); // Make sure artifact is fully registered retryOp((rc) -> rc.ids().globalIds().byGlobalId(amd2.getGlobalId()).get()); } @@ -103,39 +113,46 @@ void doTest(String v1Resource, String v2Resource, String atype, String contentTy @Test @Tag(ACCEPTANCE) void testAvro() throws Exception { - doTest("avro/multi-field_v1.json", "avro/multi-field_v2.json", ArtifactType.AVRO, ContentTypes.APPLICATION_JSON); + doTest("avro/multi-field_v1.json", "avro/multi-field_v2.json", ArtifactType.AVRO, + ContentTypes.APPLICATION_JSON); } @Test @Tag(ACCEPTANCE) void testProtobuf() throws Exception { - doTest("protobuf/tutorial_v1.proto", "protobuf/tutorial_v2.proto", ArtifactType.PROTOBUF, ContentTypes.APPLICATION_PROTOBUF); + doTest("protobuf/tutorial_v1.proto", "protobuf/tutorial_v2.proto", ArtifactType.PROTOBUF, + ContentTypes.APPLICATION_PROTOBUF); } @Test @Tag(ACCEPTANCE) void testJsonSchema() throws Exception { - doTest("jsonSchema/person_v1.json", "jsonSchema/person_v2.json", ArtifactType.JSON, ContentTypes.APPLICATION_JSON); + doTest("jsonSchema/person_v1.json", "jsonSchema/person_v2.json", ArtifactType.JSON, + ContentTypes.APPLICATION_JSON); } @Test void testKafkaConnect() throws Exception { - doTest("kafkaConnect/simple_v1.json", "kafkaConnect/simple_v2.json", ArtifactType.KCONNECT, ContentTypes.APPLICATION_JSON); + doTest("kafkaConnect/simple_v1.json", "kafkaConnect/simple_v2.json", ArtifactType.KCONNECT, + ContentTypes.APPLICATION_JSON); } @Test void testOpenApi30() throws Exception { - doTest("openapi/3.0-petstore_v1.json", "openapi/3.0-petstore_v2.json", ArtifactType.OPENAPI, ContentTypes.APPLICATION_JSON); + doTest("openapi/3.0-petstore_v1.json", "openapi/3.0-petstore_v2.json", ArtifactType.OPENAPI, + ContentTypes.APPLICATION_JSON); } @Test void testAsyncApi() throws Exception { - doTest("asyncapi/2.0-streetlights_v1.json", "asyncapi/2.0-streetlights_v2.json", ArtifactType.ASYNCAPI, ContentTypes.APPLICATION_JSON); + 
doTest("asyncapi/2.0-streetlights_v1.json", "asyncapi/2.0-streetlights_v2.json", + ArtifactType.ASYNCAPI, ContentTypes.APPLICATION_JSON); } @Test void testGraphQL() throws Exception { - doTest("graphql/swars_v1.graphql", "graphql/swars_v2.graphql", ArtifactType.GRAPHQL, ContentTypes.APPLICATION_GRAPHQL); + doTest("graphql/swars_v1.graphql", "graphql/swars_v2.graphql", ArtifactType.GRAPHQL, + ContentTypes.APPLICATION_GRAPHQL); } @AfterEach diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/ArtifactsIT.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/ArtifactsIT.java index ae0a9003cb..5d24153404 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/ArtifactsIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/ArtifactsIT.java @@ -83,15 +83,16 @@ void createEmptyArtifact() throws Exception { assertEquals(artifactId, amd.getArtifactId()); assertEquals(name, amd.getName()); - // Get the list of versions for the artifact: should be 0 versions - var results = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().get(); + // Get the list of versions for the artifact: should be 0 versions + var results = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().get(); assertNotNull(results); assertEquals(0, results.getCount()); assertNotNull(results.getVersions()); assertEquals(0, results.getVersions().size()); } - @Test + @Test @Tag(ACCEPTANCE) void createAndUpdateArtifact() throws Exception { CreateRule createRule = new CreateRule(); @@ -110,41 +111,50 @@ void createAndUpdateArtifact() throws Exception { var artifactData = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - CreateArtifactResponse caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, ContentTypes.APPLICATION_JSON, IfArtifactExists.FAIL, 
null); + CreateArtifactResponse caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, + artifactData, ContentTypes.APPLICATION_JSON, IfArtifactExists.FAIL, null); LOGGER.info("Created artifact {} with metadata {}", artifactId, caResponse.getArtifact().toString()); - InputStream latest = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get(); + InputStream latest = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").content().get(); JsonNode response = mapper.readTree(latest); - LOGGER.info("Artifact with name:{} and content:{} was created", response.get("name").asText(), response); + LOGGER.info("Artifact with name:{} and content:{} was created", response.get("name").asText(), + response); String invalidArtifactDefinition = "record\ntest"; String invalidArtifactId = "createAndUpdateArtifactId2"; LOGGER.info("Invalid artifact sent {}", invalidArtifactDefinition); - assertClientError("RuleViolationException", 409, () -> - registryClient.groups().byGroupId("ccc").artifacts().post( - TestUtils.clientCreateArtifact(invalidArtifactId, ArtifactType.AVRO, invalidArtifactDefinition, ContentTypes.APPLICATION_JSON) - ), errorCodeExtractor); + assertClientError("RuleViolationException", 409, + () -> registryClient.groups().byGroupId("ccc").artifacts() + .post(TestUtils.clientCreateArtifact(invalidArtifactId, ArtifactType.AVRO, + invalidArtifactDefinition, ContentTypes.APPLICATION_JSON)), + errorCodeExtractor); artifactData = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"bar\",\"type\":\"long\"}]}"; - CreateVersion createVersion = TestUtils.clientCreateVersion(artifactData, ContentTypes.APPLICATION_JSON); - VersionMetaData metaData = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(createVersion); + CreateVersion createVersion = 
TestUtils.clientCreateVersion(artifactData, + ContentTypes.APPLICATION_JSON); + VersionMetaData metaData = registryClient.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().post(createVersion); LOGGER.info("Artifact with ID {} was updated: {}", artifactId, metaData.toString()); // Make sure artifact is fully registered retryOp((rc) -> rc.ids().globalIds().byGlobalId(metaData.getGlobalId()).get()); - latest = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get(); + latest = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("branch=latest").content().get(); response = mapper.readTree(latest); LOGGER.info("Artifact with ID {} was updated: {}", artifactId, response); List apicurioVersions = listArtifactVersions(registryClient, groupId, artifactId); - LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId, apicurioVersions.toString()); + LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId, + apicurioVersions.toString()); assertThat(apicurioVersions, hasItems("1", "2")); - InputStream version1 = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("1").content().get(); + InputStream version1 = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("1").content().get(); response = mapper.readTree(version1); LOGGER.info("Artifact with ID {} and version {}: {}", artifactId, 1, response); @@ -157,18 +167,18 @@ void createAndDeleteMultipleArtifacts() throws Exception { LOGGER.info("Creating some artifacts..."); String groupId = TestUtils.generateGroupId(); - List artifacts = IntStream.range(0, 10) - .mapToObj(i -> { - String artifactId = TestUtils.generateSubject(); - try { - String content = new AvroGenericRecordSchemaFactory(groupId, 
artifactId, List.of("foo")).generateSchema().toString(); - String ct = ContentTypes.APPLICATION_JSON; - return createArtifact(groupId, artifactId, ArtifactType.AVRO, content, ct, null, null).getVersion(); - } catch (Exception e) { - throw new RuntimeException(e); - } - }) - .collect(Collectors.toList()); + List artifacts = IntStream.range(0, 10).mapToObj(i -> { + String artifactId = TestUtils.generateSubject(); + try { + String content = new AvroGenericRecordSchemaFactory(groupId, artifactId, List.of("foo")) + .generateSchema().toString(); + String ct = ContentTypes.APPLICATION_JSON; + return createArtifact(groupId, artifactId, ArtifactType.AVRO, content, ct, null, null) + .getVersion(); + } catch (Exception e) { + throw new RuntimeException(e); + } + }).collect(Collectors.toList()); LOGGER.info("Created {} artifacts", artifacts.size()); @@ -176,8 +186,10 @@ void createAndDeleteMultipleArtifacts() throws Exception { registryClient.groups().byGroupId(groupId).delete(); for (VersionMetaData artifact : artifacts) { - retryAssertClientError("ArtifactNotFoundException", 404, (rc) -> - rc.groups().byGroupId(artifact.getGroupId()).artifacts().byArtifactId(artifact.getArtifactId()).get(), errorCodeExtractor); + retryAssertClientError("ArtifactNotFoundException", 404, + (rc) -> rc.groups().byGroupId(artifact.getGroupId()).artifacts() + .byArtifactId(artifact.getArtifactId()).get(), + errorCodeExtractor); } } @@ -188,11 +200,13 @@ void createNonAvroArtifact() throws Exception { String artifactData = "{\"type\":\"INVALID\",\"config\":\"invalid\"}"; String artifactId = TestUtils.generateArtifactId(); - var caResponse = createArtifact(groupId, artifactId, ArtifactType.JSON, artifactData, ContentTypes.APPLICATION_JSON, null, null); + var caResponse = createArtifact(groupId, artifactId, ArtifactType.JSON, artifactData, + ContentTypes.APPLICATION_JSON, null, null); LOGGER.info("Created artifact {} with metadata {}", artifactId, caResponse.getArtifact()); - InputStream latest = 
registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").content().get(); + InputStream latest = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").content().get(); JsonNode response = mapper.readTree(latest); LOGGER.info("Got info about artifact with ID {}: {}", artifactId, response); @@ -205,16 +219,19 @@ void createArtifactSpecificVersion() throws Exception { String groupId = TestUtils.generateGroupId(); String artifactData = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; String artifactId = TestUtils.generateArtifactId(); - var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, ContentTypes.APPLICATION_JSON, null, null); + var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, + ContentTypes.APPLICATION_JSON, null, null); LOGGER.info("Created artifact {} with metadata {}", artifactId, caResponse.getArtifact()); artifactData = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"; - var metaData = createArtifactVersion(groupId, artifactId, artifactData, ContentTypes.APPLICATION_JSON, null); + var metaData = createArtifactVersion(groupId, artifactId, artifactData, ContentTypes.APPLICATION_JSON, + null); LOGGER.info("Artifact with ID {} was updated: {}", artifactId, metaData); List artifactVersions = listArtifactVersions(registryClient, groupId, artifactId); - LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId, artifactVersions.toString()); + LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId, + artifactVersions.toString()); assertThat(artifactVersions, hasItems("1", "2")); } @@ -224,12 +241,15 @@ void testDuplicatedArtifact() throws Exception { String artifactData = 
"{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; String groupId = TestUtils.generateGroupId(); String artifactId = TestUtils.generateArtifactId(); - var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, ContentTypes.APPLICATION_JSON, null, null); + var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, + ContentTypes.APPLICATION_JSON, null, null); LOGGER.info("Created artifact {} with metadata {}", artifactId, caResponse.getArtifact().toString()); String invalidArtifactData = "{\"type\":\"record\",\"name\":\"alreadyExistArtifact\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - assertClientError("ArtifactAlreadyExistsException", 409, () -> - createArtifact(groupId, artifactId, ArtifactType.AVRO, invalidArtifactData, ContentTypes.APPLICATION_JSON, null, null), true, errorCodeExtractor); + assertClientError( + "ArtifactAlreadyExistsException", 409, () -> createArtifact(groupId, artifactId, + ArtifactType.AVRO, invalidArtifactData, ContentTypes.APPLICATION_JSON, null, null), + true, errorCodeExtractor); } @Test @@ -239,17 +259,20 @@ void testVersionAlreadyExistsIfExistsCreateVersion() throws Exception { String groupId = TestUtils.generateGroupId(); String artifactId = TestUtils.generateArtifactId(); - var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, ContentTypes.APPLICATION_JSON, null, (ca) -> { - ca.getFirstVersion().setVersion("1.1"); - }); + var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, + ContentTypes.APPLICATION_JSON, null, (ca) -> { + ca.getFirstVersion().setVersion("1.1"); + }); LOGGER.info("Created artifact {} with metadata {}", artifactId, caResponse.getArtifact().toString()); String sameArtifactData = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - assertClientError("VersionAlreadyExistsException", 409, () -> 
- createArtifact(groupId, artifactId, ArtifactType.AVRO, sameArtifactData, ContentTypes.APPLICATION_JSON, IfArtifactExists.CREATE_VERSION, (ca) -> { - ca.getFirstVersion().setVersion("1.1"); - }), true, errorCodeExtractor); + assertClientError("VersionAlreadyExistsException", 409, + () -> createArtifact(groupId, artifactId, ArtifactType.AVRO, sameArtifactData, + ContentTypes.APPLICATION_JSON, IfArtifactExists.CREATE_VERSION, (ca) -> { + ca.getFirstVersion().setVersion("1.1"); + }), + true, errorCodeExtractor); } @Test @@ -261,50 +284,65 @@ void testDisableEnableArtifactVersion() throws Exception { String artifactDataV3 = "{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; // Create the artifact - VersionMetaData v1MD = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, ContentTypes.APPLICATION_JSON, null, null).getVersion(); + VersionMetaData v1MD = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, + ContentTypes.APPLICATION_JSON, null, null).getVersion(); LOGGER.info("Created artifact {} with metadata {}", artifactId, v1MD.toString()); // Update the artifact (v2) - VersionMetaData v2MD = createArtifactVersion(groupId, artifactId, artifactDataV2, ContentTypes.APPLICATION_JSON, null); + VersionMetaData v2MD = createArtifactVersion(groupId, artifactId, artifactDataV2, + ContentTypes.APPLICATION_JSON, null); // Update the artifact (v3) - VersionMetaData v3MD = createArtifactVersion(groupId, artifactId, artifactDataV3, ContentTypes.APPLICATION_JSON, null); + VersionMetaData v3MD = createArtifactVersion(groupId, artifactId, artifactDataV3, + ContentTypes.APPLICATION_JSON, null); // Disable v3 - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v3MD.getVersion())).put(toEditableVersionMetaData(VersionState.DISABLED)); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + 
.byVersionExpression(String.valueOf(v3MD.getVersion())) + .put(toEditableVersionMetaData(VersionState.DISABLED)); // Verify artifact retryOp((rc) -> { - VersionMetaData actualMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData actualMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").get(); assertEquals("2", actualMD.getVersion()); // Verify v1 - VersionMetaData actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v1MD.getVersion())).get(); + VersionMetaData actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression(String.valueOf(v1MD.getVersion())).get(); assertEquals(VersionState.ENABLED, actualVMD.getState()); // Verify v2 - actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v2MD.getVersion())).get(); + actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression(String.valueOf(v2MD.getVersion())).get(); assertEquals(VersionState.ENABLED, actualVMD.getState()); // Verify v3 - actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v3MD.getVersion())).get(); + actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression(String.valueOf(v3MD.getVersion())).get(); assertEquals(VersionState.DISABLED, actualVMD.getState()); }); // Re-enable v3 - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v3MD.getVersion())).put(toEditableVersionMetaData(VersionState.ENABLED)); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() 
+ .byVersionExpression(String.valueOf(v3MD.getVersion())) + .put(toEditableVersionMetaData(VersionState.ENABLED)); retryOp((rc) -> { // Verify artifact (now v3) - VersionMetaData actualMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("branch=latest").get(); + VersionMetaData actualMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("branch=latest").get(); assertEquals("3", actualMD.getVersion()); // version 2 is active (3 is disabled) // Verify v1 - VersionMetaData actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v1MD.getVersion())).get(); + VersionMetaData actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression(String.valueOf(v1MD.getVersion())).get(); assertEquals(VersionState.ENABLED, actualVMD.getState()); // Verify v2 - actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v2MD.getVersion())).get(); + actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression(String.valueOf(v2MD.getVersion())).get(); assertEquals(VersionState.ENABLED, actualVMD.getState()); // Verify v3 - actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v3MD.getVersion())).get(); + actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression(String.valueOf(v3MD.getVersion())).get(); assertEquals(VersionState.ENABLED, actualVMD.getState()); }); } @@ -318,35 +356,45 @@ void testDeprecateArtifactVersion() throws Exception { String artifactDataV3 = "{\"type\":\"record\",\"name\":\"myrecord3\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; // Create the artifact - VersionMetaData v1MD = 
createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, ContentTypes.APPLICATION_JSON, null, null).getVersion(); + VersionMetaData v1MD = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, + ContentTypes.APPLICATION_JSON, null, null).getVersion(); LOGGER.info("Created artifact {} with metadata {}", artifactId, v1MD.toString()); // Update the artifact (v2) - VersionMetaData v2MD = createArtifactVersion(groupId, artifactId, artifactDataV2, ContentTypes.APPLICATION_JSON, null); + VersionMetaData v2MD = createArtifactVersion(groupId, artifactId, artifactDataV2, + ContentTypes.APPLICATION_JSON, null); // Update the artifact (v3) - VersionMetaData v3MD = createArtifactVersion(groupId, artifactId, artifactDataV3, ContentTypes.APPLICATION_JSON, null); + VersionMetaData v3MD = createArtifactVersion(groupId, artifactId, artifactDataV3, + ContentTypes.APPLICATION_JSON, null); // Deprecate v2 - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v2MD.getVersion())).put(toEditableVersionMetaData(VersionState.DEPRECATED)); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression(String.valueOf(v2MD.getVersion())) + .put(toEditableVersionMetaData(VersionState.DEPRECATED)); retryOp((rc) -> { // Verify v1 - VersionMetaData actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v1MD.getVersion())).get(); + VersionMetaData actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression(String.valueOf(v1MD.getVersion())).get(); assertEquals(VersionState.ENABLED, actualVMD.getState()); // Verify v2 - actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v2MD.getVersion())).get(); + actualVMD = 
rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression(String.valueOf(v2MD.getVersion())).get(); assertEquals(VersionState.DEPRECATED, actualVMD.getState()); // Verify v3 - actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(String.valueOf(v3MD.getVersion())).get(); + actualVMD = rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression(String.valueOf(v3MD.getVersion())).get(); assertEquals(VersionState.ENABLED, actualVMD.getState()); }); } @Test void deleteNonexistingSchema() throws Exception { - assertClientError("ArtifactNotFoundException", 404, () -> - registryClient.groups().byGroupId("nonn-existent-group").artifacts().byArtifactId("non-existing").get(), errorCodeExtractor); + assertClientError( + "ArtifactNotFoundException", 404, () -> registryClient.groups() + .byGroupId("nonn-existent-group").artifacts().byArtifactId("non-existing").get(), + errorCodeExtractor); } @Test @@ -355,7 +403,8 @@ void testAllowedSpecialCharacters() throws Exception { String artifactId = "._:-'`?0=)(/&$!<>,;,:"; String content = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - createArtifact(groupId, artifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON, null, null); + createArtifact(groupId, artifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON, null, + null); registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); @@ -375,13 +424,13 @@ void testAllowedSpecialCharacters() throws Exception { }); CreateVersion testCV = TestUtils.clientCreateVersion(content, ContentTypes.APPLICATION_JSON); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(testCV, config -> { - config.queryParameters.dryRun = true; - }); + 
registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(testCV, config -> { + config.queryParameters.dryRun = true; + }); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post( - TestUtils.clientCreateVersion(content, ContentTypes.APPLICATION_JSON) - ); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(TestUtils.clientCreateVersion(content, ContentTypes.APPLICATION_JSON)); registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); @@ -397,7 +446,8 @@ void testAllowedSpecialCharactersCreateViaApi() throws Exception { ArtifactContent artifactContent = new ArtifactContent(); artifactContent.setContent(content); - createArtifact(groupId, artifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON, null, null); + createArtifact(groupId, artifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON, null, + null); retryOp((rc) -> rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get()); @@ -419,13 +469,13 @@ void testAllowedSpecialCharactersCreateViaApi() throws Exception { }); CreateVersion testCV = TestUtils.clientCreateVersion(content, ContentTypes.APPLICATION_JSON); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(testCV, config -> { - config.queryParameters.dryRun = true; - }); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(testCV, config -> { + config.queryParameters.dryRun = true; + }); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post( - TestUtils.clientCreateVersion(content, ContentTypes.APPLICATION_JSON) - ); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(TestUtils.clientCreateVersion(content, ContentTypes.APPLICATION_JSON)); 
registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); @@ -441,7 +491,8 @@ public void testSearchOrderBy() throws Exception { for (int idx = 0; idx < 5; idx++) { String artifactId = "test-" + idx; Thread.sleep(idx == 0 ? 0 : 1500 / idx); - this.createArtifact(group, artifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON, null, null); + this.createArtifact(group, artifactId, ArtifactType.AVRO, content, ContentTypes.APPLICATION_JSON, + null, null); } ArtifactSearchResults results = registryClient.search().artifacts().get(config -> { @@ -472,4 +523,3 @@ void deleteRules() throws Exception { } } } - diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/LoadIT.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/LoadIT.java index 9e4a6c489f..cc8975b084 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/LoadIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/LoadIT.java @@ -5,106 +5,106 @@ import org.junit.jupiter.api.Disabled; /** - * Disabled because this is too flaky and it needs to be redesigned to align better - * with common usage of the registry, which is frequent reads sporadic writes. - * - * + * Disabled because this is too flaky and it needs to be redesigned to align better with common usage of the + * registry, which is frequent reads sporadic writes. 
*/ -//@Tag(SMOKE) +// @Tag(SMOKE) @Disabled @QuarkusIntegrationTest public class LoadIT extends ApicurioRegistryBaseIT { -// private static final Logger LOGGER = LoggerFactory.getLogger(LoadIT.class); -// -// private String base = TestUtils.generateArtifactId(); -// -// @Test -// void concurrentLoadTest() throws Exception { -// -// Queue artifactsQueue = new ConcurrentLinkedQueue<>(); -// AtomicBoolean deleteLoopFlag = new AtomicBoolean(true); -// AtomicBoolean allCreatedFlag = new AtomicBoolean(false); -// -// Future deleteingResult = CompletableFuture.supplyAsync(() -> { -// Throwable result = null; -// while (deleteLoopFlag.get()) { -// String artifactId = artifactsQueue.poll(); -// try { -// if (artifactId != null) { -// LOGGER.info("Delete artifact {} START", artifactId); -// registryClient.deleteArtifact(artifactId); -// TestUtils.assertClientError(ArtifactNotFoundException.class.getSimpleName(), 404, () -> registryClient.getArtifactMetaData(artifactId), true, errorCodeExtractor); -// LOGGER.info("Delete artifact {} FINISH", artifactId); -// } else if (allCreatedFlag.get()) { -// return null; -// } -// } catch (Exception e) { -// LOGGER.info("Requeue artifact {}", artifactId); -// result = e; -// artifactsQueue.offer(artifactId); -// } -// } -// LOGGER.info("All artifacts deleted"); -// return result; -// }, runnable -> new Thread(runnable).start()); -// -// try { -// List> createResults = IntStream.range(0, 250).mapToObj(i -> { -// return createArtifactAsync(registryClient, i) -// .thenAccept(m -> -// artifactsQueue.offer(m.getId()) -// ); -// }).collect(Collectors.toList()); -// -// CompletableFuture.allOf(createResults.toArray(new CompletableFuture[0])) -// .get(60, TimeUnit.SECONDS); -// LOGGER.info("All artifacts created"); -// allCreatedFlag.set(true); -// } catch (Exception e) { -// deleteLoopFlag.set(false); -// LOGGER.error("Error creating artifacts", e); -// throw e; -// } -// -// try { -// Throwable result = deleteingResult.get(120, 
TimeUnit.SECONDS); -// if (result != null) { -// deleteLoopFlag.set(false); -// throw new IllegalStateException("Error deleteing artifacts", result); -// } -// } catch (TimeoutException e) { -// LOGGER.info("Artifacts not deleted are {}", registryClient.listArtifacts().toString()); -// throw e; -// } -// -// assertEquals(0, registryClient.listArtifacts().size()); -// -// } -// -// CompletionStage createArtifactAsync(RegistryRestClient client, int i) { -// return CompletableFuture.supplyAsync(() -> { -// String artifactId = base + i; -// -// LOGGER.info("Create artifact {} START", artifactId); -// -// String artifactDefinition = "{\"type\":\"INVALID\",\"config\":\"invalid\"}"; -// ByteArrayInputStream artifactData = new ByteArrayInputStream(artifactDefinition.getBytes(StandardCharsets.UTF_8)); -// try { -// ArtifactMetaData amd = client.createArtifact(artifactId, ArtifactType.JSON, artifactData); -// -// // Make sure artifact is fully registered -// TestUtils.retry(() -> client.getArtifactMetaDataByGlobalId(amd.getGlobalId())); -// -// LOGGER.info("Create artifact {} FINISH", amd.getId()); -// assertEquals(artifactId, amd.getId()); -// Thread.sleep(1); -// return amd; -// } catch (Exception e) { -// LOGGER.error("Error creating artifact " + artifactId, e); -// throw new CompletionException("Error creating artifact", e); -// } -// }, runnable -> new Thread(runnable).start()); -// } + // private static final Logger LOGGER = LoggerFactory.getLogger(LoadIT.class); + // + // private String base = TestUtils.generateArtifactId(); + // + // @Test + // void concurrentLoadTest() throws Exception { + // + // Queue artifactsQueue = new ConcurrentLinkedQueue<>(); + // AtomicBoolean deleteLoopFlag = new AtomicBoolean(true); + // AtomicBoolean allCreatedFlag = new AtomicBoolean(false); + // + // Future deleteingResult = CompletableFuture.supplyAsync(() -> { + // Throwable result = null; + // while (deleteLoopFlag.get()) { + // String artifactId = artifactsQueue.poll(); + // try { 
+ // if (artifactId != null) { + // LOGGER.info("Delete artifact {} START", artifactId); + // registryClient.deleteArtifact(artifactId); + // TestUtils.assertClientError(ArtifactNotFoundException.class.getSimpleName(), 404, () -> + // registryClient.getArtifactMetaData(artifactId), true, errorCodeExtractor); + // LOGGER.info("Delete artifact {} FINISH", artifactId); + // } else if (allCreatedFlag.get()) { + // return null; + // } + // } catch (Exception e) { + // LOGGER.info("Requeue artifact {}", artifactId); + // result = e; + // artifactsQueue.offer(artifactId); + // } + // } + // LOGGER.info("All artifacts deleted"); + // return result; + // }, runnable -> new Thread(runnable).start()); + // + // try { + // List> createResults = IntStream.range(0, 250).mapToObj(i -> { + // return createArtifactAsync(registryClient, i) + // .thenAccept(m -> + // artifactsQueue.offer(m.getId()) + // ); + // }).collect(Collectors.toList()); + // + // CompletableFuture.allOf(createResults.toArray(new CompletableFuture[0])) + // .get(60, TimeUnit.SECONDS); + // LOGGER.info("All artifacts created"); + // allCreatedFlag.set(true); + // } catch (Exception e) { + // deleteLoopFlag.set(false); + // LOGGER.error("Error creating artifacts", e); + // throw e; + // } + // + // try { + // Throwable result = deleteingResult.get(120, TimeUnit.SECONDS); + // if (result != null) { + // deleteLoopFlag.set(false); + // throw new IllegalStateException("Error deleteing artifacts", result); + // } + // } catch (TimeoutException e) { + // LOGGER.info("Artifacts not deleted are {}", registryClient.listArtifacts().toString()); + // throw e; + // } + // + // assertEquals(0, registryClient.listArtifacts().size()); + // + // } + // + // CompletionStage createArtifactAsync(RegistryRestClient client, int i) { + // return CompletableFuture.supplyAsync(() -> { + // String artifactId = base + i; + // + // LOGGER.info("Create artifact {} START", artifactId); + // + // String artifactDefinition = 
"{\"type\":\"INVALID\",\"config\":\"invalid\"}"; + // ByteArrayInputStream artifactData = new + // ByteArrayInputStream(artifactDefinition.getBytes(StandardCharsets.UTF_8)); + // try { + // ArtifactMetaData amd = client.createArtifact(artifactId, ArtifactType.JSON, artifactData); + // + // // Make sure artifact is fully registered + // TestUtils.retry(() -> client.getArtifactMetaDataByGlobalId(amd.getGlobalId())); + // + // LOGGER.info("Create artifact {} FINISH", amd.getId()); + // assertEquals(artifactId, amd.getId()); + // Thread.sleep(1); + // return amd; + // } catch (Exception e) { + // LOGGER.error("Error creating artifact " + artifactId, e); + // throw new CompletionException("Error creating artifact", e); + // } + // }, runnable -> new Thread(runnable).start()); + // } } diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/MetadataIT.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/MetadataIT.java index 1ccb14a5be..a474d8fa27 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/MetadataIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/MetadataIT.java @@ -33,14 +33,18 @@ void getAndUpdateMetadataOfArtifact() throws Exception { String artifactId = TestUtils.generateArtifactId(); String artifactDefinition = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactDefinition, ContentTypes.APPLICATION_JSON, null, null); + var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactDefinition, + ContentTypes.APPLICATION_JSON, null, null); LOGGER.info("Created artifact {} with metadata {}", artifactId, caResponse.getArtifact()); - ArtifactMetaData artifactMetaData = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).get(); + ArtifactMetaData artifactMetaData = 
registryClient.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).get(); LOGGER.info("Got metadata of artifact with ID {}: {}", artifactId, artifactMetaData); - assertThat(artifactMetaData.getCreatedOn().toInstant().toEpochMilli(), OrderingComparison.greaterThan(0L)); - assertThat(artifactMetaData.getModifiedOn().toInstant().toEpochMilli(), OrderingComparison.greaterThan(0L)); + assertThat(artifactMetaData.getCreatedOn().toInstant().toEpochMilli(), + OrderingComparison.greaterThan(0L)); + assertThat(artifactMetaData.getModifiedOn().toInstant().toEpochMilli(), + OrderingComparison.greaterThan(0L)); assertThat(artifactMetaData.getArtifactId(), is(artifactId)); assertThat(artifactMetaData.getArtifactType(), is("AVRO")); @@ -68,19 +72,23 @@ void getAndUpdateMetadataOfArtifactSpecificVersion() throws Exception { String artifactId = TestUtils.generateArtifactId(); String artifactDefinition = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactDefinition, ContentTypes.APPLICATION_JSON, null, (ca) -> { - ca.getFirstVersion().setName("Version 1 Name"); - }); + var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactDefinition, + ContentTypes.APPLICATION_JSON, null, (ca) -> { + ca.getFirstVersion().setName("Version 1 Name"); + }); LOGGER.info("Created artifact {} with metadata {}", artifactId, caResponse.getArtifact()); String artifactUpdateDefinition = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"; - var metaData = createArtifactVersion(groupId, artifactId, artifactUpdateDefinition, ContentTypes.APPLICATION_JSON, null); + var metaData = createArtifactVersion(groupId, artifactId, artifactUpdateDefinition, + ContentTypes.APPLICATION_JSON, null); LOGGER.info("Artifact with ID {} was updated: {}", artifactId, metaData); - retryOp((rc) -> 
rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("2").get()); + retryOp((rc) -> rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("2").get()); - VersionMetaData versionMetaData = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("2").get(); + VersionMetaData versionMetaData = registryClient.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression("2").get(); LOGGER.info("Got metadata of artifact with ID {}: {}", artifactId, versionMetaData); @@ -91,14 +99,16 @@ void getAndUpdateMetadataOfArtifactSpecificVersion() throws Exception { emd.setName("Version 2 Name"); emd.setDescription("The description of the artifact."); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("2").put(emd); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .byVersionExpression("2").put(emd); - versionMetaData = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression("1").get(); + versionMetaData = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression("1").get(); LOGGER.info("Got metadata of artifact with ID {} version 1: {}", artifactId, versionMetaData); assertThat(versionMetaData.getVersion(), is("1")); assertThat(versionMetaData.getArtifactType(), is("AVRO")); assertThat(versionMetaData.getName(), is("Version 1 Name")); - assertThat(versionMetaData.getDescription(), nullValue()); + assertThat(versionMetaData.getDescription(), nullValue()); } } diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/RulesResourceIT.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/RulesResourceIT.java index 152717eeb1..c16acc53e7 
100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/RulesResourceIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/apicurio/RulesResourceIT.java @@ -55,12 +55,12 @@ void createAndDeleteGlobalRules() throws Exception { // Check the rules were created. List rules = registryClient.admin().rules().get(); assertThat(rules.size(), is(3)); - + // Check the rules were configured properly. Rule ruleConfig = registryClient.admin().rules().byRuleType(RuleType.VALIDITY.name()).get(); assertNotNull(ruleConfig); assertEquals(ValidityLevel.SYNTAX_ONLY.name(), ruleConfig.getConfig()); - + ruleConfig = registryClient.admin().rules().byRuleType(RuleType.INTEGRITY.name()).get(); assertNotNull(ruleConfig); assertEquals(IntegrityLevel.ALL_REFS_MAPPED.name(), ruleConfig.getConfig()); @@ -77,13 +77,17 @@ void createAndDeleteGlobalRules() throws Exception { assertThat(rules.size(), is(0)); // Should be null/error (never configured the COMPATIBILITY rule) - retryAssertClientError("RuleNotFoundException", 404, (rc) -> rc.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()).get(), errorCodeExtractor); + retryAssertClientError("RuleNotFoundException", 404, + (rc) -> rc.admin().rules().byRuleType(RuleType.COMPATIBILITY.name()).get(), + errorCodeExtractor); // Should be null/error (deleted the VALIDITY rule) - retryAssertClientError("RuleNotFoundException", 404, (rc) -> rc.admin().rules().byRuleType(RuleType.VALIDITY.name()).get(), errorCodeExtractor); + retryAssertClientError("RuleNotFoundException", 404, + (rc) -> rc.admin().rules().byRuleType(RuleType.VALIDITY.name()).get(), errorCodeExtractor); // Should be null/error (deleted the INTEGRITY rule) - retryAssertClientError("RuleNotFoundException", 404, (rc) -> rc.admin().rules().byRuleType(RuleType.INTEGRITY.name()).get(), errorCodeExtractor); + retryAssertClientError("RuleNotFoundException", 404, + (rc) -> rc.admin().rules().byRuleType(RuleType.INTEGRITY.name()).get(), 
errorCodeExtractor); } @Test @@ -98,29 +102,38 @@ void createAndValidateGlobalRules() throws Exception { TestUtils.retry(() -> registryClient.admin().rules().post(createRule)); LOGGER.info("Created rule: {} - {}", createRule.getRuleType(), createRule.getConfig()); - TestUtils.assertClientError("RuleAlreadyExistsException", 409, () -> registryClient.admin().rules().post(createRule), true, errorCodeExtractor); + TestUtils.assertClientError("RuleAlreadyExistsException", 409, + () -> registryClient.admin().rules().post(createRule), true, errorCodeExtractor); String invalidArtifactDefinition = "record\ntest"; String artifactId = TestUtils.generateArtifactId(); LOGGER.info("Invalid artifact sent {}", invalidArtifactDefinition); - TestUtils.assertClientError("RuleViolationException", 409, () -> - createArtifact(groupId, artifactId, ArtifactType.AVRO, invalidArtifactDefinition, ContentTypes.APPLICATION_JSON, null, null), errorCodeExtractor); - TestUtils.assertClientError("ArtifactNotFoundException", 404, () -> - createArtifactVersion(groupId, artifactId, invalidArtifactDefinition, ContentTypes.APPLICATION_JSON, null), errorCodeExtractor); + TestUtils.assertClientError( + "RuleViolationException", 409, () -> createArtifact(groupId, artifactId, ArtifactType.AVRO, + invalidArtifactDefinition, ContentTypes.APPLICATION_JSON, null, null), + errorCodeExtractor); + TestUtils + .assertClientError( + "ArtifactNotFoundException", 404, () -> createArtifactVersion(groupId, artifactId, + invalidArtifactDefinition, ContentTypes.APPLICATION_JSON, null), + errorCodeExtractor); String artifactData = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"long\"}]}"; - var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, ContentTypes.APPLICATION_JSON, null, null); + var caResponse = createArtifact(groupId, artifactId, ArtifactType.AVRO, artifactData, + ContentTypes.APPLICATION_JSON, null, null); LOGGER.info("Created artifact {} 
with metadata {}", artifactId, caResponse.getArtifact().toString()); artifactData = "{\"type\":\"record\",\"name\":\"myrecord2\",\"fields\":[{\"name\":\"bar\",\"type\":\"long\"}]}"; - var metaData = createArtifactVersion(groupId, artifactId, artifactData, ContentTypes.APPLICATION_JSON, null); + var metaData = createArtifactVersion(groupId, artifactId, artifactData, ContentTypes.APPLICATION_JSON, + null); LOGGER.info("Artifact with Id:{} was updated:{}", artifactId, metaData.toString()); retryOp((rc) -> { List artifactVersions = listArtifactVersions(rc, groupId, artifactId); - LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId, artifactVersions.toString()); + LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId, + artifactVersions.toString()); assertThat(artifactVersions, hasItems("1", "2")); }); } @@ -133,46 +146,59 @@ void createAndValidateArtifactRule() throws Exception { String artifactDefinition = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; String artifactData = artifactDefinition; - var caResponse = createArtifact(groupId, artifactId1, ArtifactType.AVRO, artifactData, ContentTypes.APPLICATION_JSON, null, null); + var caResponse = createArtifact(groupId, artifactId1, ArtifactType.AVRO, artifactData, + ContentTypes.APPLICATION_JSON, null, null); LOGGER.info("Created artifact {} with metadata {}", artifactId1, caResponse.getArtifact()); String artifactId2 = TestUtils.generateArtifactId(); artifactDefinition = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - caResponse = createArtifact(groupId, artifactId2, ArtifactType.AVRO, artifactDefinition, ContentTypes.APPLICATION_JSON, null, null); + caResponse = createArtifact(groupId, artifactId2, ArtifactType.AVRO, artifactDefinition, + ContentTypes.APPLICATION_JSON, null, null); LOGGER.info("Created artifact {} with metadata {}", artifactId2, 
caResponse.getArtifact()); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("SYNTAX_ONLY"); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().post(createRule); - LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), createRule.getConfig(), artifactId1); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules() + .post(createRule); + LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), createRule.getConfig(), + artifactId1); - TestUtils.assertClientError("RuleAlreadyExistsException", 409, () -> - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().post(createRule), true, errorCodeExtractor); + TestUtils.assertClientError( + "RuleAlreadyExistsException", 409, () -> registryClient.groups().byGroupId(groupId) + .artifacts().byArtifactId(artifactId1).rules().post(createRule), + true, errorCodeExtractor); String invalidArtifactDefinition = "record\ntest"; - TestUtils.assertClientError("RuleViolationException", 409, () -> - createArtifactVersion(groupId, artifactId1, invalidArtifactDefinition, ContentTypes.APPLICATION_JSON, null), errorCodeExtractor); + TestUtils + .assertClientError( + "RuleViolationException", 409, () -> createArtifactVersion(groupId, artifactId1, + invalidArtifactDefinition, ContentTypes.APPLICATION_JSON, null), + errorCodeExtractor); String updatedArtifactData = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"bar\",\"type\":\"long\"}]}"; artifactData = updatedArtifactData; - var metaData = createArtifactVersion(groupId, artifactId2, artifactData, ContentTypes.APPLICATION_JSON, null); + var metaData = createArtifactVersion(groupId, artifactId2, artifactData, + ContentTypes.APPLICATION_JSON, null); LOGGER.info("Artifact with ID {} was updated: {}", artifactId2, metaData.toString()); artifactData = updatedArtifactData; 
- metaData = createArtifactVersion(groupId, artifactId1, artifactData, ContentTypes.APPLICATION_JSON, null); + metaData = createArtifactVersion(groupId, artifactId1, artifactData, ContentTypes.APPLICATION_JSON, + null); LOGGER.info("Artifact with ID {} was updated: {}", artifactId1, metaData.toString()); retryOp((rc) -> { List artifactVersions = listArtifactVersions(rc, groupId, artifactId1); - LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId1, artifactVersions.toString()); + LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId1, + artifactVersions.toString()); assertThat(artifactVersions, hasItems("1", "2")); artifactVersions = listArtifactVersions(rc, groupId, artifactId2); - LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId2, artifactVersions.toString()); + LOGGER.info("Available versions of artifact with ID {} are: {}", artifactId2, + artifactVersions.toString()); assertThat(artifactVersions, hasItems("1", "2")); }); } @@ -184,48 +210,59 @@ void testDeleteAllArtifactRules() throws Exception { String artifactId1 = TestUtils.generateArtifactId(); String artifactDefinition = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - var caResponse = createArtifact(groupId, artifactId1, ArtifactType.AVRO, artifactDefinition, ContentTypes.APPLICATION_JSON, null, null); + var caResponse = createArtifact(groupId, artifactId1, ArtifactType.AVRO, artifactDefinition, + ContentTypes.APPLICATION_JSON, null, null); LOGGER.info("Created artifact {} with metadata {}", artifactId1, caResponse.getArtifact()); // Validity rule CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig(ValidityLevel.SYNTAX_ONLY.name()); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().post(createRule); - LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), 
createRule.getConfig(), artifactId1); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules() + .post(createRule); + LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), createRule.getConfig(), + artifactId1); // Compatibility rule createRule = new CreateRule(); createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig(CompatibilityLevel.FULL.name()); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().post(createRule); - LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), createRule.getConfig(), artifactId1); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules() + .post(createRule); + LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), createRule.getConfig(), + artifactId1); // Integrity rule createRule = new CreateRule(); createRule.setRuleType(RuleType.INTEGRITY); createRule.setConfig(IntegrityLevel.NO_DUPLICATES.name()); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().post(createRule); - LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), createRule.getConfig(), artifactId1); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules() + .post(createRule); + LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), createRule.getConfig(), + artifactId1); // Check that all the rules exist. 
- List rules = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().get(); + List rules = registryClient.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId1).rules().get(); assertThat(rules.size(), is(3)); - + // Check that the Integrity rule is configured - Rule rule = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().byRuleType(RuleType.INTEGRITY.name()).get(); + Rule rule = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules() + .byRuleType(RuleType.INTEGRITY.name()).get(); assertThat(rule.getConfig(), is(IntegrityLevel.NO_DUPLICATES.name())); - + // Delete all rules. registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().delete(); // Check that no rules exist. - rules = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().get(); + rules = registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules() + .get(); assertThat(rules.size(), is(0)); // Check that the integrity rule is not found. 
TestUtils.assertClientError("RuleNotFoundException", 404, () -> { - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().byRuleType(RuleType.INTEGRITY.name()).get(); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules() + .byRuleType(RuleType.INTEGRITY.name()).get(); }, errorCodeExtractor); } @@ -236,27 +273,35 @@ void testRulesDeletedWithArtifact() throws Exception { String artifactId1 = TestUtils.generateArtifactId(); String artifactDefinition = "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; - var caResponse = createArtifact(groupId, artifactId1, ArtifactType.AVRO, artifactDefinition, ContentTypes.APPLICATION_JSON, null, null); + var caResponse = createArtifact(groupId, artifactId1, ArtifactType.AVRO, artifactDefinition, + ContentTypes.APPLICATION_JSON, null, null); LOGGER.info("Created artifact {} with metadata {}", artifactId1, caResponse.getArtifact()); CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("SYNTAX_ONLY"); - registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().post(createRule); - LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), createRule.getConfig(), artifactId1); + registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules() + .post(createRule); + LOGGER.info("Created rule: {} - {} for artifact {}", createRule.getRuleType(), createRule.getConfig(), + artifactId1); registryClient.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).delete(); retryOp((rc) -> { - TestUtils.assertClientError("ArtifactNotFoundException", 404, () -> rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).get(), errorCodeExtractor); + TestUtils.assertClientError("ArtifactNotFoundException", 404, + () -> 
rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).get(), + errorCodeExtractor); assertThat(rc.groups().byGroupId(groupId).artifacts().get().getCount(), is(0)); - TestUtils.assertClientError("ArtifactNotFoundException", 404, () -> - rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().get(), errorCodeExtractor); - TestUtils.assertClientError("ArtifactNotFoundException", 404, () -> - rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().byRuleType(RuleType.VALIDITY.name()).get(), errorCodeExtractor); + TestUtils.assertClientError("ArtifactNotFoundException", 404, + () -> rc.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId1).rules().get(), + errorCodeExtractor); + TestUtils.assertClientError( + "ArtifactNotFoundException", 404, () -> rc.groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId1).rules().byRuleType(RuleType.VALIDITY.name()).get(), + errorCodeExtractor); }); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/ConfluentConfigUtils.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/ConfluentConfigUtils.java index 86141f14cb..5c98b65d91 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/ConfluentConfigUtils.java +++ b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/ConfluentConfigUtils.java @@ -13,7 +13,8 @@ public class ConfluentConfigUtils { public static Response testCompatibility(String body, String schemaName, int returnCode) { try { - URL url = new URL(ApicurioRegistryBaseIT.getRegistryApiUrl() + "/ccompat/v7/compatibility/subjects/" + schemaName + "/versions/latest"); + URL url = new URL(ApicurioRegistryBaseIT.getRegistryApiUrl() + + "/ccompat/v7/compatibility/subjects/" + schemaName + "/versions/latest"); return BaseHttpUtils.rulesPostRequest(RestConstants.SR, body, url, returnCode); } catch (MalformedURLException e) { throw new 
UncheckedIOException(e); @@ -23,7 +24,8 @@ public static Response testCompatibility(String body, String schemaName, int ret public static Response createGlobalCompatibilityConfig(String typeOfCompatibility) { try { URL url = new URL(ApicurioRegistryBaseIT.getRegistryApiUrl() + "/ccompat/v7/config"); - return BaseHttpUtils.putRequest(RestConstants.SR, "{\"compatibility\":\"" + typeOfCompatibility + "\"}", url, 200); + return BaseHttpUtils.putRequest(RestConstants.SR, + "{\"compatibility\":\"" + typeOfCompatibility + "\"}", url, 200); } catch (MalformedURLException e) { throw new UncheckedIOException(e); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/ConfluentSubjectsUtils.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/ConfluentSubjectsUtils.java index 338e855075..1eefa2b58d 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/ConfluentSubjectsUtils.java +++ b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/ConfluentSubjectsUtils.java @@ -11,23 +11,26 @@ import static io.apicurio.tests.utils.BaseHttpUtils.putRequest; - public class ConfluentSubjectsUtils { public static Response getAllSchemas(int returnCode) { - return BaseHttpUtils.postRequest(RestConstants.JSON, "", getCCompatURL("/ccompat/v7/subjects"), returnCode); + return BaseHttpUtils.postRequest(RestConstants.JSON, "", getCCompatURL("/ccompat/v7/subjects"), + returnCode); } public static Response getLatestVersionSchema(String nameOfSchema) { - return BaseHttpUtils.postRequest(RestConstants.JSON, "", getCCompatURL("/ccompat/v7/subjects/" + nameOfSchema + "/versions/latest"), 200); + return BaseHttpUtils.postRequest(RestConstants.JSON, "", + getCCompatURL("/ccompat/v7/subjects/" + nameOfSchema + "/versions/latest"), 200); } public static Response createSchema(String schemeDefinition, String schemaName, int returnCode) { - return BaseHttpUtils.postRequest(RestConstants.JSON, 
schemeDefinition, getCCompatURL("/ccompat/v7/subjects/" + schemaName + "/versions"), returnCode); + return BaseHttpUtils.postRequest(RestConstants.JSON, schemeDefinition, + getCCompatURL("/ccompat/v7/subjects/" + schemaName + "/versions"), returnCode); } public static Response updateSchemaMetadata(String schemaName, String metadata, int returnCode) { - return putRequest(RestConstants.JSON, metadata, getCCompatURL("/ccompat/v7/subjects/" + schemaName + "/meta"), returnCode); + return putRequest(RestConstants.JSON, metadata, + getCCompatURL("/ccompat/v7/subjects/" + schemaName + "/meta"), returnCode); } private static URL getCCompatURL(String ccompatPath) { diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/MetadataConfluentIT.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/MetadataConfluentIT.java index 3000a6ee9a..e8ad3a3174 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/MetadataConfluentIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/MetadataConfluentIT.java @@ -1,7 +1,7 @@ package io.apicurio.tests.smokeTests.confluent; -import io.apicurio.tests.ConfluentBaseIT; import io.apicurio.registry.utils.tests.TestUtils; +import io.apicurio.tests.ConfluentBaseIT; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.confluent.kafka.schemaregistry.client.SchemaMetadata; @@ -28,7 +28,8 @@ public class MetadataConfluentIT extends ConfluentBaseIT { @Test @Tag(ACCEPTANCE) void getAndUpdateMetadataOfSchema() throws IOException, RestClientException, TimeoutException { - ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); + ParsedSchema schema = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); String schemaSubject = 
TestUtils.generateArtifactId(); int schemaId = createArtifactViaConfluentClient(schema, schemaSubject); @@ -40,7 +41,10 @@ void getAndUpdateMetadataOfSchema() throws IOException, RestClientException, Tim assertThat(schemaMetadata.getId(), is(schemaId)); assertThat(schemaMetadata.getVersion(), is(1)); - assertThat("{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}", is(schemaMetadata.getSchema())); - // IMPORTANT NOTE: we can not test schema metadata, because they are mapping on the same endpoint when we are creating the schema... + assertThat( + "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}", + is(schemaMetadata.getSchema())); + // IMPORTANT NOTE: we can not test schema metadata, because they are mapping on the same endpoint when + // we are creating the schema... } } diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/RulesResourceConfluentIT.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/RulesResourceConfluentIT.java index 07be6527cd..fbd7262c9a 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/RulesResourceConfluentIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/RulesResourceConfluentIT.java @@ -1,8 +1,8 @@ package io.apicurio.tests.smokeTests.confluent; -import io.apicurio.tests.ConfluentBaseIT; import io.apicurio.registry.rest.client.models.RuleType; import io.apicurio.registry.utils.tests.TestUtils; +import io.apicurio.tests.ConfluentBaseIT; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.quarkus.test.junit.QuarkusIntegrationTest; diff --git a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/SchemasConfluentIT.java b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/SchemasConfluentIT.java index 
a8901c9b32..407710f622 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/SchemasConfluentIT.java +++ b/integration-tests/src/test/java/io/apicurio/tests/smokeTests/confluent/SchemasConfluentIT.java @@ -1,24 +1,24 @@ package io.apicurio.tests.smokeTests.confluent; import io.apicurio.registry.rest.client.models.CreateRule; -import io.apicurio.tests.ConfluentBaseIT; -import io.apicurio.tests.utils.ArtifactUtils; -import io.apicurio.tests.utils.Constants; import io.apicurio.registry.rest.client.models.Rule; import io.apicurio.registry.rest.client.models.RuleType; import io.apicurio.registry.utils.tests.TestUtils; +import io.apicurio.tests.ConfluentBaseIT; +import io.apicurio.tests.utils.ArtifactUtils; +import io.apicurio.tests.utils.Constants; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import io.quarkus.test.junit.QuarkusIntegrationTest; import io.restassured.response.Response; +import jakarta.ws.rs.WebApplicationException; import org.apache.avro.SchemaParseException; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import jakarta.ws.rs.WebApplicationException; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeoutException; @@ -41,13 +41,16 @@ public class SchemasConfluentIT extends ConfluentBaseIT { void createAndUpdateSchema() throws Exception { String artifactId = TestUtils.generateArtifactId(); - ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo1\",\"type\":\"string\"}]}"); + ParsedSchema schema = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo1\",\"type\":\"string\"}]}"); createArtifactViaConfluentClient(schema, artifactId); - ParsedSchema updatedSchema = new 
AvroSchema("{\"type\":\"record\",\"name\":\"myrecord2\",\"fields\":[{\"name\":\"foo2\",\"type\":\"long\"}]}"); + ParsedSchema updatedSchema = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord2\",\"fields\":[{\"name\":\"foo2\",\"type\":\"long\"}]}"); createArtifactViaConfluentClient(updatedSchema, artifactId); - assertThrows(SchemaParseException.class, () -> new AvroSchema("record\ntest")); + assertThrows(SchemaParseException.class, + () -> new AvroSchema("record\ntest")); assertThat(confluentService.getAllVersions(artifactId), hasItems(1, 2)); confluentService.deleteSubject(artifactId); @@ -62,7 +65,8 @@ void createAndDeleteMultipleSchemas() throws IOException, RestClientException, T for (int i = 0; i < 50; i++) { String name = "myrecord" + i; String subjectName = prefix + i; - ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"" + name + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); + ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"" + name + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); createArtifactViaConfluentClient(schema, subjectName); } @@ -86,9 +90,11 @@ void createAndDeleteMultipleSchemas() throws IOException, RestClientException, T void deleteSchemasSpecificVersion() throws Exception { String artifactId = TestUtils.generateArtifactId(); - ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"mynewrecord\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); + ParsedSchema schema = new AvroSchema( + "{\"type\":\"record\",\"name\":\"mynewrecord\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); createArtifactViaConfluentClient(schema, artifactId); - schema = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecordx\",\"fields\":[{\"name\":\"foo1\",\"type\":\"string\"}]}"); + schema = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecordx\",\"fields\":[{\"name\":\"foo1\",\"type\":\"string\"}]}"); createArtifactViaConfluentClient(schema, 
artifactId); List schemeVersions = confluentService.getAllVersions(artifactId); @@ -101,20 +107,21 @@ void deleteSchemasSpecificVersion() throws Exception { LOGGER.info("Available version of schema with name:{} are {}", artifactId, schemeVersions); assertThat(schemeVersions, hasItems(1)); - schema = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecordx\",\"fields\":[{\"name\":\"foo" + 4 + "\",\"type\":\"string\"}]}"); + schema = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecordx\",\"fields\":[{\"name\":\"foo" + 4 + + "\",\"type\":\"string\"}]}"); createArtifactViaConfluentClient(schema, artifactId); confluentService.deleteSchemaVersion(artifactId, "2"); confluentService.deleteSchemaVersion(artifactId, "2", true); - - TestUtils.waitFor("all specific schema version deletion", Constants.POLL_INTERVAL, Constants.TIMEOUT_GLOBAL, () -> { - try { - return confluentService.getAllVersions(artifactId).size() == 2; - } catch (IOException | RestClientException e) { - return false; - } - }); + TestUtils.waitFor("all specific schema version deletion", Constants.POLL_INTERVAL, + Constants.TIMEOUT_GLOBAL, () -> { + try { + return confluentService.getAllVersions(artifactId).size() == 2; + } catch (IOException | RestClientException e) { + return false; + } + }); schemeVersions = confluentService.getAllVersions(artifactId); @@ -130,15 +137,18 @@ void deleteSchemasSpecificVersion() throws Exception { void createSchemaSpecifyVersion() throws Exception { String artifactId = TestUtils.generateArtifactId(); - ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); + ParsedSchema schema = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); createArtifactViaConfluentClient(schema, artifactId); - ParsedSchema updatedArtifact = new 
AvroSchema("{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); + ParsedSchema updatedArtifact = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"bar\",\"type\":\"string\"}]}"); createArtifactViaConfluentClient(updatedArtifact, artifactId); List schemaVersions = confluentService.getAllVersions(artifactId); - LOGGER.info("Available versions of schema with NAME {} are: {}", artifactId, schemaVersions.toString()); + LOGGER.info("Available versions of schema with NAME {} are: {}", artifactId, + schemaVersions.toString()); assertThat(schemaVersions, hasItems(1, 2)); confluentService.deleteSubject(artifactId); @@ -154,11 +164,13 @@ void deleteNonexistingSchema() { @Test void createInvalidSchemaDefinition() throws Exception { String subjectName = TestUtils.generateArtifactId(); - ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); + ParsedSchema schema = new AvroSchema( + "{\"type\":\"record\",\"name\":\"myrecord1\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); createArtifactViaConfluentClient(schema, subjectName); TestUtils.waitFor("artifactCreated", Constants.POLL_INTERVAL, Constants.TIMEOUT_GLOBAL, () -> { - return registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName).get() != null; + return registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName) + .get() != null; }); String invalidSchema = "{\"schema\":\"{\\\"type\\\": \\\"bloop\\\"}\"}"; @@ -166,24 +178,27 @@ void createInvalidSchemaDefinition() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.COMPATIBILITY); createRule.setConfig("BACKWARD"); - registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName).rules().post(createRule); + 
registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName).rules() + .post(createRule); TestUtils.waitFor("artifact rule created", Constants.POLL_INTERVAL, Constants.TIMEOUT_GLOBAL, () -> { try { - Rule r = registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName).rules().byRuleType(RuleType.COMPATIBILITY.name()).get(); + Rule r = registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName) + .rules().byRuleType(RuleType.COMPATIBILITY.name()).get(); return r != null && r.getConfig() != null && r.getConfig().equalsIgnoreCase("BACKWARD"); } catch (WebApplicationException e) { return false; } }); ConfluentSubjectsUtils.createSchema(invalidSchema, subjectName, 422); - } + } @Test void createConfluentQueryApicurio() throws IOException, RestClientException, TimeoutException { String name = "schemaname"; String subjectName = TestUtils.generateArtifactId(); - String rawSchema = "{\"type\":\"record\",\"name\":\"" + name + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; + String rawSchema = "{\"type\":\"record\",\"name\":\"" + name + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"; ParsedSchema schema = new AvroSchema(rawSchema); createArtifactViaConfluentClient(schema, subjectName); @@ -199,22 +214,27 @@ void testCreateDeleteSchemaRuleIsDeleted() throws Exception { String name = "schemaname"; String subjectName = TestUtils.generateArtifactId(); - ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"" + name + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); + ParsedSchema schema = new AvroSchema("{\"type\":\"record\",\"name\":\"" + name + + "\",\"fields\":[{\"name\":\"foo\",\"type\":\"string\"}]}"); long contentId = createArtifactViaConfluentClient(schema, subjectName); assertThat(1, is(confluentService.getAllSubjects().size())); - TestUtils.waitFor("waiting for content to be created", Constants.POLL_INTERVAL, Constants.TIMEOUT_GLOBAL, () -> { - try { - 
return registryClient.ids().contentIds().byContentId(contentId).get().readAllBytes().length > 0; - } catch (IOException cnfe) { - return false; - } - }); + TestUtils.waitFor("waiting for content to be created", Constants.POLL_INTERVAL, + Constants.TIMEOUT_GLOBAL, () -> { + try { + return registryClient.ids().contentIds().byContentId(contentId).get() + .readAllBytes().length > 0; + } catch (IOException cnfe) { + return false; + } + }); TestUtils.waitFor("artifact created", Constants.POLL_INTERVAL, Constants.TIMEOUT_GLOBAL, () -> { try { - return registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName).versions().byVersionExpression("branch=latest").content().get().readAllBytes().length > 0; + return registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName) + .versions().byVersionExpression("branch=latest").content().get() + .readAllBytes().length > 0; } catch (WebApplicationException e) { return false; } catch (IOException e) { @@ -225,18 +245,21 @@ void testCreateDeleteSchemaRuleIsDeleted() throws Exception { CreateRule createRule = new CreateRule(); createRule.setRuleType(RuleType.VALIDITY); createRule.setConfig("FULL"); - registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName).rules().post(createRule); + registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName).rules() + .post(createRule); TestUtils.waitFor("artifact rule created", Constants.POLL_INTERVAL, Constants.TIMEOUT_GLOBAL, () -> { try { - Rule r = registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName).rules().byRuleType(RuleType.VALIDITY.name()).get(); + Rule r = registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName) + .rules().byRuleType(RuleType.VALIDITY.name()).get(); return r != null && r.getConfig() != null && r.getConfig().equalsIgnoreCase("FULL"); } catch (WebApplicationException e) { return false; } }); - List rules = 
registryClient.groups().byGroupId("default").artifacts().byArtifactId(subjectName).rules().get(); + List rules = registryClient.groups().byGroupId("default").artifacts() + .byArtifactId(subjectName).rules().get(); assertThat(1, is(rules.size())); confluentService.deleteSubject(subjectName); @@ -252,11 +275,18 @@ void testCreateDeleteSchemaRuleIsDeleted() throws Exception { confluentService.deleteSubject(subjectName, true); retryOp((rc) -> { - TestUtils.assertClientError("ArtifactNotFoundException", 404, () -> rc.groups().byGroupId("default").artifacts().byArtifactId(subjectName).get(), errorCodeExtractor); - TestUtils.assertClientError("ArtifactNotFoundException", 404, () -> rc.groups().byGroupId("default").artifacts().byArtifactId(subjectName).rules().get(), errorCodeExtractor); - TestUtils.assertClientError("ArtifactNotFoundException", 404, () -> rc.groups().byGroupId("default").artifacts().byArtifactId(subjectName).rules().byRuleType(rules.get(0).name()).get(), errorCodeExtractor); + TestUtils.assertClientError("ArtifactNotFoundException", 404, + () -> rc.groups().byGroupId("default").artifacts().byArtifactId(subjectName).get(), + errorCodeExtractor); + TestUtils.assertClientError("ArtifactNotFoundException", 404, () -> rc.groups() + .byGroupId("default").artifacts().byArtifactId(subjectName).rules().get(), + errorCodeExtractor); + TestUtils.assertClientError( + "ArtifactNotFoundException", 404, () -> rc.groups().byGroupId("default").artifacts() + .byArtifactId(subjectName).rules().byRuleType(rules.get(0).name()).get(), + errorCodeExtractor); }); - //if rule was actually deleted creating same artifact again shouldn't fail + // if rule was actually deleted creating same artifact again shouldn't fail createArtifactViaConfluentClient(schema, subjectName); assertThat(1, is(confluentService.getAllSubjects().size())); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/AbstractTestDataInitializer.java 
b/integration-tests/src/test/java/io/apicurio/tests/utils/AbstractTestDataInitializer.java index 14e91e1ba2..8dba0034e4 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/AbstractTestDataInitializer.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/AbstractTestDataInitializer.java @@ -34,7 +34,7 @@ public Map start() { @Override public void stop() { - //Once the data is set, stop the old registry before running the tests. + // Once the data is set, stop the old registry before running the tests. if (registryContainer != null && registryContainer.isRunning()) { registryContainer.stop(); } @@ -43,14 +43,13 @@ public void stop() { public String startRegistryApplication(String imageName) { int hostPort = 8081; int containerExposedPort = 8081; - Consumer cmd = e -> e.withPortBindings(new PortBinding(Ports.Binding.bindPort(hostPort), new ExposedPort(containerExposedPort))); + Consumer cmd = e -> e.withPortBindings( + new PortBinding(Ports.Binding.bindPort(hostPort), new ExposedPort(containerExposedPort))); registryContainer = new GenericContainer<>(imageName) - .withEnv(Map.of( - "QUARKUS_HTTP_PORT", "8081", - "REGISTRY_APIS_V2_DATE_FORMAT","yyyy-MM-dd'T'HH:mm:ss'Z'")) - .withExposedPorts(containerExposedPort) - .withCreateContainerCmdModifier(cmd); + .withEnv(Map.of("QUARKUS_HTTP_PORT", "8081", "REGISTRY_APIS_V2_DATE_FORMAT", + "yyyy-MM-dd'T'HH:mm:ss'Z'")) + .withExposedPorts(containerExposedPort).withCreateContainerCmdModifier(cmd); registryContainer.start(); registryContainer.waitingFor(Wait.forLogMessage(".*Installed features:*", 1)); diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/ArtifactUtils.java b/integration-tests/src/test/java/io/apicurio/tests/utils/ArtifactUtils.java index 7b9d536f57..a1591f14ff 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/ArtifactUtils.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/ArtifactUtils.java @@ -11,12 +11,19 @@ public class 
ArtifactUtils { public static Response getArtifact(String groupId, String artifactId, String version, int returnCode) { - return - BaseHttpUtils.getRequest(RestConstants.JSON, ApicurioRegistryBaseIT.getRegistryV3ApiUrl() + "/groups/" + encodeURIComponent(groupId) + "/artifacts/" + encodeURIComponent(artifactId) + "/versions/" + version + "/content", returnCode); + return BaseHttpUtils.getRequest(RestConstants.JSON, + ApicurioRegistryBaseIT.getRegistryV3ApiUrl() + "/groups/" + encodeURIComponent(groupId) + + "/artifacts/" + encodeURIComponent(artifactId) + "/versions/" + version + + "/content", + returnCode); } - public static Response createArtifact(String groupId, String artifactId, String artifact, int returnCode) { - return BaseHttpUtils.artifactPostRequest(artifactId, RestConstants.JSON, artifact, ApicurioRegistryBaseIT.getRegistryV3ApiUrl() + "/groups/" + encodeURIComponent(groupId) + "/artifacts", returnCode); + public static Response createArtifact(String groupId, String artifactId, String artifact, + int returnCode) { + return BaseHttpUtils.artifactPostRequest(artifactId, RestConstants.JSON, artifact, + ApicurioRegistryBaseIT.getRegistryV3ApiUrl() + "/groups/" + encodeURIComponent(groupId) + + "/artifacts", + returnCode); } // ================================================================================ diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/AvroGenericRecordSchemaFactory.java b/integration-tests/src/test/java/io/apicurio/tests/utils/AvroGenericRecordSchemaFactory.java index 6431dd85e0..1dcde815ef 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/AvroGenericRecordSchemaFactory.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/AvroGenericRecordSchemaFactory.java @@ -49,22 +49,12 @@ public AvroGenericRecordSchemaFactory(List schemaKeys) { public Schema generateSchema() { if (schema == null) { - StringBuilder builder = new StringBuilder() - .append("{\"type\":\"record\"") - .append(",") - 
.append("\"name\":") - .append("\"") - .append(recordName) - .append("\""); + StringBuilder builder = new StringBuilder().append("{\"type\":\"record\"").append(",") + .append("\"name\":").append("\"").append(recordName).append("\""); if (this.namespace != null) { - builder.append(",") - .append("\"namespace\":") - .append("\"") - .append(this.namespace) - .append("\""); + builder.append(",").append("\"namespace\":").append("\"").append(this.namespace).append("\""); } - builder.append(",") - .append("\"fields\":["); + builder.append(",").append("\"fields\":["); boolean first = true; for (String schemaKey : schemaKeys) { if (!first) { diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/BaseHttpUtils.java b/integration-tests/src/test/java/io/apicurio/tests/utils/BaseHttpUtils.java index 67883edcc7..1a2bf398b0 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/BaseHttpUtils.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/BaseHttpUtils.java @@ -9,155 +9,70 @@ public class BaseHttpUtils { public static Response getRequest(String contentType, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .get(endpoint) - .then() - .statusCode(returnCode) - .extract() + return given().when().contentType(contentType).get(endpoint).then().statusCode(returnCode).extract() .response(); } public static Response getRequest(String contentType, URL endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .get(endpoint) - .then() - .statusCode(returnCode) - .extract() + return given().when().contentType(contentType).get(endpoint).then().statusCode(returnCode).extract() .response(); } public static Response postRequest(String contentType, String body, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(body) - .post(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return 
given().when().contentType(contentType).body(body).post(endpoint).then().statusCode(returnCode) + .extract().response(); } public static Response postRequest(String contentType, String body, URL endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(body) - .post(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(body).post(endpoint).then().statusCode(returnCode) + .extract().response(); } public static Response putRequest(String contentType, String body, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(body) - .put(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(body).put(endpoint).then().statusCode(returnCode) + .extract().response(); } public static Response putRequest(String contentType, String body, URL endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(body) - .put(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(body).put(endpoint).then().statusCode(returnCode) + .extract().response(); } public static Response deleteRequest(String contentType, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .delete(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).delete(endpoint).then().statusCode(returnCode) + .extract().response(); } - public static Response rulesPostRequest(String contentType, String rule, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(rule) - .post(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + public static Response rulesPostRequest(String contentType, String rule, String endpoint, + int returnCode) { + 
return given().when().contentType(contentType).body(rule).post(endpoint).then().statusCode(returnCode) + .extract().response(); } public static Response rulesPostRequest(String contentType, String rule, URL endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(rule) - .post(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(rule).post(endpoint).then().statusCode(returnCode) + .extract().response(); } public static Response rulesGetRequest(String contentType, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .get(endpoint) - .then() - .statusCode(returnCode) - .extract() + return given().when().contentType(contentType).get(endpoint).then().statusCode(returnCode).extract() .response(); } public static Response rulesPutRequest(String contentType, String rule, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .body(rule) - .put(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).body(rule).put(endpoint).then().statusCode(returnCode) + .extract().response(); } public static Response rulesDeleteRequest(String contentType, String endpoint, int returnCode) { - return given() - .when() - .contentType(contentType) - .delete(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + return given().when().contentType(contentType).delete(endpoint).then().statusCode(returnCode) + .extract().response(); } - public static Response artifactPostRequest(String artifactId, String contentType, String body, String endpoint, int returnCode) { - return given() - .when() - .header("X-Registry-Artifactid", artifactId) - .contentType(contentType) - .body(body) - .post(endpoint) - .then() - .statusCode(returnCode) - .extract() - .response(); + public static Response artifactPostRequest(String artifactId, String 
contentType, String body, + String endpoint, int returnCode) { + return given().when().header("X-Registry-Artifactid", artifactId).contentType(contentType).body(body) + .post(endpoint).then().statusCode(returnCode).extract().response(); } } diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/Constants.java b/integration-tests/src/test/java/io/apicurio/tests/utils/Constants.java index 9339880a9d..b9caf6ab80 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/Constants.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/Constants.java @@ -18,7 +18,8 @@ public interface Constants { */ String SMOKE = "smoke"; /** - * Tag for tests, which are working with the cluster (integration of kafka with registries) such as serdes and converters + * Tag for tests, which are working with the cluster (integration of kafka with registries) such as serdes + * and converters */ String SERDES = "serdes"; /** @@ -37,7 +38,8 @@ public interface Constants { String CLUSTERED = "clustered"; /** - * Tag for migration tests, the suite will deploy two registries and perform data migration between the two + * Tag for migration tests, the suite will deploy two registries and perform data migration between the + * two */ String MIGRATION = "migration"; @@ -47,7 +49,8 @@ public interface Constants { String AUTH = "auth"; /** - * Tag for kafkasql snapshotting tests, the test will be executed only when the storage variant is kafkasql. + * Tag for kafkasql snapshotting tests, the test will be executed only when the storage variant is + * kafkasql. */ String KAFKA_SQL_SNAPSHOTTING = "kafkasql-snapshotting"; @@ -57,8 +60,9 @@ public interface Constants { String SQL = "sqlit"; /** - * Tag for sql storage db schema upgrade tests. Consists of one test that deploys an older version of the registry, populates the db, and then deploys the latest version of the registry. - * Used to test the db schema upgrade process. 
+ * Tag for sql storage db schema upgrade tests. Consists of one test that deploys an older version of the + * registry, populates the db, and then deploys the latest version of the registry. Used to test the db + * schema upgrade process. */ String DB_UPGRADE = "dbupgrade"; diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/ExtensionContextParameterResolver.java b/integration-tests/src/test/java/io/apicurio/tests/utils/ExtensionContextParameterResolver.java index ea07f2bc35..6bba750346 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/ExtensionContextParameterResolver.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/ExtensionContextParameterResolver.java @@ -7,12 +7,14 @@ public class ExtensionContextParameterResolver implements ParameterResolver { @Override - public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) throws ParameterResolutionException { + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { return parameterContext.getParameter().getType() == ExtensionContext.class; } @Override - public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) throws ParameterResolutionException { + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { return extensionContext; } } diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/KafkaFacade.java b/integration-tests/src/test/java/io/apicurio/tests/utils/KafkaFacade.java index f7a63ff269..fef48b9eec 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/KafkaFacade.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/KafkaFacade.java @@ -29,7 +29,7 @@ public static KafkaFacade getInstance() { } private KafkaFacade() { - //hidden constructor, singleton class + // 
hidden constructor, singleton class } public void createTopic(String topic, int partitions, int replicationFactor) { @@ -69,7 +69,8 @@ public void start() { this.kafkaContainer = new StrimziKafkaContainer(); kafkaContainer.addEnv("KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR", "1"); kafkaContainer.addEnv("KAFKA_TRANSACTION_STATE_LOG_MIN_ISR", "1"); - kafkaContainer.addEnv("KAFKA_ADVERTISED_LISTENERS", "PLAINTEXT://broker:9092,PLAINTEXT_HOST://localhost:9092"); + kafkaContainer.addEnv("KAFKA_ADVERTISED_LISTENERS", + "PLAINTEXT://broker:9092,PLAINTEXT_HOST://localhost:9092"); kafkaContainer.start(); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/LimitingProxy.java b/integration-tests/src/test/java/io/apicurio/tests/utils/LimitingProxy.java index 24f9f7ac65..d44af461b4 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/LimitingProxy.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/LimitingProxy.java @@ -50,19 +50,17 @@ public CompletableFuture start() { CompletableFuture serverFuture = new CompletableFuture<>(); - server = vertx.createHttpServer(new HttpServerOptions() - .setPort(port)) - .requestHandler(this::proxyRequest) - .listen(server -> { - if (server.succeeded()) { - logger.info("Proxy server started on port {}", port); - logger.info("Proxying server {}:{}", destinationHost, destinationPort); - serverFuture.complete(server.result()); - } else { - logger.error("Error starting server", server.cause()); - serverFuture.completeExceptionally(server.cause()); - } - }); + server = vertx.createHttpServer(new HttpServerOptions().setPort(port)) + .requestHandler(this::proxyRequest).listen(server -> { + if (server.succeeded()) { + logger.info("Proxy server started on port {}", port); + logger.info("Proxying server {}:{}", destinationHost, destinationPort); + serverFuture.complete(server.result()); + } else { + logger.error("Error starting server", server.cause()); + 
serverFuture.completeExceptionally(server.cause()); + } + }); return serverFuture; } @@ -93,8 +91,8 @@ private void proxyRequest(HttpServerRequest req) { req.pause(); client.request(req.method(), destinationPort, destinationHost, req.uri()) - .onSuccess(clientReq -> executeProxy(clientReq, req)) - .onFailure(throwable -> logger.error("Error found creating request", throwable)); + .onSuccess(clientReq -> executeProxy(clientReq, req)) + .onFailure(throwable -> logger.error("Error found creating request", throwable)); } private void executeProxy(HttpClientRequest clientReq, HttpServerRequest req) { @@ -106,7 +104,8 @@ private void executeProxy(HttpClientRequest clientReq, HttpServerRequest req) { req.response().headers().setAll(clientRes.headers()); clientRes.handler(data -> req.response().write(data)); clientRes.endHandler((v) -> req.response().end()); - clientRes.exceptionHandler(e -> logger.error("Error caught in response of request to serverless", e)); + clientRes.exceptionHandler( + e -> logger.error("Error caught in response of request to serverless", e)); req.response().exceptionHandler(e -> logger.error("Error caught in response to client", e)); } else { logger.error("Error in async result", reqResult.cause()); diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/RateLimitingProxy.java b/integration-tests/src/test/java/io/apicurio/tests/utils/RateLimitingProxy.java index abf7b87913..8dc0dd05dd 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/RateLimitingProxy.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/RateLimitingProxy.java @@ -12,7 +12,8 @@ public class RateLimitingProxy extends LimitingProxy { public RateLimitingProxy(int failAfterRequests, String destinationHost, int destinationPort) { super(destinationHost, destinationPort); // this will rate limit just based on total requests - // that means that if buckets=3 the proxy will successfully redirect the first 3 requests and every request after that 
will be rejected with 429 status + // that means that if buckets=3 the proxy will successfully redirect the first 3 requests and every + // request after that will be rejected with 429 status this.buckets = failAfterRequests; } diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/RegistryWaitUtils.java b/integration-tests/src/test/java/io/apicurio/tests/utils/RegistryWaitUtils.java index 094b20a4ca..bb6cb097af 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/RegistryWaitUtils.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/RegistryWaitUtils.java @@ -15,7 +15,8 @@ public interface FunctionExc { R run(T t) throws Exception; } - public static void retry(RegistryClient registryClient, ConsumerExc registryOp) throws Exception { + public static void retry(RegistryClient registryClient, ConsumerExc registryOp) + throws Exception { TestUtils.retry(() -> registryOp.run(registryClient)); } diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/RetryLimitingProxy.java b/integration-tests/src/test/java/io/apicurio/tests/utils/RetryLimitingProxy.java index c6c1ba3fe6..ad63fb8ec2 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/RetryLimitingProxy.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/RetryLimitingProxy.java @@ -5,7 +5,6 @@ /** * Block `n` first requests, then allow the rest through. 
- * */ public class RetryLimitingProxy extends LimitingProxy { diff --git a/integration-tests/src/test/java/io/apicurio/tests/utils/TestSeparator.java b/integration-tests/src/test/java/io/apicurio/tests/utils/TestSeparator.java index 9519fe2728..143a9b786d 100644 --- a/integration-tests/src/test/java/io/apicurio/tests/utils/TestSeparator.java +++ b/integration-tests/src/test/java/io/apicurio/tests/utils/TestSeparator.java @@ -17,12 +17,14 @@ public interface TestSeparator { @BeforeEach default void beforeEachTest(TestInfo testInfo) { SEPARATOR_LOGGER.info(String.join("", Collections.nCopies(76, SEPARATOR_CHAR))); - SEPARATOR_LOGGER.info(String.format("%s.%s-STARTED", testInfo.getTestClass().get().getName(), testInfo.getTestMethod().get().getName())); + SEPARATOR_LOGGER.info(String.format("%s.%s-STARTED", testInfo.getTestClass().get().getName(), + testInfo.getTestMethod().get().getName())); } @AfterEach default void afterEachTest(TestInfo testInfo) { - SEPARATOR_LOGGER.info(String.format("%s.%s-FINISHED", testInfo.getTestClass().get().getName(), testInfo.getTestMethod().get().getName())); + SEPARATOR_LOGGER.info(String.format("%s.%s-FINISHED", testInfo.getTestClass().get().getName(), + testInfo.getTestMethod().get().getName())); SEPARATOR_LOGGER.info(String.join("", Collections.nCopies(76, SEPARATOR_CHAR))); } } \ No newline at end of file diff --git a/java-sdk/pom.xml b/java-sdk/pom.xml index bb461fb8f9..f58aa12c28 100644 --- a/java-sdk/pom.xml +++ b/java-sdk/pom.xml @@ -14,101 +14,99 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - - - apicurio-registry - io.apicurio - 3.0.0-SNAPSHOT - ../pom.xml - - 4.0.0 - apicurio-registry-java-sdk - - 1.1.14 - 0.0.17 - https://github.com/microsoft/kiota/releases/download - - - - com.microsoft.kiota - microsoft-kiota-abstractions - ${kiota.libs.version} - - - io.kiota - kiota-http-vertx - ${kiota.community.version} - - - io.kiota - kiota-serialization-jackson - ${kiota.community.version} - - - com.microsoft.kiota - microsoft-kiota-serialization-text - ${kiota.libs.version} - - - com.microsoft.kiota - microsoft-kiota-serialization-form - ${kiota.libs.version} - - - com.microsoft.kiota - microsoft-kiota-serialization-multipart - ${kiota.libs.version} - - - jakarta.annotation - jakarta.annotation-api - - - io.vertx - vertx-auth-oauth2 - - - - - - kiota-maven-plugin - io.kiota - ${kiota.community.version} - - - - generate - - - - - 1.10.1 - ${kiota.base.url} - ../common/src/main/resources/META-INF/openapi.json - io.apicurio.registry.rest.client - RegistryClient - - - - org.codehaus.mojo - build-helper-maven-plugin - - - add-source - generate-sources - - add-source - - - - ${project.build.directory}/generated-sources/kiota/ - - - - - - - + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../pom.xml + + apicurio-registry-java-sdk + + 1.1.14 + 0.0.17 + https://github.com/microsoft/kiota/releases/download + + + + com.microsoft.kiota + microsoft-kiota-abstractions + ${kiota.libs.version} + + + io.kiota + kiota-http-vertx + ${kiota.community.version} + + + io.kiota + kiota-serialization-jackson + ${kiota.community.version} + + + com.microsoft.kiota + microsoft-kiota-serialization-text + ${kiota.libs.version} + + + com.microsoft.kiota + microsoft-kiota-serialization-form + ${kiota.libs.version} + + + com.microsoft.kiota + microsoft-kiota-serialization-multipart + ${kiota.libs.version} + + + jakarta.annotation + jakarta.annotation-api + + + io.vertx + vertx-auth-oauth2 + + + + + + io.kiota + kiota-maven-plugin + ${kiota.community.version} + + 
1.10.1 + ${kiota.base.url} + ../common/src/main/resources/META-INF/openapi.json + io.apicurio.registry.rest.client + RegistryClient + + + + + generate + + + + + + org.codehaus.mojo + build-helper-maven-plugin + + + add-source + + add-source + + generate-sources + + + ${project.build.directory}/generated-sources/kiota/ + + + + + + + diff --git a/java-sdk/src/main/java/io/apicurio/registry/client/auth/VertXAuthFactory.java b/java-sdk/src/main/java/io/apicurio/registry/client/auth/VertXAuthFactory.java index 29a6cbd6aa..f0ac0f573a 100644 --- a/java-sdk/src/main/java/io/apicurio/registry/client/auth/VertXAuthFactory.java +++ b/java-sdk/src/main/java/io/apicurio/registry/client/auth/VertXAuthFactory.java @@ -20,22 +20,22 @@ public static WebClient buildOIDCWebClient(String tokenUrl, String clientId, Str return buildOIDCWebClient(defaultVertx, tokenUrl, clientId, clientSecret, null); } - public static WebClient buildOIDCWebClient(Vertx vertx, String tokenUrl, String clientId, String clientSecret) { + public static WebClient buildOIDCWebClient(Vertx vertx, String tokenUrl, String clientId, + String clientSecret) { return buildOIDCWebClient(tokenUrl, clientId, clientSecret, null); } - public static WebClient buildOIDCWebClient(String tokenUrl, String clientId, String clientSecret, String scope) { + public static WebClient buildOIDCWebClient(String tokenUrl, String clientId, String clientSecret, + String scope) { return buildOIDCWebClient(defaultVertx, tokenUrl, clientId, clientSecret, scope); } - public static WebClient buildOIDCWebClient(Vertx vertx, String tokenUrl, String clientId, String clientSecret, String scope) { + public static WebClient buildOIDCWebClient(Vertx vertx, String tokenUrl, String clientId, + String clientSecret, String scope) { WebClient webClient = WebClient.create(vertx); - OAuth2Auth oAuth2Options = OAuth2Auth.create(vertx, new OAuth2Options() - .setFlow(OAuth2FlowType.CLIENT) - .setClientId(clientId) - .setClientSecret(clientSecret) - 
.setTokenPath(tokenUrl)); + OAuth2Auth oAuth2Options = OAuth2Auth.create(vertx, new OAuth2Options().setFlow(OAuth2FlowType.CLIENT) + .setClientId(clientId).setClientSecret(clientSecret).setTokenPath(tokenUrl)); Oauth2Credentials oauth2Credentials = new Oauth2Credentials(); @@ -50,10 +50,10 @@ public static WebClient buildSimpleAuthWebClient(String username, String passwor } public static WebClient buildSimpleAuthWebClient(Vertx vertx, String username, String password) { - String usernameAndPassword = Base64.getEncoder().encodeToString((username + ":" + password).getBytes(StandardCharsets.UTF_8)); - return WebClientSession - .create(WebClient.create(vertx)) - .addHeader("Authorization", "Basic " + usernameAndPassword); + String usernameAndPassword = Base64.getEncoder() + .encodeToString((username + ":" + password).getBytes(StandardCharsets.UTF_8)); + return WebClientSession.create(WebClient.create(vertx)).addHeader("Authorization", + "Basic " + usernameAndPassword); } } diff --git a/operator/controller/pom.xml b/operator/controller/pom.xml index fd7d837805..b8a0b8807e 100644 --- a/operator/controller/pom.xml +++ b/operator/controller/pom.xml @@ -1,119 +1,116 @@ - - - 4.0.0 + + + 4.0.0 - - apicurio-registry-operator - io.apicurio - 3.0.0-SNAPSHOT - ../pom.xml - + + io.apicurio + apicurio-registry-operator + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-operator-controller - Apicurio Registry Operator :: Controller - - - io.apicurio - apicurio-registry-operator-model - ${project.version} - - - io.quarkiverse.operatorsdk - quarkus-operator-sdk - - - io.quarkus - quarkus-resteasy-jackson - - - io.quarkus - quarkus-rest-client - - - io.quarkus - quarkus-rest-client-jackson - - - io.quarkus - quarkus-openshift - - - io.quarkus - quarkus-minikube - - - io.quarkus - quarkus-kubernetes-client - - - org.bouncycastle - bcprov-jdk15on - ${bouncycastle.version} - - - org.bouncycastle - bcpkix-jdk15on - ${bouncycastle.version} - - - io.quarkus - quarkus-container-image-jib - 
- - io.quarkus - quarkus-test-common - test - - - io.quarkus - quarkus-junit5 - test - - - org.assertj - assertj-core - ${assertj.core.version} - - - org.awaitility - awaitility - ${awaitility.version} - test - - - io.rest-assured - rest-assured - test - - - - - - io.quarkus - quarkus-maven-plugin - - - - build - - - - - - maven-surefire-plugin - - - default-test - verify - - test - - - - - - - \ No newline at end of file + apicurio-registry-operator-controller + Apicurio Registry Operator :: Controller + + + io.apicurio + apicurio-registry-operator-model + ${project.version} + + + io.quarkiverse.operatorsdk + quarkus-operator-sdk + + + io.quarkus + quarkus-resteasy-jackson + + + io.quarkus + quarkus-rest-client + + + io.quarkus + quarkus-rest-client-jackson + + + io.quarkus + quarkus-openshift + + + io.quarkus + quarkus-minikube + + + io.quarkus + quarkus-kubernetes-client + + + org.bouncycastle + bcprov-jdk15on + ${bouncycastle.version} + + + org.bouncycastle + bcpkix-jdk15on + ${bouncycastle.version} + + + io.quarkus + quarkus-container-image-jib + + + io.quarkus + quarkus-test-common + test + + + io.quarkus + quarkus-junit5 + test + + + org.assertj + assertj-core + ${assertj.core.version} + + + org.awaitility + awaitility + ${awaitility.version} + test + + + io.rest-assured + rest-assured + test + + + + + + io.quarkus + quarkus-maven-plugin + + + + build + + + + + + maven-surefire-plugin + + + default-test + + test + + verify + + + + + + diff --git a/operator/controller/src/main/java/io/apicurio/registry/operator/ApicurioDeployment.java b/operator/controller/src/main/java/io/apicurio/registry/operator/ApicurioDeployment.java index 8ca69c4866..16eebda2da 100644 --- a/operator/controller/src/main/java/io/apicurio/registry/operator/ApicurioDeployment.java +++ b/operator/controller/src/main/java/io/apicurio/registry/operator/ApicurioDeployment.java @@ -1,7 +1,5 @@ package io.apicurio.registry.operator; -import static io.apicurio.registry.operator.Constants.*; - 
import io.apicur.registry.v1.ApicurioRegistry; import io.fabric8.kubernetes.api.model.OwnerReferenceBuilder; import io.fabric8.kubernetes.api.model.apps.Deployment; @@ -10,72 +8,39 @@ import io.javaoperatorsdk.operator.processing.dependent.kubernetes.CRUDKubernetesDependentResource; import io.javaoperatorsdk.operator.processing.dependent.kubernetes.KubernetesDependent; +import static io.apicurio.registry.operator.Constants.*; + @KubernetesDependent(labelSelector = LABEL_SELECTOR_KEY) -public class ApicurioDeployment - extends CRUDKubernetesDependentResource { +public class ApicurioDeployment extends CRUDKubernetesDependentResource { - public ApicurioDeployment() { - super(Deployment.class); - } + public ApicurioDeployment() { + super(Deployment.class); + } - public static String name(ApicurioRegistry apicurioRegistry) { - return apicurioRegistry.getMetadata().getName(); - } + public static String name(ApicurioRegistry apicurioRegistry) { + return apicurioRegistry.getMetadata().getName(); + } - @Override - protected Deployment desired( - ApicurioRegistry apicurioRegistry, Context context) { - var labels = apicurioRegistry.getMetadata().getLabels(); - labels.putAll(Constants.defaultLabels(apicurioRegistry)); - return new DeploymentBuilder() - .withNewMetadata() - .withName(ApicurioDeployment.name(apicurioRegistry)) - .withNamespace(apicurioRegistry.getMetadata().getNamespace()) - .withOwnerReferences( - new OwnerReferenceBuilder() - .withController(true) - .withBlockOwnerDeletion(true) - .withApiVersion(apicurioRegistry.getApiVersion()) - .withKind(apicurioRegistry.getKind()) - .withName(apicurioRegistry.getMetadata().getName()) - .withUid(apicurioRegistry.getMetadata().getUid()) - .build()) - .withLabels(labels) - .endMetadata() - .withNewSpec() - .withNewSelector() - .addToMatchLabels(LABEL_SELECTOR_KEY, LABEL_SELECTOR_VALUE) - .endSelector() - .withReplicas(DEFAULT_REPLICAS) - .withNewTemplate() - .withNewMetadata() - .withLabels(labels) - .endMetadata() - 
.withNewSpec() - .addNewContainer() - .withName(CONTAINER_NAME) - .withImage(DEFAULT_CONTAINER_IMAGE) - .withImagePullPolicy("Always") - .withNewResources() - .withRequests(DEFAULT_REQUESTS) - .withLimits(DEFAULT_LIMITS) - .endResources() - .withReadinessProbe(DEFAULT_READINESS_PROBE) - .withLivenessProbe(DEFAULT_LIVENESS_PROBE) - .endContainer() - .endSpec() - .endTemplate() - .withNewStrategy() - .withNewRollingUpdate() - .withNewMaxUnavailable() - .withValue(1) - .endMaxUnavailable() - .withNewMaxSurge() - .withValue(1) - .endMaxSurge() - .endRollingUpdate() - .endStrategy() - .endSpec() - .build(); - } + @Override + protected Deployment desired(ApicurioRegistry apicurioRegistry, Context context) { + var labels = apicurioRegistry.getMetadata().getLabels(); + labels.putAll(Constants.defaultLabels(apicurioRegistry)); + return new DeploymentBuilder().withNewMetadata().withName(ApicurioDeployment.name(apicurioRegistry)) + .withNamespace(apicurioRegistry.getMetadata().getNamespace()) + .withOwnerReferences(new OwnerReferenceBuilder().withController(true) + .withBlockOwnerDeletion(true).withApiVersion(apicurioRegistry.getApiVersion()) + .withKind(apicurioRegistry.getKind()) + .withName(apicurioRegistry.getMetadata().getName()) + .withUid(apicurioRegistry.getMetadata().getUid()).build()) + .withLabels(labels).endMetadata().withNewSpec().withNewSelector() + .addToMatchLabels(LABEL_SELECTOR_KEY, LABEL_SELECTOR_VALUE).endSelector() + .withReplicas(DEFAULT_REPLICAS).withNewTemplate().withNewMetadata().withLabels(labels) + .endMetadata().withNewSpec().addNewContainer().withName(CONTAINER_NAME) + .withImage(DEFAULT_CONTAINER_IMAGE).withImagePullPolicy("Always").withNewResources() + .withRequests(DEFAULT_REQUESTS).withLimits(DEFAULT_LIMITS).endResources() + .withReadinessProbe(DEFAULT_READINESS_PROBE).withLivenessProbe(DEFAULT_LIVENESS_PROBE) + .endContainer().endSpec().endTemplate().withNewStrategy().withNewRollingUpdate() + 
.withNewMaxUnavailable().withValue(1).endMaxUnavailable().withNewMaxSurge().withValue(1) + .endMaxSurge().endRollingUpdate().endStrategy().endSpec().build(); + } } diff --git a/operator/controller/src/main/java/io/apicurio/registry/operator/Constants.java b/operator/controller/src/main/java/io/apicurio/registry/operator/Constants.java index 735a3ce63c..bd4b3a8fd7 100644 --- a/operator/controller/src/main/java/io/apicurio/registry/operator/Constants.java +++ b/operator/controller/src/main/java/io/apicurio/registry/operator/Constants.java @@ -5,68 +5,44 @@ import io.fabric8.kubernetes.api.model.ProbeBuilder; import io.fabric8.kubernetes.api.model.Quantity; import io.fabric8.kubernetes.api.model.QuantityBuilder; + import java.util.HashMap; import java.util.Map; public class Constants { - public static final String MANAGED_BY_LABEL = "app.kubernetes.io/managed-by"; - public static final String MANAGED_BY_VALUE = "apicurio-registry-operator"; - public static final String LABEL_SELECTOR_KEY = "app.apicurio-registry-operator.io/managed"; - public static final String LABEL_SELECTOR_VALUE = "true"; - - public static final int DEFAULT_REPLICAS = 1; - public static final String CONTAINER_NAME = "registry"; - public static final String DEFAULT_CONTAINER_IMAGE = - "apicurio/apicurio-registry-mem:latest-snapshot"; - - public static final Map DEFAULT_REQUESTS = - Map.of( - "cpu", new QuantityBuilder().withAmount("500").withFormat("m").build(), - "memory", new QuantityBuilder().withAmount("512").withFormat("Mi").build()); - public static final Map DEFAULT_LIMITS = - Map.of( - "cpu", new QuantityBuilder().withAmount("1").build(), - "memory", new QuantityBuilder().withAmount("1300").withFormat("Mi").build()); - public static final Probe DEFAULT_READINESS_PROBE = - new ProbeBuilder() - .withNewHttpGet() - .withPath("/health/ready") - .withNewPort() - .withValue(8080) - .endPort() - .endHttpGet() - .withInitialDelaySeconds(15) - .withTimeoutSeconds(5) - .withPeriodSeconds(10) - 
.withSuccessThreshold(1) - .withFailureThreshold(3) - .build(); - - public static final Probe DEFAULT_LIVENESS_PROBE = - new ProbeBuilder() - .withNewHttpGet() - .withPath("/health/live") - .withNewPort() - .withValue(8080) - .endPort() - .endHttpGet() - .withInitialDelaySeconds(15) - .withTimeoutSeconds(5) - .withPeriodSeconds(10) - .withSuccessThreshold(1) - .withFailureThreshold(3) - .build(); - - public static final Map BASIC_LABELS = - Map.of( - MANAGED_BY_LABEL, MANAGED_BY_VALUE, - LABEL_SELECTOR_KEY, LABEL_SELECTOR_VALUE); - - public static final Map defaultLabels(ApicurioRegistry apicurioRegistry) { - var labels = new HashMap(); - labels.putAll(BASIC_LABELS); - labels.put("app", apicurioRegistry.getMetadata().getName()); - return labels; - } + public static final String MANAGED_BY_LABEL = "app.kubernetes.io/managed-by"; + public static final String MANAGED_BY_VALUE = "apicurio-registry-operator"; + public static final String LABEL_SELECTOR_KEY = "app.apicurio-registry-operator.io/managed"; + public static final String LABEL_SELECTOR_VALUE = "true"; + + public static final int DEFAULT_REPLICAS = 1; + public static final String CONTAINER_NAME = "registry"; + public static final String DEFAULT_CONTAINER_IMAGE = "apicurio/apicurio-registry-mem:latest-snapshot"; + + public static final Map DEFAULT_REQUESTS = Map.of("cpu", + new QuantityBuilder().withAmount("500").withFormat("m").build(), "memory", + new QuantityBuilder().withAmount("512").withFormat("Mi").build()); + public static final Map DEFAULT_LIMITS = Map.of("cpu", + new QuantityBuilder().withAmount("1").build(), "memory", + new QuantityBuilder().withAmount("1300").withFormat("Mi").build()); + public static final Probe DEFAULT_READINESS_PROBE = new ProbeBuilder().withNewHttpGet() + .withPath("/health/ready").withNewPort().withValue(8080).endPort().endHttpGet() + .withInitialDelaySeconds(15).withTimeoutSeconds(5).withPeriodSeconds(10).withSuccessThreshold(1) + .withFailureThreshold(3).build(); + + public 
static final Probe DEFAULT_LIVENESS_PROBE = new ProbeBuilder().withNewHttpGet() + .withPath("/health/live").withNewPort().withValue(8080).endPort().endHttpGet() + .withInitialDelaySeconds(15).withTimeoutSeconds(5).withPeriodSeconds(10).withSuccessThreshold(1) + .withFailureThreshold(3).build(); + + public static final Map BASIC_LABELS = Map.of(MANAGED_BY_LABEL, MANAGED_BY_VALUE, + LABEL_SELECTOR_KEY, LABEL_SELECTOR_VALUE); + + public static final Map defaultLabels(ApicurioRegistry apicurioRegistry) { + var labels = new HashMap(); + labels.putAll(BASIC_LABELS); + labels.put("app", apicurioRegistry.getMetadata().getName()); + return labels; + } } diff --git a/operator/controller/src/main/java/io/apicurio/registry/operator/DeploymentController.java b/operator/controller/src/main/java/io/apicurio/registry/operator/DeploymentController.java index c7496b86e1..e61b0286a0 100644 --- a/operator/controller/src/main/java/io/apicurio/registry/operator/DeploymentController.java +++ b/operator/controller/src/main/java/io/apicurio/registry/operator/DeploymentController.java @@ -13,33 +13,30 @@ import io.quarkus.logging.Log; import jakarta.inject.Inject; -@ControllerConfiguration(dependents = {@Dependent(type = ApicurioDeployment.class)}) +@ControllerConfiguration(dependents = { @Dependent(type = ApicurioDeployment.class) }) public class DeploymentController - implements Reconciler, ErrorStatusHandler { - @Inject KubernetesClient client; + implements Reconciler, ErrorStatusHandler { + @Inject + KubernetesClient client; - @Override - public UpdateControl reconcile( - ApicurioRegistry apicurioRegistry, Context context) { - Log.infof("Reconciling Apicurio Registry: {}", apicurioRegistry); - var statusUpdater = new StatusUpdater(apicurioRegistry); + @Override + public UpdateControl reconcile(ApicurioRegistry apicurioRegistry, + Context context) { + Log.infof("Reconciling Apicurio Registry: {}", apicurioRegistry); + var statusUpdater = new StatusUpdater(apicurioRegistry); - return 
context - .getSecondaryResource(Deployment.class) - .map( - deployment -> { - Log.infof("Updating Apicurio Registry status:"); - apicurioRegistry.setStatus(statusUpdater.next(deployment)); - return UpdateControl.patchStatus(apicurioRegistry); - }) - .orElseGet(UpdateControl::noUpdate); - } + return context.getSecondaryResource(Deployment.class).map(deployment -> { + Log.infof("Updating Apicurio Registry status:"); + apicurioRegistry.setStatus(statusUpdater.next(deployment)); + return UpdateControl.patchStatus(apicurioRegistry); + }).orElseGet(UpdateControl::noUpdate); + } - @Override - public ErrorStatusUpdateControl updateErrorStatus( - ApicurioRegistry apicurioRegistry, Context context, Exception e) { - var statusUpdater = new StatusUpdater(apicurioRegistry); - apicurioRegistry.setStatus(statusUpdater.errorStatus(e)); - return ErrorStatusUpdateControl.updateStatus(apicurioRegistry); - } + @Override + public ErrorStatusUpdateControl updateErrorStatus(ApicurioRegistry apicurioRegistry, + Context context, Exception e) { + var statusUpdater = new StatusUpdater(apicurioRegistry); + apicurioRegistry.setStatus(statusUpdater.errorStatus(e)); + return ErrorStatusUpdateControl.updateStatus(apicurioRegistry); + } } diff --git a/operator/controller/src/main/java/io/apicurio/registry/operator/StatusUpdater.java b/operator/controller/src/main/java/io/apicurio/registry/operator/StatusUpdater.java index 91d9fb3c5d..81f1d7f418 100644 --- a/operator/controller/src/main/java/io/apicurio/registry/operator/StatusUpdater.java +++ b/operator/controller/src/main/java/io/apicurio/registry/operator/StatusUpdater.java @@ -4,6 +4,7 @@ import io.apicur.registry.v1.ApicurioRegistryStatus; import io.apicur.registry.v1.apicurioregistrystatus.Conditions; import io.fabric8.kubernetes.api.model.apps.Deployment; + import java.time.ZonedDateTime; import java.util.Arrays; import java.util.List; @@ -11,67 +12,61 @@ public class StatusUpdater { - public static final String ERROR_TYPE = "ERROR"; - - 
private ApicurioRegistry registry; + public static final String ERROR_TYPE = "ERROR"; - public StatusUpdater(ApicurioRegistry registry) { - this.registry = registry; - } + private ApicurioRegistry registry; - public ApicurioRegistryStatus errorStatus(Exception e) { - ZonedDateTime lastTransitionTime = ZonedDateTime.now(); - if (registry != null - && registry.getStatus() != null - && registry.getStatus().getConditions().size() > 0 - && - // TODO: better `lastTransitionTime` handling - registry.getStatus().getConditions().get(0).getLastTransitionTime() != null) { - lastTransitionTime = registry.getStatus().getConditions().get(0).getLastTransitionTime(); + public StatusUpdater(ApicurioRegistry registry) { + this.registry = registry; } - var generation = registry.getMetadata() == null ? null : registry.getMetadata().getGeneration(); - var newLastTransitionTime = ZonedDateTime.now(); - var errorCondition = new Conditions(); - errorCondition.setStatus(Conditions.Status.TRUE); - errorCondition.setType(ERROR_TYPE); - errorCondition.setObservedGeneration(generation); - errorCondition.setLastTransitionTime(newLastTransitionTime); - errorCondition.setMessage( - Arrays.stream(e.getStackTrace()) - .map(st -> st.toString()) - .collect(Collectors.joining("\n"))); - errorCondition.setReason("reasons"); + public ApicurioRegistryStatus errorStatus(Exception e) { + ZonedDateTime lastTransitionTime = ZonedDateTime.now(); + if (registry != null && registry.getStatus() != null + && registry.getStatus().getConditions().size() > 0 && + // TODO: better `lastTransitionTime` handling + registry.getStatus().getConditions().get(0).getLastTransitionTime() != null) { + lastTransitionTime = registry.getStatus().getConditions().get(0).getLastTransitionTime(); + } - var status = new ApicurioRegistryStatus(); - status.setConditions(List.of(errorCondition)); + var generation = registry.getMetadata() == null ? 
null : registry.getMetadata().getGeneration(); + var newLastTransitionTime = ZonedDateTime.now(); + var errorCondition = new Conditions(); + errorCondition.setStatus(Conditions.Status.TRUE); + errorCondition.setType(ERROR_TYPE); + errorCondition.setObservedGeneration(generation); + errorCondition.setLastTransitionTime(newLastTransitionTime); + errorCondition.setMessage( + Arrays.stream(e.getStackTrace()).map(st -> st.toString()).collect(Collectors.joining("\n"))); + errorCondition.setReason("reasons"); - return status; - } + var status = new ApicurioRegistryStatus(); + status.setConditions(List.of(errorCondition)); - public ApicurioRegistryStatus next(Deployment deployment) { - var lastTransitionTime = ZonedDateTime.now(); - if (registry != null - && registry.getStatus() != null - && registry.getStatus().getConditions().size() > 0 - && - // TODO: should we sort the conditions before taking the first? - registry.getStatus().getConditions().get(0).getLastTransitionTime() != null) { - lastTransitionTime = registry.getStatus().getConditions().get(0).getLastTransitionTime(); + return status; } - var generation = registry.getMetadata() == null ? null : registry.getMetadata().getGeneration(); - var nextCondition = new Conditions(); - nextCondition.setStatus(Conditions.Status.TRUE); - nextCondition.setType(ERROR_TYPE); - nextCondition.setObservedGeneration(generation); - nextCondition.setLastTransitionTime(lastTransitionTime); - nextCondition.setMessage("TODO"); - nextCondition.setReason("reasons"); + public ApicurioRegistryStatus next(Deployment deployment) { + var lastTransitionTime = ZonedDateTime.now(); + if (registry != null && registry.getStatus() != null + && registry.getStatus().getConditions().size() > 0 && + // TODO: should we sort the conditions before taking the first? 
+ registry.getStatus().getConditions().get(0).getLastTransitionTime() != null) { + lastTransitionTime = registry.getStatus().getConditions().get(0).getLastTransitionTime(); + } - var status = new ApicurioRegistryStatus(); - status.setConditions(List.of(nextCondition)); + var generation = registry.getMetadata() == null ? null : registry.getMetadata().getGeneration(); + var nextCondition = new Conditions(); + nextCondition.setStatus(Conditions.Status.TRUE); + nextCondition.setType(ERROR_TYPE); + nextCondition.setObservedGeneration(generation); + nextCondition.setLastTransitionTime(lastTransitionTime); + nextCondition.setMessage("TODO"); + nextCondition.setReason("reasons"); - return status; - } + var status = new ApicurioRegistryStatus(); + status.setConditions(List.of(nextCondition)); + + return status; + } } diff --git a/operator/controller/src/test/java/io/apicurio/registry/operator/it/DeploymentITTest.java b/operator/controller/src/test/java/io/apicurio/registry/operator/it/DeploymentITTest.java index f3cc255523..b28d8b68ea 100644 --- a/operator/controller/src/test/java/io/apicurio/registry/operator/it/DeploymentITTest.java +++ b/operator/controller/src/test/java/io/apicurio/registry/operator/it/DeploymentITTest.java @@ -1,44 +1,33 @@ package io.apicurio.registry.operator.it; -import static org.assertj.core.api.Assertions.assertThat; -import static org.awaitility.Awaitility.await; - import io.apicur.registry.v1.ApicurioRegistry; import io.fabric8.kubernetes.api.model.ObjectMeta; import io.quarkus.test.junit.QuarkusTest; import org.junit.jupiter.api.Test; +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; + @QuarkusTest public class DeploymentITTest extends ITBase { - @Test - void demoDeployment() { - // Arrange - var registry = new ApicurioRegistry(); - var meta = new ObjectMeta(); - meta.setName("demo"); - meta.setNamespace(getNamespace()); - registry.setMetadata(meta); + @Test + void demoDeployment() { + 
// Arrange + var registry = new ApicurioRegistry(); + var meta = new ObjectMeta(); + meta.setName("demo"); + meta.setNamespace(getNamespace()); + registry.setMetadata(meta); - // Act - client.resources(ApicurioRegistry.class).inNamespace(getNamespace()).create(registry); + // Act + client.resources(ApicurioRegistry.class).inNamespace(getNamespace()).create(registry); - // Assert - await() - .ignoreExceptions() - .until( - () -> { - assertThat( - client - .apps() - .deployments() - .inNamespace(getNamespace()) - .withName("demo") - .get() - .getStatus() - .getReadyReplicas()) - .isEqualTo(1); - return true; - }); - } + // Assert + await().ignoreExceptions().until(() -> { + assertThat(client.apps().deployments().inNamespace(getNamespace()).withName("demo").get() + .getStatus().getReadyReplicas()).isEqualTo(1); + return true; + }); + } } diff --git a/operator/controller/src/test/java/io/apicurio/registry/operator/it/ITBase.java b/operator/controller/src/test/java/io/apicurio/registry/operator/it/ITBase.java index c592b83141..41dbe9d902 100644 --- a/operator/controller/src/test/java/io/apicurio/registry/operator/it/ITBase.java +++ b/operator/controller/src/test/java/io/apicurio/registry/operator/it/ITBase.java @@ -1,7 +1,5 @@ package io.apicurio.registry.operator.it; -import static org.assertj.core.api.Assertions.assertThat; - import io.apicur.registry.v1.ApicurioRegistry; import io.apicurio.registry.operator.Constants; import io.fabric8.kubernetes.api.model.HasMetadata; @@ -20,9 +18,6 @@ import jakarta.enterprise.inject.Instance; import jakarta.enterprise.inject.spi.CDI; import jakarta.enterprise.util.TypeLiteral; -import java.io.FileInputStream; -import java.time.Duration; -import java.util.UUID; import org.awaitility.Awaitility; import org.eclipse.microprofile.config.ConfigProvider; import org.junit.jupiter.api.AfterAll; @@ -31,213 +26,185 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.TestInfo; +import java.io.FileInputStream; +import 
java.time.Duration; +import java.util.UUID; + +import static org.assertj.core.api.Assertions.assertThat; + public class ITBase { - public static final String DEPLOYMENT_TARGET = "test.operator.deployment-target"; - public static final String OPERATOR_DEPLOYMENT_PROP = "test.operator.deployment"; - public static final String CLEANUP = "test.operator.cleanup"; - public static final String GENERATED_RESOURCES_FOLDER = "target/kubernetes/"; - public static final String CRD_FILE = "../model/src/main/resources/kubernetes/crd.yml"; - - public enum OperatorDeployment { - local, - remote - } - - protected static OperatorDeployment operatorDeployment; - protected static Instance> reconcilers; - protected static QuarkusConfigurationService configuration; - protected static KubernetesClient client; - protected static String deploymentTarget; - protected static String namespace; - protected static boolean cleanup; - private static Operator operator; - - @BeforeAll - public static void before() throws Exception { - configuration = CDI.current().select(QuarkusConfigurationService.class).get(); - reconcilers = CDI.current().select(new TypeLiteral<>() {}); - operatorDeployment = - ConfigProvider.getConfig() - .getOptionalValue(OPERATOR_DEPLOYMENT_PROP, OperatorDeployment.class) - .orElse(OperatorDeployment.local); - deploymentTarget = - ConfigProvider.getConfig() - .getOptionalValue(DEPLOYMENT_TARGET, String.class) - .orElse("kubernetes"); - cleanup = ConfigProvider.getConfig().getOptionalValue(CLEANUP, Boolean.class).orElse(true); - - setDefaultAwaitilityTimings(); - calculateNamespace(); - createK8sClient(); - createCRDs(); - createNamespace(); - - if (operatorDeployment == OperatorDeployment.remote) { - createGeneratedResources(); - } else { - createOperator(); - registerReconcilers(); - operator.start(); + public static final String DEPLOYMENT_TARGET = "test.operator.deployment-target"; + public static final String OPERATOR_DEPLOYMENT_PROP = "test.operator.deployment"; + public 
static final String CLEANUP = "test.operator.cleanup"; + public static final String GENERATED_RESOURCES_FOLDER = "target/kubernetes/"; + public static final String CRD_FILE = "../model/src/main/resources/kubernetes/crd.yml"; + + public enum OperatorDeployment { + local, remote + } + + protected static OperatorDeployment operatorDeployment; + protected static Instance> reconcilers; + protected static QuarkusConfigurationService configuration; + protected static KubernetesClient client; + protected static String deploymentTarget; + protected static String namespace; + protected static boolean cleanup; + private static Operator operator; + + @BeforeAll + public static void before() throws Exception { + configuration = CDI.current().select(QuarkusConfigurationService.class).get(); + reconcilers = CDI.current().select(new TypeLiteral<>() { + }); + operatorDeployment = ConfigProvider.getConfig() + .getOptionalValue(OPERATOR_DEPLOYMENT_PROP, OperatorDeployment.class) + .orElse(OperatorDeployment.local); + deploymentTarget = ConfigProvider.getConfig().getOptionalValue(DEPLOYMENT_TARGET, String.class) + .orElse("kubernetes"); + cleanup = ConfigProvider.getConfig().getOptionalValue(CLEANUP, Boolean.class).orElse(true); + + setDefaultAwaitilityTimings(); + calculateNamespace(); + createK8sClient(); + createCRDs(); + createNamespace(); + + if (operatorDeployment == OperatorDeployment.remote) { + createGeneratedResources(); + } else { + createOperator(); + registerReconcilers(); + operator.start(); + } + } + + @BeforeEach + public void beforeEach(TestInfo testInfo) { + String testClassName = testInfo.getTestClass().map(c -> c.getSimpleName() + ".").orElse(""); + Log.info("\n------- STARTING: " + testClassName + testInfo.getDisplayName() + "\n" + + "------- Namespace: " + namespace + "\n" + "------- Mode: " + + ((operatorDeployment == OperatorDeployment.remote) ? 
"remote" : "local") + "\n" + + "------- Deployment target: " + deploymentTarget); + } + + private static void createK8sClient() { + client = new KubernetesClientBuilder() + .withConfig(new ConfigBuilder(Config.autoConfigure(null)).withNamespace(namespace).build()) + .build(); } - } - - @BeforeEach - public void beforeEach(TestInfo testInfo) { - String testClassName = testInfo.getTestClass().map(c -> c.getSimpleName() + ".").orElse(""); - Log.info( - "\n------- STARTING: " - + testClassName - + testInfo.getDisplayName() - + "\n" - + "------- Namespace: " - + namespace - + "\n" - + "------- Mode: " - + ((operatorDeployment == OperatorDeployment.remote) ? "remote" : "local") - + "\n" - + "------- Deployment target: " - + deploymentTarget); - } - - private static void createK8sClient() { - client = - new KubernetesClientBuilder() - .withConfig( - new ConfigBuilder(Config.autoConfigure(null)).withNamespace(namespace).build()) - .build(); - } - - private static void createGeneratedResources() throws Exception { - Log.info("Creating generated resources into Namespace " + namespace); - try (var fis = new FileInputStream(GENERATED_RESOURCES_FOLDER + deploymentTarget + ".json")) { - KubernetesList resources = Serialization.unmarshal(fis); - - resources.getItems().stream() - .forEach( - r -> { + + private static void createGeneratedResources() throws Exception { + Log.info("Creating generated resources into Namespace " + namespace); + try (var fis = new FileInputStream(GENERATED_RESOURCES_FOLDER + deploymentTarget + ".json")) { + KubernetesList resources = Serialization.unmarshal(fis); + + resources.getItems().stream().forEach(r -> { if (r.getKind().equals("ClusterRoleBinding") && r instanceof ClusterRoleBinding) { - var crb = (ClusterRoleBinding) r; - crb.getSubjects().stream().forEach(s -> s.setNamespace(getNamespace())); + var crb = (ClusterRoleBinding) r; + crb.getSubjects().stream().forEach(s -> s.setNamespace(getNamespace())); } 
client.resource(r).inNamespace(getNamespace()).createOrReplace(); - }); + }); + } } - } - - private static void cleanGeneratedResources() throws Exception { - if (cleanup) { - Log.info("Deleting generated resources from Namespace " + namespace); - try (var fis = new FileInputStream(GENERATED_RESOURCES_FOLDER + deploymentTarget + ".json")) { - KubernetesList resources = Serialization.unmarshal(fis); - - resources.getItems().stream() - .forEach( - r -> { - if (r.getKind().equals("ClusterRoleBinding") && r instanceof ClusterRoleBinding) { - var crb = (ClusterRoleBinding) r; - crb.getSubjects().stream().forEach(s -> s.setNamespace(getNamespace())); - } - client.resource(r).inNamespace(getNamespace()).delete(); + + private static void cleanGeneratedResources() throws Exception { + if (cleanup) { + Log.info("Deleting generated resources from Namespace " + namespace); + try (var fis = new FileInputStream(GENERATED_RESOURCES_FOLDER + deploymentTarget + ".json")) { + KubernetesList resources = Serialization.unmarshal(fis); + + resources.getItems().stream().forEach(r -> { + if (r.getKind().equals("ClusterRoleBinding") && r instanceof ClusterRoleBinding) { + var crb = (ClusterRoleBinding) r; + crb.getSubjects().stream().forEach(s -> s.setNamespace(getNamespace())); + } + client.resource(r).inNamespace(getNamespace()).delete(); }); - } + } + } } - } - - private static void createCRDs() { - Log.info("Creating CRDs"); - try { - var crd = client.load(new FileInputStream(CRD_FILE)); - crd.createOrReplace(); - } catch (Exception e) { - Log.warn("Failed to create the CRD, retrying", e); - createCRDs(); + + private static void createCRDs() { + Log.info("Creating CRDs"); + try { + var crd = client.load(new FileInputStream(CRD_FILE)); + crd.createOrReplace(); + } catch (Exception e) { + Log.warn("Failed to create the CRD, retrying", e); + createCRDs(); + } } - } - private static void registerReconcilers() { - Log.info( - "Registering reconcilers for operator : " + operator + " [" + 
operatorDeployment + "]"); + private static void registerReconcilers() { + Log.info("Registering reconcilers for operator : " + operator + " [" + operatorDeployment + "]"); - for (Reconciler reconciler : reconcilers) { - Log.info("Register and apply : " + reconciler.getClass().getName()); - operator.register(reconciler); + for (Reconciler reconciler : reconcilers) { + Log.info("Register and apply : " + reconciler.getClass().getName()); + operator.register(reconciler); + } } - } - private static void createOperator() { - operator = - new Operator( - configurationServiceOverrider -> { - configurationServiceOverrider.withKubernetesClient(client); - }); - } - - private static void createNamespace() { - Log.info("Creating Namespace " + namespace); - client - .resource( - new NamespaceBuilder() - .withNewMetadata() - .addToLabels("app", "apicurio-registry-operator-test") - .withName(namespace) - .endMetadata() - .build()) - .create(); - } - - private static void calculateNamespace() { - namespace = ("apicurio-registry-operator-test-" + UUID.randomUUID()).substring(0, 63); - } - - private static void setDefaultAwaitilityTimings() { - Awaitility.setDefaultPollInterval(Duration.ofSeconds(1)); - Awaitility.setDefaultTimeout(Duration.ofSeconds(360)); - } - - @AfterEach - public void cleanup() { - if (cleanup) { - Log.info("Deleting CRs"); - client.resources(ApicurioRegistry.class).delete(); - Awaitility.await() - .untilAsserted( - () -> { - var registryDeployments = - client - .apps() - .deployments() - .inNamespace(namespace) - .withLabels(Constants.BASIC_LABELS) - .list() - .getItems(); - assertThat(registryDeployments.size()).isZero(); - }); + private static void createOperator() { + operator = new Operator(configurationServiceOverrider -> { + configurationServiceOverrider.withKubernetesClient(client); + }); } - } - @AfterAll - public static void after() throws Exception { + private static void createNamespace() { + Log.info("Creating Namespace " + namespace); + 
client.resource( + new NamespaceBuilder().withNewMetadata().addToLabels("app", "apicurio-registry-operator-test") + .withName(namespace).endMetadata().build()) + .create(); + } - if (operatorDeployment == OperatorDeployment.local) { - Log.info("Stopping Operator"); - operator.stop(); + private static void calculateNamespace() { + namespace = ("apicurio-registry-operator-test-" + UUID.randomUUID()).substring(0, 63); + } - Log.info("Creating new K8s Client"); - // create a new client bc operator has closed the old one - createK8sClient(); - } else { - cleanGeneratedResources(); + private static void setDefaultAwaitilityTimings() { + Awaitility.setDefaultPollInterval(Duration.ofSeconds(1)); + Awaitility.setDefaultTimeout(Duration.ofSeconds(360)); } - if (cleanup) { - Log.info("Deleting namespace : " + namespace); - assertThat(client.namespaces().withName(namespace).delete()).isNotNull(); + @AfterEach + public void cleanup() { + if (cleanup) { + Log.info("Deleting CRs"); + client.resources(ApicurioRegistry.class).delete(); + Awaitility.await().untilAsserted(() -> { + var registryDeployments = client.apps().deployments().inNamespace(namespace) + .withLabels(Constants.BASIC_LABELS).list().getItems(); + assertThat(registryDeployments.size()).isZero(); + }); + } + } + + @AfterAll + public static void after() throws Exception { + + if (operatorDeployment == OperatorDeployment.local) { + Log.info("Stopping Operator"); + operator.stop(); + + Log.info("Creating new K8s Client"); + // create a new client bc operator has closed the old one + createK8sClient(); + } else { + cleanGeneratedResources(); + } + + if (cleanup) { + Log.info("Deleting namespace : " + namespace); + assertThat(client.namespaces().withName(namespace).delete()).isNotNull(); + } + client.close(); } - client.close(); - } - public static String getNamespace() { - return namespace; - } + public static String getNamespace() { + return namespace; + } } diff --git 
a/operator/controller/src/test/java/io/apicurio/registry/operator/unit/StatusUpdaterTest.java b/operator/controller/src/test/java/io/apicurio/registry/operator/unit/StatusUpdaterTest.java index 3648ac1d34..91b61e32cd 100644 --- a/operator/controller/src/test/java/io/apicurio/registry/operator/unit/StatusUpdaterTest.java +++ b/operator/controller/src/test/java/io/apicurio/registry/operator/unit/StatusUpdaterTest.java @@ -1,36 +1,36 @@ package io.apicurio.registry.operator.unit; -import static org.assertj.core.api.Assertions.assertThat; - import io.apicur.registry.v1.ApicurioRegistry; import io.apicur.registry.v1.apicurioregistrystatus.Conditions; import io.apicurio.registry.operator.StatusUpdater; import io.fabric8.kubernetes.api.model.ObjectMeta; import org.junit.jupiter.api.Test; +import static org.assertj.core.api.Assertions.assertThat; + public class StatusUpdaterTest { - private static final ApicurioRegistry defaultRegistry = new ApicurioRegistry(); - - static { - var meta = new ObjectMeta(); - meta.setName("dummy"); - meta.setNamespace("default"); - defaultRegistry.setMetadata(meta); - } - - @Test - void shouldReturnAnErrorStatus() { - // Arrange - var su = new StatusUpdater(defaultRegistry); - - // Act - var status = su.errorStatus(new RuntimeException("hello world")); - - // Assert - assertThat(status).isNotNull(); - assertThat(status.getConditions()).singleElement(); - assertThat(status.getConditions().get(0).getStatus()).isEqualTo(Conditions.Status.TRUE); - assertThat(status.getConditions().get(0).getType()).isEqualTo("ERROR"); - } + private static final ApicurioRegistry defaultRegistry = new ApicurioRegistry(); + + static { + var meta = new ObjectMeta(); + meta.setName("dummy"); + meta.setNamespace("default"); + defaultRegistry.setMetadata(meta); + } + + @Test + void shouldReturnAnErrorStatus() { + // Arrange + var su = new StatusUpdater(defaultRegistry); + + // Act + var status = su.errorStatus(new RuntimeException("hello world")); + + // Assert + 
assertThat(status).isNotNull(); + assertThat(status.getConditions()).singleElement(); + assertThat(status.getConditions().get(0).getStatus()).isEqualTo(Conditions.Status.TRUE); + assertThat(status.getConditions().get(0).getType()).isEqualTo("ERROR"); + } } diff --git a/operator/model/pom.xml b/operator/model/pom.xml index 12c1e66ea1..a741d2be1d 100644 --- a/operator/model/pom.xml +++ b/operator/model/pom.xml @@ -1,83 +1,80 @@ - - - 4.0.0 + + + 4.0.0 - - apicurio-registry-operator - io.apicurio - 3.0.0-SNAPSHOT - ../pom.xml - + + io.apicurio + apicurio-registry-operator + 3.0.0-SNAPSHOT + ../pom.xml + + apicurio-registry-operator-model - Apicurio Registry Operator :: Model - apicurio-registry-operator-model + Apicurio Registry Operator :: Model - - - io.fabric8 - generator-annotations - ${kubernetes-client-java-gen.version} - - - io.fabric8 - kubernetes-client - ${kubernetes-client-java-gen.version} - - + + + io.fabric8 + generator-annotations + ${kubernetes-client-java-gen.version} + + + io.fabric8 + kubernetes-client + ${kubernetes-client-java-gen.version} + + - - + + - - io.fabric8 - java-generator-maven-plugin - ${kubernetes-client-java-gen.version} - - - - generate - - - - - src/main/resources/kubernetes/crd.yml - - + + io.fabric8 + java-generator-maven-plugin + ${kubernetes-client-java-gen.version} + + src/main/resources/kubernetes/crd.yml + + + + + generate + + + + - - io.smallrye - jandex-maven-plugin - - - make-index - - jandex - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 3.6.0 - - - addSource - generate-sources - - add-source - - - - ${project.basedir}/target/generated-sources/java - - - - - - - - \ No newline at end of file + + io.smallrye + jandex-maven-plugin + + + make-index + + jandex + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.6.0 + + + addSource + + add-source + + generate-sources + + + ${project.basedir}/target/generated-sources/java + + + + + + + + diff --git a/operator/pom.xml b/operator/pom.xml index 
0b01bd9d46..3341fa71bc 100644 --- a/operator/pom.xml +++ b/operator/pom.xml @@ -1,93 +1,67 @@ - - - 4.0.0 + + + 4.0.0 - - apicurio-registry - io.apicurio - 3.0.0-SNAPSHOT - ../pom.xml - + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../pom.xml + + apicurio-registry-operator + pom - Apicurio Registry Operator - apicurio-registry-operator - pom + Apicurio Registry Operator - - 3.8.1 - true - 17 - 17 - 17 - UTF-8 - UTF-8 - 4.2.1 - 6.13.0 - 3.26.0 - 1.70 - - - 3.4.2 - 6.3.3 - apicurio - + + model + controller + - - - - io.quarkiverse.operatorsdk - quarkus-operator-sdk-bom - ${quarkus.operator.sdk.version} - pom - import - - - + + 3.8.1 + true + 17 + 17 + 17 + UTF-8 + UTF-8 + 4.2.1 + 6.13.0 + 3.26.0 + 1.70 + + + 3.4.2 + 6.3.3 + apicurio + - - model - controller - + + + + io.quarkiverse.operatorsdk + quarkus-operator-sdk-bom + ${quarkus.operator.sdk.version} + pom + import + + + - - + + - - org.apache.maven.plugins - maven-surefire-plugin - - ${skipOperatorTests} - - org.jboss.logmanager.LogManager - - - - - - com.theoryinpractise - googleformatter-maven-plugin - 1.7.5 - - - reformat-sources - - false - - false - false - true - 100 - - - format - - process-sources - - - - - - \ No newline at end of file + + org.apache.maven.plugins + maven-surefire-plugin + + ${skipOperatorTests} + + org.jboss.logmanager.LogManager + + + + + + diff --git a/pom.xml b/pom.xml index 0328940c39..bf8fb948e2 100644 --- a/pom.xml +++ b/pom.xml @@ -1,1164 +1,1217 @@ - 4.0.0 + 4.0.0 - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - pom - apicurio-registry + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + pom + apicurio-registry + Open Source API & Schema Registry - https://www.apicur.io/ - Open Source API & Schema Registry + https://www.apicur.io/ - - Red Hat - https://www.redhat.com/ - + + Red Hat + https://www.redhat.com/ + - - - Apache License Version 2.0 - https://repository.jboss.org/licenses/apache-2.0.txt - repo - - + + + Apache License Version 2.0 + 
https://repository.jboss.org/licenses/apache-2.0.txt + repo + + - - GitHub - https://github.com/apicurio/apicurio-registry/issues - + + + EricWittmann + Eric Wittmann + eric.wittmann@redhat.com + Red Hat + + Project Lead + Developer + + America/New_York + + + jsenko + Jakub Senko + m@jsenko.net + Red Hat + + Developer + + Europe/Prague + + + carlesarnal + Carles Arnal + carnalca@redhat.com + Red Hat + + Developer + + Europe/Madrid + + + andreaTP + Andrea Peruffo + aperuffo@redhat.com + Red Hat + + Developer + + Europe/Lisbon + + - - scm:git:git@github.com:apicurio/apicurio-registry.git - scm:git:git@github.com:apicurio/apicurio-registry.git - scm:git:git@github.com:apicurio/apicurio-registry.git - + + common + app + utils/converter + utils/kafka + utils/maven-plugin + utils/tools + utils/importexport + utils/exportConfluent + utils/protobuf-schema-utilities + utils/tests + schema-resolver + serdes/serde-common + serdes/avro-serde + serdes/protobuf-serde + serdes/jsonschema-serde + distro + docs + java-sdk + go-sdk - - - Eric Wittmann - EricWittmann - eric.wittmann@redhat.com - Red Hat - - Project Lead - Developer - - America/New_York - - - Jakub Senko - jsenko - m@jsenko.net - Red Hat - - Developer - - Europe/Prague - - - Carles Arnal - carlesarnal - carnalca@redhat.com - Red Hat - - Developer - - Europe/Madrid - - - Andrea Peruffo - andreaTP - aperuffo@redhat.com - Red Hat - - Developer - - Europe/Lisbon - - + + schema-util/common + schema-util/json + schema-util/protobuf + schema-util/asyncapi + schema-util/avro + schema-util/graphql + schema-util/kconnect + schema-util/openapi + schema-util/wsdl + schema-util/xml + schema-util/xsd + schema-util/util-provider - - common - app - utils/converter - utils/kafka - utils/maven-plugin - utils/tools - utils/importexport - utils/exportConfluent - utils/protobuf-schema-utilities - utils/tests - schema-resolver - serdes/serde-common - serdes/avro-serde - serdes/protobuf-serde - serdes/jsonschema-serde - distro - docs - 
java-sdk - go-sdk + operator + - - schema-util/common - schema-util/json - schema-util/protobuf - schema-util/asyncapi - schema-util/avro - schema-util/graphql - schema-util/kconnect - schema-util/openapi - schema-util/wsdl - schema-util/xml - schema-util/xsd - schema-util/util-provider + + scm:git:git@github.com:apicurio/apicurio-registry.git + scm:git:git@github.com:apicurio/apicurio-registry.git + scm:git:git@github.com:apicurio/apicurio-registry.git + - operator - + + GitHub + https://github.com/apicurio/apicurio-registry/issues + - - UTF-8 - UTF-8 - yyyy-MM-dd HH:mm:ss - ${maven.build.timestamp} + + UTF-8 + UTF-8 + yyyy-MM-dd HH:mm:ss + ${maven.build.timestamp} - 17 - 17 + 17 + 17 - posix + posix - - 16.17.0 + + 16.17.0 - - 3.11.1 + + 3.11.1 - - 3.1.8 + + 3.1.8 - - 1.11.3 - 1.4.0 - 4.9.9 - 4.12.0 - 4.9.9 - 3.9.0 - 3.9.0 - 3.9.0 - 75.1 - 3.19.6 - 4.0.2 - 2.40.0 - 1.6.3 - 1.4.2 + + 1.11.3 + 1.4.0 + 4.9.9 + 4.12.0 + 4.9.9 + 3.9.0 + 3.9.0 + 3.9.0 + 75.1 + 3.19.6 + 4.0.2 + 2.40.0 + 1.6.3 + 1.4.2 - - 4.6.3 + + 4.6.3 - - 7.6.1 + + 7.6.1 - - dev + + dev - - false + + false - - true + + true - - true + + true - - 2.0.4 + + 2.0.4 - - 22.1 + + 22.1 - - 1.14.4 - 20240303 - 2.17.1 - 2.15.2 + + 1.14.4 + + 20240303 + 2.17.1 + 2.15.2 - - 1.18.32 - 1.4.199 - 1.17.0 - 1.2.1.Final - 4.5.14 - 0.1.18.Final - 1.2.0 - 0.15.0 - 3.6.0 - 2.2 - 33.2.1-jre - 6.6.2 - 6.10.0.202406032230-r - 4.2.1 + + 1.18.32 + 1.4.199 + 1.17.0 + 1.2.1.Final + 4.5.14 + 0.1.18.Final + 1.2.0 + 0.15.0 + 3.6.0 + 2.2 + 33.2.1-jre + 6.6.2 + 6.10.0.202406032230-r + 4.2.1 - - 3.7.0 + + 3.7.0 - - 1.3.5 + + 1.3.5 - - 4.4.0 - 2.13.14 + + 4.4.0 + 2.13.14 - - 3.13.0 - 3.1.2 - 3.3.0 - 3.7.0 - 3.3.1 - 3.3.0 - 3.4.1 - 3.4.0 - 1.2.1 - 3.7.0 - 3.7.1 - 3.3.1 - 3.3.2 - 3.1.0 + + 3.13.0 + 3.1.2 + 3.3.0 + 3.7.0 + 3.3.1 + 3.3.0 + 3.4.1 + 3.4.0 + 1.2.1 + 3.7.0 + 3.7.1 + 3.3.1 + 3.3.2 + 3.1.0 + 2.43.0 - - 8.45.1 + + 8.45.1 - - 0.6.1 - 1.7.1 + + 0.6.1 + 1.7.1 - - 1.19.7 - 1.9.0 - 21.1.2 - 2.0.7 - 0.105.0 - 2.35.2 - 0.106.0 - + + 
1.19.7 + 1.9.0 + 21.1.2 + 2.0.7 + 0.105.0 + 2.35.2 + 0.106.0 + - - - - - io.apicurio - apicurio-registry-common - ${project.version} - - - io.apicurio - apicurio-registry-app - ${project.version} - - - io.apicurio - apicurio-registry-java-sdk - ${project.version} - - - io.apicurio - apicurio-registry-java-sdk-common - ${project.version} - - - io.apicurio - apicurio-registry-utils-kafka - ${project.version} - - - io.apicurio - apicurio-registry-utils-kafka - ${project.version} - test - test-jar - - - io.apicurio - apicurio-registry-utils-converter - ${project.version} - - - io.apicurio - apicurio-registry-maven-plugin - test - ${project.version} - maven-plugin - - - io.apicurio - apicurio-registry-app - ${project.version} - test-jar - - - io.apicurio - apicurio-registry-utils-tests - ${project.version} - - - io.apicurio - apicurio-registry-integration-tests - ${project.version} - - - io.apicurio - apicurio-registry-examples - ${project.version} - - - io.apicurio - apicurio-registry-serde-common - ${project.version} - - - io.apicurio - apicurio-registry-serdes-avro-serde - ${project.version} - - - io.apicurio - apicurio-registry-serdes-protobuf-serde - ${project.version} - - - io.apicurio - apicurio-registry-serdes-jsonschema-serde - ${project.version} - - - io.apicurio - apicurio-registry-utils-import-export - ${project.version} - - - io.apicurio - apicurio-registry-protobuf-schema-utilities - ${project.version} - - - io.apicurio - apicurio-common-rest-client-vertx - ${apicurio-common-rest-client.version} - - - io.apicurio - apicurio-common-rest-client-jdk - ${apicurio-common-rest-client.version} - - - io.apicurio - apicurio-common-rest-client-common - ${apicurio-common-rest-client.version} - + + + + + io.apicurio + apicurio-registry-common + ${project.version} + + + io.apicurio + apicurio-registry-app + ${project.version} + + + io.apicurio + apicurio-registry-java-sdk + ${project.version} + + + io.apicurio + apicurio-registry-java-sdk-common + ${project.version} + 
+ + io.apicurio + apicurio-registry-utils-kafka + ${project.version} + + + io.apicurio + apicurio-registry-utils-kafka + ${project.version} + test-jar + test + + + io.apicurio + apicurio-registry-utils-converter + ${project.version} + + + io.apicurio + apicurio-registry-maven-plugin + ${project.version} + maven-plugin + test + + + io.apicurio + apicurio-registry-app + ${project.version} + test-jar + + + io.apicurio + apicurio-registry-utils-tests + ${project.version} + + + io.apicurio + apicurio-registry-integration-tests + ${project.version} + + + io.apicurio + apicurio-registry-examples + ${project.version} + + + io.apicurio + apicurio-registry-serde-common + ${project.version} + + + io.apicurio + apicurio-registry-serdes-avro-serde + ${project.version} + + + io.apicurio + apicurio-registry-serdes-protobuf-serde + ${project.version} + + + io.apicurio + apicurio-registry-serdes-jsonschema-serde + ${project.version} + + + io.apicurio + apicurio-registry-utils-import-export + ${project.version} + + + io.apicurio + apicurio-registry-protobuf-schema-utilities + ${project.version} + + + io.apicurio + apicurio-common-rest-client-vertx + ${apicurio-common-rest-client.version} + + + io.apicurio + apicurio-common-rest-client-jdk + ${apicurio-common-rest-client.version} + + + io.apicurio + apicurio-common-rest-client-common + ${apicurio-common-rest-client.version} + - - io.apicurio - apicurio-common-app-components-core - ${apicurio-common-app-components.version} - - - io.apicurio - apicurio-common-app-components-logging - ${apicurio-common-app-components.version} - - - io.apicurio - apicurio-common-app-components-config - ${apicurio-common-app-components.version} - - - io.apicurio - apicurio-common-app-components-config-definitions - ${apicurio-common-app-components.version} - - - io.apicurio - apicurio-common-app-components-config-index - ${apicurio-common-app-components.version} - - - io.apicurio - apicurio-common-app-components-auth - 
${apicurio-common-app-components.version} - - - io.apicurio - apicurio-registry-schema-util-provider - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-common - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-json - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-protobuf - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-asyncapi - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-avro - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-graphql - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-kconnect - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-openapi - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-wsdl - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-xml - ${project.version} - - - io.apicurio - apicurio-registry-schema-util-xsd - ${project.version} - - - io.apicurio - apicurio-registry-schema-resolver - ${project.version} - + + io.apicurio + apicurio-common-app-components-core + ${apicurio-common-app-components.version} + + + io.apicurio + apicurio-common-app-components-logging + ${apicurio-common-app-components.version} + + + io.apicurio + apicurio-common-app-components-config + ${apicurio-common-app-components.version} + + + io.apicurio + apicurio-common-app-components-config-definitions + ${apicurio-common-app-components.version} + + + io.apicurio + apicurio-common-app-components-config-index + ${apicurio-common-app-components.version} + + + io.apicurio + apicurio-common-app-components-auth + ${apicurio-common-app-components.version} + + + io.apicurio + apicurio-registry-schema-util-provider + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-common + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-json + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-protobuf + ${project.version} + + + 
io.apicurio + apicurio-registry-schema-util-asyncapi + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-avro + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-graphql + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-kconnect + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-openapi + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-wsdl + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-xml + ${project.version} + + + io.apicurio + apicurio-registry-schema-util-xsd + ${project.version} + + + io.apicurio + apicurio-registry-schema-resolver + ${project.version} + - - - io.quarkus - quarkus-bom - ${quarkus.version} - pom - import - - - org.apache.kafka - connect-api - ${connect.version} - - - org.apache.kafka - connect-json - ${connect.version} - + + + io.quarkus + quarkus-bom + ${quarkus.version} + pom + import + + + org.apache.kafka + connect-api + ${connect.version} + + + org.apache.kafka + connect-json + ${connect.version} + - - - org.apache.kafka - kafka-clients - ${kafka-clients.version} - - - io.apicurio - apicurio-data-models - ${apicurio-data-models.version} - - - org.apache.avro - avro - ${avro.version} - - - com.networknt - json-schema-validator - ${json-schema-validator.version} - - - com.squareup.wire - wire-schema - ${wire-schema.version} - - - com.squareup.wire - wire-compiler - ${wire-compiler.version} - - - - com.squareup.wire - wire-grpc-client - - - - - kotlinx-serialization-core - org.jetbrains.kotlinx - 1.7.0 - - - com.ibm.icu - icu4j - ${icu4j.version} - - - com.squareup.okio - okio - ${okio.version} - - - com.squareup.okio - okio-jvm - ${okio-jvm.version} - - - com.squareup.okio - okhttp - ${okhttp.version} - - - com.squareup.okio - okio-fakefilesystem - ${okio-fake-file-system.version} - - - com.google.protobuf - protobuf-java - ${protobuf.version} - - - com.google.protobuf - protobuf-java-util - ${protobuf.version} - - - 
com.google.api.grpc - proto-google-common-protos - ${protobuf.googleapi.types.version} - - - org.projectlombok - lombok - ${lombok.version} - - - org.apache.santuario - xmlsec - ${xmlsec.version} - - - wsdl4j - wsdl4j - ${wsdl4j.version} - - - commons-codec - commons-codec - ${commons-codec.version} - - - org.apache.httpcomponents - httpclient - ${httpclient.version} - - - org.eclipse.jgit - org.eclipse.jgit - ${jgit.version} - + + + + org.apache.kafka + kafka-clients + ${kafka-clients.version} + + + io.apicurio + apicurio-data-models + ${apicurio-data-models.version} + + + org.apache.avro + avro + ${avro.version} + + + com.networknt + json-schema-validator + ${json-schema-validator.version} + + + com.squareup.wire + wire-schema + ${wire-schema.version} + + + com.squareup.wire + wire-compiler + ${wire-compiler.version} + + + + com.squareup.wire + wire-grpc-client + + + + + org.jetbrains.kotlinx + kotlinx-serialization-core + 1.7.0 + + + com.ibm.icu + icu4j + ${icu4j.version} + + + com.squareup.okio + okio + ${okio.version} + + + com.squareup.okio + okio-jvm + ${okio-jvm.version} + + + com.squareup.okio + okhttp + ${okhttp.version} + + + com.squareup.okio + okio-fakefilesystem + ${okio-fake-file-system.version} + + + com.google.protobuf + protobuf-java + ${protobuf.version} + + + com.google.protobuf + protobuf-java-util + ${protobuf.version} + + + com.google.api.grpc + proto-google-common-protos + ${protobuf.googleapi.types.version} + + + org.projectlombok + lombok + ${lombok.version} + + + org.apache.santuario + xmlsec + ${xmlsec.version} + + + wsdl4j + wsdl4j + ${wsdl4j.version} + + + commons-codec + commons-codec + ${commons-codec.version} + + + org.apache.httpcomponents + httpclient + ${httpclient.version} + + + org.eclipse.jgit + org.eclipse.jgit + ${jgit.version} + - - - com.graphql-java - graphql-java - ${graphql.version} - + + + com.graphql-java + graphql-java + ${graphql.version} + - - com.github.everit-org.json-schema - org.everit.json.schema - 
${org.everit.json.schema.version} - - - org.json - json - ${org.json.version} - - - com.fasterxml.jackson.datatype - jackson-datatype-json-org - ${jackson-datatype-json-org.version} - - - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - ${jackson-dataformat-yaml.version} - + + com.github.everit-org.json-schema + org.everit.json.schema + ${org.everit.json.schema.version} + + + org.json + json + ${org.json.version} + + + com.fasterxml.jackson.datatype + jackson-datatype-json-org + ${jackson-datatype-json-org.version} + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + ${jackson-dataformat-yaml.version} + - - io.strimzi - kafka-oauth-client - ${kafka-oauth-client.version} - + + io.strimzi + kafka-oauth-client + ${kafka-oauth-client.version} + - - com.google.guava - guava - ${guava.version} - + + com.google.guava + guava + ${guava.version} + - - com.fasterxml.woodstox - woodstox-core - ${woodstox-core.version} - + + com.fasterxml.woodstox + woodstox-core + ${woodstox-core.version} + + + + io.confluent + kafka-avro-serializer + ${confluent.version} + test + + + org.glassfish.jersey.ext + jersey-bean-validation + + + + + io.confluent + kafka-schema-registry-client + ${confluent.version} + + + org.scala-lang + scala-library + ${scala.version} + test + + + io.confluent + kafka-protobuf-serializer + ${confluent.version} + test + + + org.glassfish.jersey.ext + jersey-bean-validation + + + + + io.confluent + kafka-json-schema-serializer + ${confluent.version} + test + + + org.glassfish.jersey.ext + jersey-bean-validation + + + + + io.confluent + kafka-connect-avro-converter + ${confluent.version} + test + + + org.glassfish.jersey.ext + jersey-bean-validation + + + + + io.strimzi + strimzi-test-container + ${strimzi-test-container.version} + + + io.zonky.test + embedded-postgres + ${embedded-postgres.version} + + + io.quarkus + quarkus-test-common + ${quarkus.version} + + + com.github.dasniko + testcontainers-keycloak + 
${keycloak.testcontainers.version} + + + org.keycloak + keycloak-admin-client-jakarta + ${keycloak-admin-client.version} + + + com.github.tomakehurst + wiremock-jre8 + ${wiremock-jre8.version} + + + com.google.truth.extensions + truth-proto-extension + ${google.truth.extension.version} + test + + + org.awaitility + awaitility + ${awaitility.version} + - - - io.confluent - kafka-avro-serializer - ${confluent.version} - test - - - org.glassfish.jersey.ext - jersey-bean-validation - - - - - io.confluent - kafka-schema-registry-client - ${confluent.version} - - - org.scala-lang - scala-library - ${scala.version} - test - - - io.confluent - kafka-protobuf-serializer - ${confluent.version} - test - - - org.glassfish.jersey.ext - jersey-bean-validation - - - - - io.confluent - kafka-json-schema-serializer - ${confluent.version} - test - - - org.glassfish.jersey.ext - jersey-bean-validation - - - - - io.confluent - kafka-connect-avro-converter - ${confluent.version} - test - - - org.glassfish.jersey.ext - jersey-bean-validation - - - - - io.strimzi - strimzi-test-container - ${strimzi-test-container.version} - - - io.zonky.test - embedded-postgres - ${embedded-postgres.version} - - - io.quarkus - quarkus-test-common - ${quarkus.version} - - - com.github.dasniko - testcontainers-keycloak - ${keycloak.testcontainers.version} - - - org.keycloak - keycloak-admin-client-jakarta - ${keycloak-admin-client.version} - - - com.github.tomakehurst - wiremock-jre8 - ${wiremock-jre8.version} - - - com.google.truth.extensions - truth-proto-extension - ${google.truth.extension.version} - test - - - org.awaitility - awaitility - ${awaitility.version} - + + + + + + + org.codehaus.mojo + properties-maven-plugin + ${version.properties.plugin} + + + org.apache.maven.plugins + maven-compiler-plugin + ${version.compiler.plugin} + + + org.apache.maven.plugins + maven-source-plugin + ${version.source.plugin} + + + org.apache.maven.plugins + maven-javadoc-plugin + ${version.javadoc.plugin} + + + 
org.apache.maven.plugins + maven-failsafe-plugin + ${version.failsafe.plugin} + + + org.apache.maven.plugins + maven-surefire-plugin + ${version.surefire.plugin} + + + org.apache.maven.plugins + maven-deploy-plugin + ${version.deploy.plugin} + + + org.apache.maven.plugins + maven-jar-plugin + ${version.jar.plugin} + + + org.apache.maven.plugins + maven-checkstyle-plugin + ${version.checkstyle.plugin} + + + io.smallrye + jandex-maven-plugin + ${jandex.version} + + + io.quarkus + quarkus-maven-plugin + ${quarkus.version} + + + org.apache.maven.plugins + maven-dependency-plugin + ${version.dependency.plugin} + + + org.apache.maven.plugins + maven-assembly-plugin + ${version.assembly.plugin} + + + org.apache.maven.plugins + maven-resources-plugin + ${version.resources.plugin} + + + org.apache.maven.plugins + maven-clean-plugin + ${version.clean.plugin} + + + io.gatling + gatling-maven-plugin + ${version.gatling.plugin} + + + net.alchim31.maven + scala-maven-plugin + 4.9.1 + + incremental + + -deprecation + -explaintypes + -target:jvm-1.8 + + + + + scala-compile-first + + add-source + compile + + process-resources + + + scala-test-compile + + add-source + testCompile + + process-test-resources + + + + + + + + io.smallrye + jandex-maven-plugin + + + make-index + + jandex + + + + + + org.codehaus.mojo + properties-maven-plugin + + + + set-system-properties + + + + + quarkus.profile + ${quarkus.profile} + + + skipAppTests + ${skipAppTests} + + + skipOperatorTests + ${skipOperatorTests} + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${maven.compiler.source} + ${maven.compiler.target} + false + false + + + + org.apache.maven.plugins + maven-source-plugin + + + attach-sources + + jar-no-fork + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + + jakarta.annotation + jakarta.annotation-api + ${annotation-api.version} + + + false + false + ${maven.compiler.source} + + + + attach-javadocs + + jar + + + + + + org.apache.maven.plugins + 
maven-checkstyle-plugin + + + com.puppycrawl.tools + checkstyle + ${version.puppycrawl} + - + + + validate + + check + + validate + + .checkstyle/simple.xml + .checkstyle/java.header + .checkstyle/suppressions.xml + true + UTF-8 + true + true + + + + + + org.apache.maven.plugins + maven-antrun-plugin + ${version.maven-antrun.plugin} + + + + run + + initialize + + + + + + + + + + io.github.git-commit-id + git-commit-id-maven-plugin + 8.0.2 + + ${skipCommitIdPlugin} + true + ${project.build.directory}/meta/git.properties + full + + ^git.build.(time|version)$ + ^git.commit.id.(abbrev|full)$ + + + + + get-the-git-infos + + revision + + initialize + + + + + com.diffplug.spotless + spotless-maven-plugin + ${version.spotless} + + + + + *.md + .gitignore + + + + + true + 2 + + + + + + **/src/main/java/**/*.java + **/src/test/java/**/*.java + + + ide-config/eclipse-format.xml + + + ide-config/eclipse.importorder + + + + + + **/pom.xml + + + false + + + + + + format + + check + + process-sources + + + + + - - - - - org.codehaus.mojo - properties-maven-plugin - ${version.properties.plugin} - - - org.apache.maven.plugins - maven-compiler-plugin - ${version.compiler.plugin} - - - org.apache.maven.plugins - maven-source-plugin - ${version.source.plugin} - - - org.apache.maven.plugins - maven-javadoc-plugin - ${version.javadoc.plugin} - - - org.apache.maven.plugins - maven-failsafe-plugin - ${version.failsafe.plugin} - - - org.apache.maven.plugins - maven-surefire-plugin - ${version.surefire.plugin} - - - org.apache.maven.plugins - maven-deploy-plugin - ${version.deploy.plugin} - - - org.apache.maven.plugins - maven-jar-plugin - ${version.jar.plugin} - - - org.apache.maven.plugins - maven-checkstyle-plugin - ${version.checkstyle.plugin} - - - io.smallrye - jandex-maven-plugin - ${jandex.version} - - - io.quarkus - quarkus-maven-plugin - ${quarkus.version} - - - org.apache.maven.plugins - maven-dependency-plugin - ${version.dependency.plugin} - - - org.apache.maven.plugins - 
maven-assembly-plugin - ${version.assembly.plugin} - - - org.apache.maven.plugins - maven-resources-plugin - ${version.resources.plugin} - - - org.apache.maven.plugins - maven-clean-plugin - ${version.clean.plugin} - - - io.gatling - gatling-maven-plugin - ${version.gatling.plugin} - - - net.alchim31.maven - scala-maven-plugin - 4.9.1 - - incremental - - -deprecation - -explaintypes - -target:jvm-1.8 - - - - - scala-compile-first - process-resources - - add-source - compile - - - - scala-test-compile - process-test-resources - - add-source - testCompile - - - - - - + + + prod + + false + + + prod + + + + integration-tests + + integration-tests + + + + examples + + examples + + + + skip-auth-test + + + Windows + + + - - io.smallrye - jandex-maven-plugin - - - make-index - - jandex - - - - - - org.codehaus.mojo - properties-maven-plugin - - - - set-system-properties - - - - - quarkus.profile - ${quarkus.profile} - - - skipAppTests - ${skipAppTests} - - - skipOperatorTests - ${skipOperatorTests} - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - ${maven.compiler.source} - ${maven.compiler.target} - false - false - - - - org.apache.maven.plugins - maven-source-plugin - - - attach-sources - - jar-no-fork - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - - attach-javadocs - - jar - - - - - - - jakarta.annotation - jakarta.annotation-api - ${annotation-api.version} - - - false - false - ${maven.compiler.source} - - - - org.apache.maven.plugins - maven-checkstyle-plugin - - - com.puppycrawl.tools - checkstyle - ${version.puppycrawl} - - - - - validate - validate - - .checkstyle/simple.xml - .checkstyle/java.header - .checkstyle/suppressions.xml - true - UTF-8 - true - true - - - check - - - - - - org.apache.maven.plugins - maven-antrun-plugin - ${version.maven-antrun.plugin} - - - initialize - - - - - - - run - - - - - - io.github.git-commit-id - git-commit-id-maven-plugin - 8.0.2 - - - get-the-git-infos - - revision - - initialize - - + + 
org.apache.maven.plugins + maven-surefire-plugin + + + **/*SimpleAuthTest.java + **/*MojoAuthTest.java + **/*SimpleAuthIT.java + + + + + + + + release + + + ossrh + https://oss.sonatype.org/content/repositories/snapshots + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + true + + ossrh + https://oss.sonatype.org/ + true + + + + org.apache.maven.plugins + maven-gpg-plugin + 3.2.4 + + + sign-artifacts + + sign + + verify - ${skipCommitIdPlugin} - true - ${project.build.directory}/meta/git.properties - full - - ^git.build.(time|version)$ - ^git.commit.id.(abbrev|full)$ - + + + --pinentry-mode + loopback + - + + + - - - - - prod - - false - - - prod - - - - integration-tests - - integration-tests - - - - examples - - examples - - - - skip-auth-test - - - Windows - - - - - - org.apache.maven.plugins - maven-surefire-plugin - - - **/*SimpleAuthTest.java - **/*MojoAuthTest.java - **/*SimpleAuthIT.java - - - - - - - - release - - - ossrh - https://oss.sonatype.org/content/repositories/snapshots - - - - - - org.sonatype.plugins - nexus-staging-maven-plugin - true - - ossrh - https://oss.sonatype.org/ - true - - - - org.apache.maven.plugins - maven-gpg-plugin - 3.2.4 - - - sign-artifacts - verify - - sign - - - - - --pinentry-mode - loopback - - - - - - - - - - java8 - - [1.8,) - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - -Xdoclint:none - - - - - - - - github - - - github - GitHub Packages - https://maven.pkg.github.com/Apicurio/apicurio-registry - - - - + + github + + + github + GitHub Packages + https://maven.pkg.github.com/Apicurio/apicurio-registry + + + + - - external_repos - - - - !skipDefault - - - - - confluent - Confluent - https://packages.confluent.io/maven/ - - - jitpack.io - https://jitpack.io - - - jgit-repository - https://repo.eclipse.org/content/groups/releases/ - - - - + + external_repos + + + + !skipDefault + + + + + confluent + Confluent + https://packages.confluent.io/maven/ + + + jitpack.io + https://jitpack.io + + + 
jgit-repository + https://repo.eclipse.org/content/groups/releases/ + + + + diff --git a/schema-resolver/pom.xml b/schema-resolver/pom.xml index b7a6936bef..d18221720d 100644 --- a/schema-resolver/pom.xml +++ b/schema-resolver/pom.xml @@ -1,37 +1,34 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../pom.xml + - apicurio-registry-schema-resolver - jar - apicurio-registry-schema-resolver + apicurio-registry-schema-resolver + jar + apicurio-registry-schema-resolver - + - - io.apicurio - apicurio-registry-java-sdk - - - io.apicurio - apicurio-registry-common - + + io.apicurio + apicurio-registry-java-sdk + + + io.apicurio + apicurio-registry-common + - - org.junit.jupiter - junit-jupiter - test - + + org.junit.jupiter + junit-jupiter + test + + + - - diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/AbstractSchemaResolver.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/AbstractSchemaResolver.java index e3945fbe75..1ca32c82af 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/AbstractSchemaResolver.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/AbstractSchemaResolver.java @@ -57,7 +57,8 @@ public void configure(Map configs, SchemaParser schemaParser) { if (client == null) { String baseUrl = config.getRegistryUrl(); if (baseUrl == null) { - throw new IllegalArgumentException("Missing registry base url, set " + SchemaResolverConfig.REGISTRY_URL); + throw new IllegalArgumentException( + "Missing registry base url, set " + SchemaResolverConfig.REGISTRY_URL); } String authServerURL = config.getAuthServiceUrl(); @@ -65,22 +66,20 @@ public void configure(Map configs, SchemaParser schemaParser) { try { if (authServerURL != null || tokenEndpoint != null) { - client = configureClientWithBearerAuthentication(config, baseUrl, authServerURL, tokenEndpoint); - } - else { + client = 
configureClientWithBearerAuthentication(config, baseUrl, authServerURL, + tokenEndpoint); + } else { String username = config.getAuthUsername(); if (username != null) { client = configureClientWithBasicAuth(config, baseUrl, username); - } - else { + } else { var adapter = new VertXRequestAdapter(this.vertx); adapter.setBaseUrl(baseUrl); client = new RegistryClient(adapter); } } - } - catch (Exception e) { + } catch (Exception e) { throw new IllegalStateException(e); } } @@ -95,7 +94,8 @@ public void configure(Map configs, SchemaParser schemaParser) { schemaCache.configureFaultTolerantRefresh(config.getFaultTolerantRefresh()); schemaCache.configureGlobalIdKeyExtractor(SchemaLookupResult::getGlobalId); - schemaCache.configureContentKeyExtractor(schema -> Optional.ofNullable(schema.getParsedSchema().getRawSchema()).map(IoUtil::toString).orElse(null)); + schemaCache.configureContentKeyExtractor(schema -> Optional + .ofNullable(schema.getParsedSchema().getRawSchema()).map(IoUtil::toString).orElse(null)); schemaCache.configureContentIdKeyExtractor(SchemaLookupResult::getContentId); schemaCache.configureContentHashKeyExtractor(SchemaLookupResult::getContentHash); schemaCache.configureArtifactCoordinatesKeyExtractor(SchemaLookupResult::toArtifactCoordinates); @@ -129,7 +129,8 @@ public void setClient(RegistryClient client) { * @param artifactResolverStrategy the artifactResolverStrategy to set */ @Override - public void setArtifactResolverStrategy(ArtifactReferenceResolverStrategy artifactResolverStrategy) { + public void setArtifactResolverStrategy( + ArtifactReferenceResolverStrategy artifactResolverStrategy) { this.artifactResolverStrategy = artifactResolverStrategy; } @@ -142,21 +143,25 @@ public SchemaParser getSchemaParser() { } /** - * Resolve an artifact reference for the given record, and optional parsed schema. 
This will use - * the artifact resolver strategy and then override the values from that strategy with any explicitly configured - * values (groupId, artifactId, version). + * Resolve an artifact reference for the given record, and optional parsed schema. This will use the + * artifact resolver strategy and then override the values from that strategy with any explicitly + * configured values (groupId, artifactId, version). * * @param data * @param parsedSchema * @param isReference * @return artifact reference */ - protected ArtifactReference resolveArtifactReference(Record data, ParsedSchema parsedSchema, boolean isReference, String referenceArtifactId) { + protected ArtifactReference resolveArtifactReference(Record data, ParsedSchema parsedSchema, + boolean isReference, String referenceArtifactId) { ArtifactReference artifactReference = artifactResolverStrategy.artifactReference(data, parsedSchema); artifactReference = ArtifactReference.builder() - .groupId(this.explicitArtifactGroupId == null ? artifactReference.getGroupId() : this.explicitArtifactGroupId) - .artifactId(resolveArtifactId(artifactReference.getArtifactId(), isReference, referenceArtifactId)) - .version(this.explicitArtifactVersion == null ? artifactReference.getVersion() : this.explicitArtifactVersion) + .groupId(this.explicitArtifactGroupId == null ? artifactReference.getGroupId() + : this.explicitArtifactGroupId) + .artifactId(resolveArtifactId(artifactReference.getArtifactId(), isReference, + referenceArtifactId)) + .version(this.explicitArtifactVersion == null ? artifactReference.getVersion() + : this.explicitArtifactVersion) .build(); return artifactReference; @@ -165,8 +170,7 @@ protected ArtifactReference resolveArtifactReference(Record data, ParsedSchem protected String resolveArtifactId(String artifactId, boolean isReference, String referenceArtifactId) { if (isReference) { return referenceArtifactId; - } - else { + } else { return this.explicitArtifactId == null ? 
artifactId : this.explicitArtifactId; } } @@ -175,8 +179,7 @@ protected SchemaLookupResult resolveSchemaByGlobalId(long globalId) { return schemaCache.getByGlobalId(globalId, globalIdKey -> { if (deserializerDereference) { return resolveSchemaDereferenced(globalIdKey); - } - else { + } else { return resolveSchemaWithReferences(globalIdKey); } }); @@ -190,20 +193,14 @@ private SchemaLookupResult resolveSchemaDereferenced(long globalId) { config.queryParameters.references = HandleReferencesType.DEREFERENCE; }); - byte[] schema = IoUtil.toBytes(rawSchema); S parsed = schemaParser.parseSchema(schema, Collections.emptyMap()); - ParsedSchemaImpl ps = new ParsedSchemaImpl() - .setParsedSchema(parsed) - .setRawSchema(schema); + ParsedSchemaImpl ps = new ParsedSchemaImpl().setParsedSchema(parsed).setRawSchema(schema); SchemaLookupResult.SchemaLookupResultBuilder result = SchemaLookupResult.builder(); - return result - .globalId(globalId) - .parsedSchema(ps) - .build(); + return result.globalId(globalId).parsedSchema(ps).build(); } private SchemaLookupResult resolveSchemaWithReferences(long globalId) { @@ -212,63 +209,69 @@ private SchemaLookupResult resolveSchemaWithReferences(long globalId) { config.headers.add("CANONICAL", "false"); }); - //Get the artifact references - final List artifactReferences = client.ids().globalIds().byGlobalId(globalId).references().get(); - //If there are any references for the schema being parsed, resolve them before parsing the schema + // Get the artifact references + final List artifactReferences = client + .ids().globalIds().byGlobalId(globalId).references().get(); + // If there are any references for the schema being parsed, resolve them before parsing the schema final Map> resolvedReferences = resolveReferences(artifactReferences); byte[] schema = IoUtil.toBytes(rawSchema); S parsed = schemaParser.parseSchema(schema, resolvedReferences); - ParsedSchemaImpl ps = new ParsedSchemaImpl() - .setParsedSchema(parsed) - .setSchemaReferences(new 
ArrayList<>(resolvedReferences.values())) - .setRawSchema(schema); + ParsedSchemaImpl ps = new ParsedSchemaImpl().setParsedSchema(parsed) + .setSchemaReferences(new ArrayList<>(resolvedReferences.values())).setRawSchema(schema); SchemaLookupResult.SchemaLookupResultBuilder result = SchemaLookupResult.builder(); - return result - .globalId(globalId) - .parsedSchema(ps) - .build(); + return result.globalId(globalId).parsedSchema(ps).build(); } - protected Map> resolveReferences(List artifactReferences) { + protected Map> resolveReferences( + List artifactReferences) { Map> resolvedReferences = new HashMap<>(); artifactReferences.forEach(reference -> { - final InputStream referenceContent = client.groups().byGroupId(reference.getGroupId() == null ? "default" : reference.getGroupId()).artifacts() - .byArtifactId(reference.getArtifactId()).versions().byVersionExpression(reference.getVersion()).content().get(); + final InputStream referenceContent = client.groups() + .byGroupId(reference.getGroupId() == null ? "default" : reference.getGroupId()) + .artifacts().byArtifactId(reference.getArtifactId()).versions() + .byVersionExpression(reference.getVersion()).content().get(); final List referenceReferences = client - .groups() - .byGroupId(reference.getGroupId() == null - ? "default" - : reference.getGroupId()) // TODO verify the old logic: .pathParams(List.of(groupId == null ? "null" : groupId, artifactId, version)) GroupRequestsProvider.java - .artifacts() - .byArtifactId(reference.getArtifactId()) - .versions() - .byVersionExpression(reference.getVersion()) - .references() - .get(); + .groups().byGroupId(reference.getGroupId() == null ? "default" : reference.getGroupId()) // TODO + // verify + // the + // old + // logic: + // .pathParams(List.of(groupId + // == + // null + // ? 
+ // "null" + // : + // groupId, + // artifactId, + // version)) + // GroupRequestsProvider.java + .artifacts().byArtifactId(reference.getArtifactId()).versions() + .byVersionExpression(reference.getVersion()).references().get(); if (!referenceReferences.isEmpty()) { final Map> nestedReferences = resolveReferences(referenceReferences); resolvedReferences.putAll(nestedReferences); - resolvedReferences.put(reference.getName(), parseSchemaFromStream(reference.getName(), referenceContent, resolveReferences(referenceReferences))); - } - else { - resolvedReferences.put(reference.getName(), parseSchemaFromStream(reference.getName(), referenceContent, Collections.emptyMap())); + resolvedReferences.put(reference.getName(), parseSchemaFromStream(reference.getName(), + referenceContent, resolveReferences(referenceReferences))); + } else { + resolvedReferences.put(reference.getName(), + parseSchemaFromStream(reference.getName(), referenceContent, Collections.emptyMap())); } }); return resolvedReferences; } - private ParsedSchema parseSchemaFromStream(String name, InputStream rawSchema, Map> resolvedReferences) { + private ParsedSchema parseSchemaFromStream(String name, InputStream rawSchema, + Map> resolvedReferences) { byte[] schema = IoUtil.toBytes(rawSchema); S parsed = schemaParser.parseSchema(schema, resolvedReferences); - return new ParsedSchemaImpl() - .setParsedSchema(parsed) - .setSchemaReferences(new ArrayList<>(resolvedReferences.values())) - .setReferenceName(name) + return new ParsedSchemaImpl().setParsedSchema(parsed) + .setSchemaReferences(new ArrayList<>(resolvedReferences.values())).setReferenceName(name) .setRawSchema(schema); } @@ -290,12 +293,12 @@ public void close() throws IOException { } } - private RegistryClient configureClientWithBearerAuthentication(DefaultSchemaResolverConfig config, String registryUrl, String authServerUrl, String tokenEndpoint) { + private RegistryClient configureClientWithBearerAuthentication(DefaultSchemaResolverConfig config, + 
String registryUrl, String authServerUrl, String tokenEndpoint) { RequestAdapter auth; if (authServerUrl != null) { auth = configureAuthWithRealm(config, authServerUrl); - } - else { + } else { auth = configureAuthWithUrl(config, tokenEndpoint); } auth.setBaseUrl(registryUrl); @@ -306,10 +309,12 @@ private RequestAdapter configureAuthWithRealm(DefaultSchemaResolverConfig config final String realm = config.getAuthRealm(); if (realm == null) { - throw new IllegalArgumentException("Missing registry auth realm, set " + SchemaResolverConfig.AUTH_REALM); + throw new IllegalArgumentException( + "Missing registry auth realm, set " + SchemaResolverConfig.AUTH_REALM); } - final String tokenEndpoint = authServerUrl + String.format(SchemaResolverConfig.AUTH_SERVICE_URL_TOKEN_ENDPOINT, realm); + final String tokenEndpoint = authServerUrl + + String.format(SchemaResolverConfig.AUTH_SERVICE_URL_TOKEN_ENDPOINT, realm); return configureAuthWithUrl(config, tokenEndpoint); } @@ -318,26 +323,31 @@ private RequestAdapter configureAuthWithUrl(DefaultSchemaResolverConfig config, final String clientId = config.getAuthClientId(); if (clientId == null) { - throw new IllegalArgumentException("Missing registry auth clientId, set " + SchemaResolverConfig.AUTH_CLIENT_ID); + throw new IllegalArgumentException( + "Missing registry auth clientId, set " + SchemaResolverConfig.AUTH_CLIENT_ID); } final String clientSecret = config.getAuthClientSecret(); if (clientSecret == null) { - throw new IllegalArgumentException("Missing registry auth secret, set " + SchemaResolverConfig.AUTH_CLIENT_SECRET); + throw new IllegalArgumentException( + "Missing registry auth secret, set " + SchemaResolverConfig.AUTH_CLIENT_SECRET); } final String clientScope = config.getAuthClientScope(); - RequestAdapter adapter = new VertXRequestAdapter(buildOIDCWebClient(this.vertx, tokenEndpoint, clientId, clientSecret, clientScope)); + RequestAdapter adapter = new VertXRequestAdapter( + buildOIDCWebClient(this.vertx, 
tokenEndpoint, clientId, clientSecret, clientScope)); return adapter; } - private RegistryClient configureClientWithBasicAuth(DefaultSchemaResolverConfig config, String registryUrl, String username) { + private RegistryClient configureClientWithBasicAuth(DefaultSchemaResolverConfig config, + String registryUrl, String username) { final String password = config.getAuthPassword(); if (password == null) { - throw new IllegalArgumentException("Missing registry auth password, set " + SchemaResolverConfig.AUTH_PASSWORD); + throw new IllegalArgumentException( + "Missing registry auth password, set " + SchemaResolverConfig.AUTH_PASSWORD); } var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(this.vertx, username, password)); @@ -346,7 +356,8 @@ private RegistryClient configureClientWithBasicAuth(DefaultSchemaResolverConfig return new RegistryClient(adapter); } - protected void loadFromMetaData(VersionMetaData artifactMetadata, SchemaLookupResult.SchemaLookupResultBuilder resultBuilder) { + protected void loadFromMetaData(VersionMetaData artifactMetadata, + SchemaLookupResult.SchemaLookupResultBuilder resultBuilder) { resultBuilder.globalId(artifactMetadata.getGlobalId()); resultBuilder.contentId(artifactMetadata.getContentId()); resultBuilder.groupId(artifactMetadata.getGroupId()); @@ -354,7 +365,8 @@ protected void loadFromMetaData(VersionMetaData artifactMetadata, SchemaLookupRe resultBuilder.version(String.valueOf(artifactMetadata.getVersion())); } - protected void loadFromSearchedVersion(SearchedVersion version, SchemaLookupResult.SchemaLookupResultBuilder resultBuilder) { + protected void loadFromSearchedVersion(SearchedVersion version, + SchemaLookupResult.SchemaLookupResultBuilder resultBuilder) { resultBuilder.globalId(version.getGlobalId()); resultBuilder.contentId(version.getContentId()); resultBuilder.groupId(version.getGroupId()); diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/DefaultSchemaResolver.java 
b/schema-resolver/src/main/java/io/apicurio/registry/resolver/DefaultSchemaResolver.java index fc90a30b07..c5032c0004 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/DefaultSchemaResolver.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/DefaultSchemaResolver.java @@ -28,7 +28,6 @@ /** * Default implementation of {@link SchemaResolver} - * */ public class DefaultSchemaResolver extends AbstractSchemaResolver { @@ -46,7 +45,8 @@ public void reset() { } /** - * @see io.apicurio.registry.resolver.AbstractSchemaResolver#configure(java.util.Map, io.apicurio.registry.resolver.SchemaParser) + * @see io.apicurio.registry.resolver.AbstractSchemaResolver#configure(java.util.Map, + * io.apicurio.registry.resolver.SchemaParser) */ @Override public void configure(Map configs, SchemaParser schemaParser) { @@ -70,7 +70,6 @@ public SchemaLookupResult resolveSchema(Record data) { Objects.requireNonNull(data); Objects.requireNonNull(data.payload()); - ParsedSchema parsedSchema; if (artifactResolverStrategy.loadSchema() && schemaParser.supportsExtractSchemaFromData()) { parsedSchema = schemaParser.getSchemaFromData(data, dereference); @@ -85,20 +84,25 @@ public SchemaLookupResult resolveSchema(Record data) { } private Optional> getSchemaFromCache(ArtifactReference artifactReference) { - if (artifactReference.getGlobalId() != null && schemaCache.containsByGlobalId(artifactReference.getGlobalId())) { + if (artifactReference.getGlobalId() != null + && schemaCache.containsByGlobalId(artifactReference.getGlobalId())) { return Optional.of(resolveSchemaByGlobalId(artifactReference.getGlobalId())); - } else if (artifactReference.getContentId() != null && schemaCache.containsByContentId(artifactReference.getContentId())) { + } else if (artifactReference.getContentId() != null + && schemaCache.containsByContentId(artifactReference.getContentId())) { return Optional.of(resolveSchemaByContentId(artifactReference.getContentId())); - } else if 
(artifactReference.getContentHash() != null && schemaCache.containsByContentHash(artifactReference.getContentHash())) { + } else if (artifactReference.getContentHash() != null + && schemaCache.containsByContentHash(artifactReference.getContentHash())) { return Optional.of(resolveSchemaByContentHash(artifactReference.getContentHash())); - } else if (schemaCache.containsByArtifactCoordinates(ArtifactCoordinates.fromArtifactReference(artifactReference))) { - return Optional.of(resolveSchemaByArtifactCoordinatesCached(ArtifactCoordinates.fromArtifactReference(artifactReference))); + } else if (schemaCache.containsByArtifactCoordinates( + ArtifactCoordinates.fromArtifactReference(artifactReference))) { + return Optional.of(resolveSchemaByArtifactCoordinatesCached( + ArtifactCoordinates.fromArtifactReference(artifactReference))); } return Optional.empty(); } - - private SchemaLookupResult getSchemaFromRegistry(ParsedSchema parsedSchema, Record data, ArtifactReference artifactReference) { + private SchemaLookupResult getSchemaFromRegistry(ParsedSchema parsedSchema, Record data, + ArtifactReference artifactReference) { if (autoCreateArtifact) { @@ -109,20 +113,23 @@ private SchemaLookupResult getSchemaFromRegistry(ParsedSchema parsedSchema } if (parsedSchema.hasReferences()) { - //List of references lookup, to be used to create the references for the artifact - final List> schemaLookupResults = handleArtifactReferences(data, parsedSchema); + // List of references lookup, to be used to create the references for the artifact + final List> schemaLookupResults = handleArtifactReferences(data, + parsedSchema); return handleAutoCreateArtifact(parsedSchema, artifactReference, schemaLookupResults); } else { return handleAutoCreateArtifact(parsedSchema, artifactReference); } - } else if (config.getExplicitSchemaLocation() != null && schemaParser.supportsGetSchemaFromLocation()) { + } else if (config.getExplicitSchemaLocation() != null + && 
schemaParser.supportsGetSchemaFromLocation()) { parsedSchema = schemaParser.getSchemaFromLocation(config.getExplicitSchemaLocation()); return handleAutoCreateArtifact(parsedSchema, artifactReference); } } if (findLatest || artifactReference.getVersion() != null) { - return resolveSchemaByCoordinates(artifactReference.getGroupId(), artifactReference.getArtifactId(), artifactReference.getVersion()); + return resolveSchemaByCoordinates(artifactReference.getGroupId(), + artifactReference.getArtifactId(), artifactReference.getVersion()); } if (schemaParser.supportsExtractSchemaFromData()) { @@ -132,10 +139,12 @@ private SchemaLookupResult getSchemaFromRegistry(ParsedSchema parsedSchema return handleResolveSchemaByContent(parsedSchema, artifactReference); } - return resolveSchemaByCoordinates(artifactReference.getGroupId(), artifactReference.getArtifactId(), artifactReference.getVersion()); + return resolveSchemaByCoordinates(artifactReference.getGroupId(), artifactReference.getArtifactId(), + artifactReference.getVersion()); } - private List> handleArtifactReferences(Record data, ParsedSchema parsedSchema) { + private List> handleArtifactReferences(Record data, + ParsedSchema parsedSchema) { final List> referencesLookup = new ArrayList<>(); for (ParsedSchema referencedSchema : parsedSchema.getSchemaReferences()) { @@ -143,16 +152,19 @@ private List> handleArtifactReferences(Record data, Par List> nestedReferences = handleArtifactReferences(data, referencedSchema); if (nestedReferences.isEmpty()) { - referencesLookup.add(handleAutoCreateArtifact(referencedSchema, resolveArtifactReference(data, referencedSchema, true, referencedSchema.referenceName()))); + referencesLookup.add(handleAutoCreateArtifact(referencedSchema, resolveArtifactReference(data, + referencedSchema, true, referencedSchema.referenceName()))); } else { - referencesLookup.add(handleAutoCreateArtifact(referencedSchema, resolveArtifactReference(data, referencedSchema, true, 
referencedSchema.referenceName()), nestedReferences)); + referencesLookup.add(handleAutoCreateArtifact(referencedSchema, resolveArtifactReference(data, + referencedSchema, true, referencedSchema.referenceName()), nestedReferences)); } } return referencesLookup; } /** - * @see io.apicurio.registry.resolver.SchemaResolver#resolveSchemaByArtifactReference (io.apicurio.registry.resolver.strategy.ArtifactReferenceImpl) + * @see io.apicurio.registry.resolver.SchemaResolver#resolveSchemaByArtifactReference + * (io.apicurio.registry.resolver.strategy.ArtifactReferenceImpl) */ @Override public SchemaLookupResult resolveSchemaByArtifactReference(ArtifactReference reference) { @@ -170,15 +182,18 @@ public SchemaLookupResult resolveSchemaByArtifactReference(ArtifactReference return resolveSchemaByGlobalId(reference.getGlobalId()); } - return resolveSchemaByCoordinates(reference.getGroupId(), reference.getArtifactId(), reference.getVersion()); + return resolveSchemaByCoordinates(reference.getGroupId(), reference.getArtifactId(), + reference.getVersion()); } - private SchemaLookupResult resolveSchemaByCoordinates(String groupId, String artifactId, String version) { + private SchemaLookupResult resolveSchemaByCoordinates(String groupId, String artifactId, + String version) { if (artifactId == null) { throw new IllegalStateException("artifactId cannot be null"); } - ArtifactReference reference = ArtifactReference.builder().groupId(groupId).artifactId(artifactId).version(version).build(); + ArtifactReference reference = ArtifactReference.builder().groupId(groupId).artifactId(artifactId) + .version(version).build(); return resolveSchemaByArtifactReferenceCached(reference); } @@ -186,72 +201,65 @@ private SchemaLookupResult resolveSchemaByCoordinates(String groupId, String protected SchemaLookupResult resolveSchemaByContentId(long contentId) { return schemaCache.getByContentId(contentId, contentIdKey -> { - // it's impossible to retrieve more info about the artifact with only the 
contentId, and that's ok for this case + // it's impossible to retrieve more info about the artifact with only the contentId, and that's ok + // for this case InputStream rawSchema = null; ParsedSchemaImpl ps = null; try { rawSchema = client.ids().contentIds().byContentId(contentIdKey).get(); - //Get the artifact references - final List artifactReferences = - client.ids().contentIds().byContentId(contentId).references().get(); - //If there are any references for the schema being parsed, resolve them before parsing the schema + // Get the artifact references + final List artifactReferences = client + .ids().contentIds().byContentId(contentId).references().get(); + // If there are any references for the schema being parsed, resolve them before parsing the + // schema final Map> resolvedReferences = resolveReferences(artifactReferences); byte[] schema = rawSchema.readAllBytes(); S parsed = schemaParser.parseSchema(schema, resolvedReferences); - ps = new ParsedSchemaImpl() - .setParsedSchema(parsed) - .setRawSchema(schema); + ps = new ParsedSchemaImpl().setParsedSchema(parsed).setRawSchema(schema); } catch (IOException e) { throw new RuntimeException(e); } SchemaLookupResult.SchemaLookupResultBuilder result = SchemaLookupResult.builder(); - return result - .contentId(contentIdKey) - .parsedSchema(ps) - .build(); + return result.contentId(contentIdKey).parsedSchema(ps).build(); }); } protected SchemaLookupResult resolveSchemaByContentHash(String contentHash) { return schemaCache.getByContentHash(contentHash, contentHashKey -> { - // it's impossible to retrieve more info about the artifact with only the contentHash, and that's ok for this case + // it's impossible to retrieve more info about the artifact with only the contentHash, and that's + // ok for this case InputStream rawSchema = null; ParsedSchemaImpl ps = null; rawSchema = client.ids().contentHashes().byContentHash(contentHashKey).get(); - //Get the artifact references + // Get the artifact references final List 
artifactReferences = client .ids().contentHashes().byContentHash(contentHashKey).references().get(); - //If there are any references for the schema being parsed, resolve them before parsing the schema + // If there are any references for the schema being parsed, resolve them before parsing the schema final Map> resolvedReferences = resolveReferences(artifactReferences); byte[] schema = IoUtil.toBytes(rawSchema); S parsed = schemaParser.parseSchema(schema, resolvedReferences); - ps = new ParsedSchemaImpl() - .setParsedSchema(parsed) - .setRawSchema(schema); + ps = new ParsedSchemaImpl().setParsedSchema(parsed).setRawSchema(schema); SchemaLookupResult.SchemaLookupResultBuilder result = SchemaLookupResult.builder(); - return result - .contentHash(contentHashKey) - .parsedSchema(ps) - .build(); + return result.contentHash(contentHashKey).parsedSchema(ps).build(); }); } /** - * Search by content may not work for some usecases of our Serdes implementations. - * For example when serializing protobuf messages, the schema inferred from the data - * may not be equal to the .proto file schema uploaded in the registry. + * Search by content may not work for some usecases of our Serdes implementations. For example when + * serializing protobuf messages, the schema inferred from the data may not be equal to the .proto file + * schema uploaded in the registry. */ private SchemaLookupResult handleResolveSchemaByContent(ParsedSchema parsedSchema, - final ArtifactReference artifactReference) { + final ArtifactReference artifactReference) { String rawSchemaString = IoUtil.toString(parsedSchema.getRawSchema()); @@ -260,7 +268,8 @@ private SchemaLookupResult handleResolveSchemaByContent(ParsedSchema parse String at = schemaParser.artifactType(); String ct = toContentType(at); VersionSearchResults results = client.search().versions().post(is, ct, config -> { - config.queryParameters.groupId = artifactReference.getGroupId() == null ? 
"default" : artifactReference.getGroupId(); + config.queryParameters.groupId = artifactReference.getGroupId() == null ? "default" + : artifactReference.getGroupId(); config.queryParameters.artifactId = artifactReference.getArtifactId(); config.queryParameters.canonical = true; config.queryParameters.artifactType = at; @@ -269,7 +278,8 @@ private SchemaLookupResult handleResolveSchemaByContent(ParsedSchema parse }); if (results.getCount() == 0) { - throw new RuntimeException("Could not resolve artifact reference by content: " + artifactReference); + throw new RuntimeException( + "Could not resolve artifact reference by content: " + artifactReference); } SchemaLookupResult.SchemaLookupResultBuilder result = SchemaLookupResult.builder(); @@ -283,7 +293,7 @@ private SchemaLookupResult handleResolveSchemaByContent(ParsedSchema parse } private SchemaLookupResult handleAutoCreateArtifact(ParsedSchema parsedSchema, - final ArtifactReference artifactReference) { + final ArtifactReference artifactReference) { String rawSchemaString = IoUtil.toString(parsedSchema.getRawSchema()); return schemaCache.getByContent(rawSchemaString, contentKey -> { @@ -300,14 +310,12 @@ private SchemaLookupResult handleAutoCreateArtifact(ParsedSchema parsedSch vc.setContentType(toContentType(schemaParser.artifactType())); version.setContent(vc); - CreateArtifactResponse car = client - .groups() - .byGroupId(artifactReference.getGroupId() == null ? "default" : artifactReference.getGroupId()) - .artifacts() - .post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.forValue(this.autoCreateBehavior); - config.queryParameters.canonical = false; - }); + CreateArtifactResponse car = client.groups().byGroupId( + artifactReference.getGroupId() == null ? 
"default" : artifactReference.getGroupId()) + .artifacts().post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.forValue(this.autoCreateBehavior); + config.queryParameters.canonical = false; + }); SchemaLookupResult.SchemaLookupResultBuilder result = SchemaLookupResult.builder(); @@ -320,11 +328,12 @@ private SchemaLookupResult handleAutoCreateArtifact(ParsedSchema parsedSch } private SchemaLookupResult handleAutoCreateArtifact(ParsedSchema parsedSchema, - final ArtifactReference artifactReference, List> referenceLookups) { + final ArtifactReference artifactReference, List> referenceLookups) { String rawSchemaString = IoUtil.toString(parsedSchema.getRawSchema()); - final List artifactReferences = parseReferences(referenceLookups); + final List artifactReferences = parseReferences( + referenceLookups); return schemaCache.getByContent(rawSchemaString, contentKey -> { CreateArtifact createArtifact = new CreateArtifact(); @@ -341,14 +350,12 @@ private SchemaLookupResult handleAutoCreateArtifact(ParsedSchema parsedSch vc.setReferences(artifactReferences); version.setContent(vc); - CreateArtifactResponse car = client - .groups() - .byGroupId(artifactReference.getGroupId() == null ? "default" : artifactReference.getGroupId()) - .artifacts() - .post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.forValue(this.autoCreateBehavior); - config.queryParameters.canonical = false; - }); + CreateArtifactResponse car = client.groups().byGroupId( + artifactReference.getGroupId() == null ? 
"default" : artifactReference.getGroupId()) + .artifacts().post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.forValue(this.autoCreateBehavior); + config.queryParameters.canonical = false; + }); SchemaLookupResult.SchemaLookupResultBuilder result = SchemaLookupResult.builder(); @@ -385,7 +392,8 @@ private String toContentType(String artifactType) { throw new IllegalArgumentException("Artifact type not supported: " + artifactType); } - private List parseReferences(List> referenceLookups) { + private List parseReferences( + List> referenceLookups) { final List artifactReferences = new ArrayList<>(); referenceLookups.forEach(referenceLookup -> { @@ -400,27 +408,36 @@ private List parseRef return artifactReferences; } - private SchemaLookupResult resolveSchemaByArtifactCoordinatesCached(ArtifactCoordinates artifactCoordinates) { - return schemaCache.getByArtifactCoordinates(artifactCoordinates, artifactCoordinatesKey -> resolveByCoordinates(artifactCoordinatesKey.getGroupId(), artifactCoordinatesKey.getArtifactId(), artifactCoordinatesKey.getVersion())); + private SchemaLookupResult resolveSchemaByArtifactCoordinatesCached( + ArtifactCoordinates artifactCoordinates) { + return schemaCache.getByArtifactCoordinates(artifactCoordinates, + artifactCoordinatesKey -> resolveByCoordinates(artifactCoordinatesKey.getGroupId(), + artifactCoordinatesKey.getArtifactId(), artifactCoordinatesKey.getVersion())); } - - private SchemaLookupResult resolveSchemaByArtifactReferenceCached(ArtifactReference artifactReference) { + private SchemaLookupResult resolveSchemaByArtifactReferenceCached( + ArtifactReference artifactReference) { if (artifactReference.getGlobalId() != null) { return schemaCache.getByGlobalId(artifactReference.getGlobalId(), this::resolveSchemaByGlobalId); } else if (artifactReference.getContentId() != null) { - return schemaCache.getByContentId(artifactReference.getContentId(), this::resolveSchemaByContentId); + return 
schemaCache.getByContentId(artifactReference.getContentId(), + this::resolveSchemaByContentId); } else if (artifactReference.getContentHash() != null) { - return schemaCache.getByContentHash(artifactReference.getContentHash(), this::resolveSchemaByContentHash); + return schemaCache.getByContentHash(artifactReference.getContentHash(), + this::resolveSchemaByContentHash); } else { - return schemaCache.getByArtifactCoordinates(ArtifactCoordinates.fromArtifactReference(artifactReference), artifactReferenceKey -> resolveByCoordinates(artifactReferenceKey.getGroupId(), artifactReferenceKey.getArtifactId(), artifactReferenceKey.getVersion())); + return schemaCache.getByArtifactCoordinates( + ArtifactCoordinates.fromArtifactReference(artifactReference), + artifactReferenceKey -> resolveByCoordinates(artifactReferenceKey.getGroupId(), + artifactReferenceKey.getArtifactId(), artifactReferenceKey.getVersion())); } } private SchemaLookupResult resolveByCoordinates(String groupId, String artifactId, String version) { SchemaLookupResult.SchemaLookupResultBuilder result = SchemaLookupResult.builder(); - //TODO if getArtifactVersion returns the artifact version and globalid in the headers we can reduce this to only one http call - + // TODO if getArtifactVersion returns the artifact version and globalid in the headers we can reduce + // this to only one http call + if (version == null) { version = "branch=latest"; } @@ -428,24 +445,23 @@ private SchemaLookupResult resolveByCoordinates(String groupId, String artifa S parsed = null; byte[] schema = null; Long gid; - VersionMetaData metadata = client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(version).get(); + VersionMetaData metadata = client.groups().byGroupId(groupId).artifacts().byArtifactId(artifactId) + .versions().byVersionExpression(version).get(); loadFromMetaData(metadata, result); gid = metadata.getGlobalId(); InputStream rawSchema = 
client.ids().globalIds().byGlobalId(gid).get(); - //Get the artifact references + // Get the artifact references final List artifactReferences = client .ids().globalIds().byGlobalId(gid).references().get(); - //If there are any references for the schema being parsed, resolve them before parsing the schema + // If there are any references for the schema being parsed, resolve them before parsing the schema final Map> resolvedReferences = resolveReferences(artifactReferences); schema = IoUtil.toBytes(rawSchema); parsed = schemaParser.parseSchema(schema, resolvedReferences); - result.parsedSchema(new ParsedSchemaImpl() - .setParsedSchema(parsed) - .setRawSchema(schema)); + result.parsedSchema(new ParsedSchemaImpl().setParsedSchema(parsed).setRawSchema(schema)); return result.build(); } diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/ERCache.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/ERCache.java index 2362b43da2..c559416e3e 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/ERCache.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/ERCache.java @@ -57,20 +57,20 @@ public void configureRetryCount(long retries) { } /** - * If {@code true}, will cache schema lookups that either have `latest` or no version specified. Setting this to false - * will effectively disable caching for schema lookups that do not specify a version. + * If {@code true}, will cache schema lookups that either have `latest` or no version specified. Setting + * this to false will effectively disable caching for schema lookups that do not specify a version. * - * @param cacheLatest Whether to enable cache of artifacts without a version specified. + * @param cacheLatest Whether to enable cache of artifacts without a version specified. 
*/ public void configureCacheLatest(boolean cacheLatest) { this.cacheLatest = cacheLatest; } /** - * If set to {@code true}, will log the load error instead of throwing it when an exception occurs trying to refresh - * a cache entry. This will still honor retries before enacting this behavior. + * If set to {@code true}, will log the load error instead of throwing it when an exception occurs trying + * to refresh a cache entry. This will still honor retries before enacting this behavior. * - * @param faultTolerantRefresh Whether to enable fault tolerant refresh behavior. + * @param faultTolerantRefresh Whether to enable fault tolerant refresh behavior. */ public void configureFaultTolerantRefresh(boolean faultTolerantRefresh) { this.faultTolerantRefresh = faultTolerantRefresh; @@ -99,7 +99,7 @@ public void configureContentHashKeyExtractor(Function keyExtractor) { /** * Return whether caching of artifact lookups with {@code null} versions is enabled. * - * @return {@code true} if it's enabled. + * @return {@code true} if it's enabled. * @see #configureCacheLatest(boolean) */ public boolean isCacheLatest() { @@ -109,7 +109,7 @@ public boolean isCacheLatest() { /** * Return whether fault tolerant refresh is enabled. * - * @return {@code true} if it's enabled. + * @return {@code true} if it's enabled. 
* @see #configureFaultTolerantRefresh(boolean) */ public boolean isFaultTolerantRefresh() { @@ -117,14 +117,13 @@ public boolean isFaultTolerantRefresh() { } public void checkInitialized() { - boolean initialized = keyExtractor1 != null && keyExtractor2 != null && - keyExtractor3 != null && keyExtractor4 != null && keyExtractor5 != null; + boolean initialized = keyExtractor1 != null && keyExtractor2 != null && keyExtractor3 != null + && keyExtractor4 != null && keyExtractor5 != null; initialized = initialized && lifetime != null && backoff != null && retries >= 0; if (!initialized) throw new IllegalStateException("Not properly initialized!"); } - public boolean containsByGlobalId(Long key) { WrappedValue value = this.index1.get(key); return value != null && !value.isExpired(); @@ -160,7 +159,8 @@ public V getByContentId(Long key, Function loaderFunction) { return getValue(value, key, loaderFunction); } - public V getByArtifactCoordinates(ArtifactCoordinates key, Function loaderFunction) { + public V getByArtifactCoordinates(ArtifactCoordinates key, + Function loaderFunction) { WrappedValue value = this.index4.get(key); return getValue(value, key, loaderFunction); } @@ -202,7 +202,8 @@ private void reindex(WrappedValue newValue, T lookupKey) { Optional.ofNullable(keyExtractor3.apply(newValue.value)).ifPresent(k -> index3.put(k, newValue)); Optional.ofNullable(keyExtractor4.apply(newValue.value)).ifPresent(k -> { index4.put(k, newValue); - // By storing the lookup key, we ensure that a null/latest lookup gets cached, as the key extractor will + // By storing the lookup key, we ensure that a null/latest lookup gets cached, as the key + // extractor will // automatically add the version to the new key if (this.cacheLatest && k.getClass().equals(lookupKey.getClass())) { index4.put((ArtifactCoordinates) lookupKey, newValue); @@ -221,7 +222,8 @@ public void clear() { // === Util & Other - private static Result retry(Duration backoff, long retries, Supplier supplier) { + 
private static Result retry(Duration backoff, long retries, + Supplier supplier) { if (retries < 0) throw new IllegalArgumentException(); Objects.requireNonNull(supplier); @@ -233,14 +235,14 @@ private static Result retry(Duration backoff, long retr if (value != null) return Result.ok(value); else { - return Result.error(new NullPointerException("Could not retrieve schema for the cache. " + - "Loading function returned null.")); + return Result.error(new NullPointerException( + "Could not retrieve schema for the cache. " + "Loading function returned null.")); } } catch (RuntimeException e) { // TODO: verify if this is really needed, retries are already baked into the adapter ... -// if (i == retries || !(e.getCause() != null && e.getCause() instanceof ExecutionException -// && e.getCause().getCause() != null && e.getCause().getCause() instanceof ApiException -// && (((ApiException) e.getCause().getCause()).getResponseStatusCode() == 429))) + // if (i == retries || !(e.getCause() != null && e.getCause() instanceof ExecutionException + // && e.getCause().getCause() != null && e.getCause().getCause() instanceof ApiException + // && (((ApiException) e.getCause().getCause()).getResponseStatusCode() == 429))) if (i == retries || !(e.getCause() != null && e.getCause() instanceof ApiException && (((ApiException) e.getCause()).getResponseStatusCode() == 429))) return Result.error(new RuntimeException(e)); diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/ParsedSchema.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/ParsedSchema.java index bd3397a55a..27b0d9a577 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/ParsedSchema.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/ParsedSchema.java @@ -25,12 +25,10 @@ public interface ParsedSchema { public boolean hasReferences(); /** - * * @return the name to be used when referencing this schema */ public String referenceName(); - /** * set the 
name to be used when referencing this schema */ diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/ParsedSchemaImpl.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/ParsedSchemaImpl.java index 6845bbbdca..6bf9d2be61 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/ParsedSchemaImpl.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/ParsedSchemaImpl.java @@ -10,7 +10,7 @@ public class ParsedSchemaImpl implements ParsedSchema { private String referenceName; public ParsedSchemaImpl() { - //empty + // empty } /** diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaLookupResult.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaLookupResult.java index e38c775b42..c3e03721d0 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaLookupResult.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaLookupResult.java @@ -15,7 +15,7 @@ public class SchemaLookupResult { private String version; private SchemaLookupResult() { - //empty initialize manually + // empty initialize manually } /** @@ -68,22 +68,14 @@ public String getVersion() { } public ArtifactReference toArtifactReference() { - return ArtifactReference.builder() - .globalId(this.getGlobalId()) - .contentId(this.getContentId()) - .contentHash(this.getContentHash()) - .groupId(this.getGroupId()) - .artifactId(this.getArtifactId()) - .version(this.getVersion()) - .build(); + return ArtifactReference.builder().globalId(this.getGlobalId()).contentId(this.getContentId()) + .contentHash(this.getContentHash()).groupId(this.getGroupId()) + .artifactId(this.getArtifactId()).version(this.getVersion()).build(); } public ArtifactCoordinates toArtifactCoordinates() { - return ArtifactCoordinates.builder() - .groupId(this.getGroupId()) - .artifactId(this.getArtifactId()) - .version(this.getVersion()) - .build(); + return 
ArtifactCoordinates.builder().groupId(this.getGroupId()).artifactId(this.getArtifactId()) + .version(this.getVersion()).build(); } public static SchemaLookupResultBuilder builder() { diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaParser.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaParser.java index 1e290e6062..42459432d5 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaParser.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaParser.java @@ -11,9 +11,9 @@ public interface SchemaParser { S parseSchema(byte[] rawSchema, Map> resolvedReferences); /** - * In some artifact types, such as AVRO, it is possible to extract the schema from the java object. - * But this can be easily extended to other formats by using a custom {@link Record} implementation that adds additional fields - * that allows to build a {@link ParsedSchema} + * In some artifact types, such as AVRO, it is possible to extract the schema from the java object. But + * this can be easily extended to other formats by using a custom {@link Record} implementation that adds + * additional fields that allows to build a {@link ParsedSchema} * * @param data * @return the ParsedSchema, containing both the raw schema (bytes) and the parsed schema. Can be null. @@ -21,9 +21,9 @@ public interface SchemaParser { ParsedSchema getSchemaFromData(Record data); /** - * In some artifact types, such as AVRO, it is possible to extract the schema from the java object. - * But this can be easily extended to other formats by using a custom {@link Record} implementation that adds additional fields - * that allows to build a {@link ParsedSchema} + * In some artifact types, such as AVRO, it is possible to extract the schema from the java object. 
But + * this can be easily extended to other formats by using a custom {@link Record} implementation that adds + * additional fields that allows to build a {@link ParsedSchema} * * @param data * @param dereference indicate the schema parser whether to try to dereference the record schema. diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaResolver.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaResolver.java index a3ff08200c..c075a69be2 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaResolver.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaResolver.java @@ -1,14 +1,14 @@ package io.apicurio.registry.resolver; -import java.io.Closeable; -import java.util.Map; - import io.apicurio.registry.resolver.data.Record; +import io.apicurio.registry.resolver.strategy.ArtifactReference; import io.apicurio.registry.resolver.strategy.ArtifactReferenceImpl; import io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy; -import io.apicurio.registry.resolver.strategy.ArtifactReference; import io.apicurio.registry.rest.client.RegistryClient; +import java.io.Closeable; +import java.util.Map; + public interface SchemaResolver extends Closeable { /** @@ -22,19 +22,24 @@ default void configure(Map configs, SchemaParser schema public void setClient(RegistryClient client); - public void setArtifactResolverStrategy(ArtifactReferenceResolverStrategy artifactResolverStrategy); + public void setArtifactResolverStrategy( + ArtifactReferenceResolverStrategy artifactResolverStrategy); public SchemaParser getSchemaParser(); /** * Used to register or to lookup a schema in the registry - * @param data, record containing metadata about it that can be used by the resolver to lookup a schema in the registry + * + * @param data, record containing metadata about it that can be used by the resolver to lookup a schema in + * the registry * @return SchemaLookupResult */ public 
SchemaLookupResult resolveSchema(Record data); /** - * The schema resolver may use different pieces of information from the {@link ArtifactReferenceImpl} depending on the configuration of the schema resolver. + * The schema resolver may use different pieces of information from the {@link ArtifactReferenceImpl} + * depending on the configuration of the schema resolver. + * * @param reference * @return SchemaLookupResult */ diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaResolverConfig.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaResolverConfig.java index fe59aa3219..79b1bf33b9 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaResolverConfig.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/SchemaResolverConfig.java @@ -10,94 +10,99 @@ public class SchemaResolverConfig { /** - * Fully qualified Java classname of a class that implements {@link ArtifactReferenceResolverStrategy} and is - * responsible for mapping between the Record being resolved and an artifactId. For example - * there is a strategy to use the topic name as the schema's artifactId. Only used by {@link SchemaResolver#resolveSchema(io.apicurio.registry.resolver.data.Record)} + * Fully qualified Java classname of a class that implements {@link ArtifactReferenceResolverStrategy} and + * is responsible for mapping between the Record being resolved and an artifactId. For example there is a + * strategy to use the topic name as the schema's artifactId. Only used by + * {@link SchemaResolver#resolveSchema(io.apicurio.registry.resolver.data.Record)} */ public static final String ARTIFACT_RESOLVER_STRATEGY = "apicurio.registry.artifact-resolver-strategy"; /** - * Uses the ArtifactReference available for each record. Requires {@link Metadata#artifactReference()} to be set. 
- * Note this default artifact resolver strategy differs in behavior from the classic Kafka serdes ArtifactResolverStrategy + * Uses the ArtifactReference available for each record. Requires {@link Metadata#artifactReference()} to + * be set. Note this default artifact resolver strategy differs in behavior from the classic Kafka serdes + * ArtifactResolverStrategy */ - public static final String ARTIFACT_RESOLVER_STRATEGY_DEFAULT = DynamicArtifactReferenceResolverStrategy.class.getName(); - + public static final String ARTIFACT_RESOLVER_STRATEGY_DEFAULT = DynamicArtifactReferenceResolverStrategy.class + .getName(); /** - * Optional, boolean to indicate whether serializer classes should attempt to create an artifact in the registry. - * Note: JsonSchema serializer does not support this feature yet. + * Optional, boolean to indicate whether serializer classes should attempt to create an artifact in the + * registry. Note: JsonSchema serializer does not support this feature yet. */ public static final String AUTO_REGISTER_ARTIFACT = "apicurio.registry.auto-register"; public static final boolean AUTO_REGISTER_ARTIFACT_DEFAULT = false; /** - * Optional, one of "IfExists" to indicate the behavior of the client when there is a conflict creating an artifact because the artifact already exists. + * Optional, one of "IfExists" to indicate the behavior of the client when there is a conflict creating an + * artifact because the artifact already exists. */ public static final String AUTO_REGISTER_ARTIFACT_IF_EXISTS = "apicurio.registry.auto-register.if-exists"; public static final String AUTO_REGISTER_ARTIFACT_IF_EXISTS_DEFAULT = "FIND_OR_CREATE_VERSION"; /** - * Optional, boolean to indicate whether serializer classes should attempt to find the latest artifact in the registry for the corresponding groupId/artifactId. 
- * GroupId and artifactId are configured either via {@link ArtifactReferenceResolverStrategy} or via config properties such as {@link SchemaResolverConfig#EXPLICIT_ARTIFACT_ID}. + * Optional, boolean to indicate whether serializer classes should attempt to find the latest artifact in + * the registry for the corresponding groupId/artifactId. GroupId and artifactId are configured either via + * {@link ArtifactReferenceResolverStrategy} or via config properties such as + * {@link SchemaResolverConfig#EXPLICIT_ARTIFACT_ID}. */ public static final String FIND_LATEST_ARTIFACT = "apicurio.registry.find-latest"; public static final boolean FIND_LATEST_ARTIFACT_DEFAULT = false; /** - * If {@code true}, will cache schema lookups that either have `latest` or no version specified. Setting this to false - * will effectively disable caching for schema lookups that do not specify a version. + * If {@code true}, will cache schema lookups that either have `latest` or no version specified. Setting + * this to false will effectively disable caching for schema lookups that do not specify a version. */ public static final String CACHE_LATEST = "apicurio.registry.cache-latest"; public static final boolean CACHE_LATEST_DEFAULT = true; /** - * If {@code true}, will log exceptions instead of throwing them when an error occurs trying to refresh a schema - * in the cache. This is useful for production situations where a stale schema is better than completely failing - * schema resolution. Note that this will not impact trying of retries, as retries are attempted before this flag - * is considered. + * If {@code true}, will log exceptions instead of throwing them when an error occurs trying to refresh a + * schema in the cache. This is useful for production situations where a stale schema is better than + * completely failing schema resolution. Note that this will not impact trying of retries, as retries are + * attempted before this flag is considered. 
*/ public static final String FAULT_TOLERANT_REFRESH = "apicurio.registry.fault-tolerant-refresh"; public static final boolean FAULT_TOLERANT_REFRESH_DEFAULT = false; /** - * Only applicable for serializers - * Optional, set explicitly the groupId used for querying/creating an artifact. - * Overrides the groupId returned by the {@link ArtifactReferenceResolverStrategy} + * Only applicable for serializers Optional, set explicitly the groupId used for querying/creating an + * artifact. Overrides the groupId returned by the {@link ArtifactReferenceResolverStrategy} */ public static final String EXPLICIT_ARTIFACT_GROUP_ID = "apicurio.registry.artifact.group-id"; /** - * Only applicable for serializers - * Optional, set explicitly the artifactId used for querying/creating an artifact. - * Overrides the artifactId returned by the {@link ArtifactReferenceResolverStrategy} + * Only applicable for serializers Optional, set explicitly the artifactId used for querying/creating an + * artifact. Overrides the artifactId returned by the {@link ArtifactReferenceResolverStrategy} */ public static final String EXPLICIT_ARTIFACT_ID = "apicurio.registry.artifact.artifact-id"; /** - * Only applicable for serializers - * Optional, set explicitly the schema location in the classpath for the schema to be used for serializing the data. + * Only applicable for serializers Optional, set explicitly the schema location in the classpath for the + * schema to be used for serializing the data. */ public static final String SCHEMA_LOCATION = "apicurio.registry.artifact.schema.location"; /** - * Only applicable for serializers - * Optional, set explicitly the version used for querying/creating an artifact. - * Overrides the version returned by the {@link ArtifactReferenceResolverStrategy} + * Only applicable for serializers Optional, set explicitly the version used for querying/creating an + * artifact. 
Overrides the version returned by the {@link ArtifactReferenceResolverStrategy} */ public static final String EXPLICIT_ARTIFACT_VERSION = "apicurio.registry.artifact.version"; /** - * The URL of the Apicurio Registry. Required when using any Apicurio Registry serde class (serializer or deserializer). + * The URL of the Apicurio Registry. Required when using any Apicurio Registry serde class (serializer or + * deserializer). */ public static final String REGISTRY_URL = "apicurio.registry.url"; /** - * The URL of the Token Endpoint. Required when using any Apicurio Registry serde class (serializer or deserializer) against a secured Apicurio Registry and AUTH_SERVICE_URL is not specified. + * The URL of the Token Endpoint. Required when using any Apicurio Registry serde class (serializer or + * deserializer) against a secured Apicurio Registry and AUTH_SERVICE_URL is not specified. */ public static final String AUTH_TOKEN_ENDPOINT = "apicurio.auth.service.token.endpoint"; /** - * The URL of the Auth Service. Required when using any Apicurio Registry serde class (serializer or deserializer) against a secured Apicurio Registry. + * The URL of the Auth Service. Required when using any Apicurio Registry serde class (serializer or + * deserializer) against a secured Apicurio Registry. */ public static final String AUTH_SERVICE_URL = "apicurio.auth.service.url"; public static final String AUTH_SERVICE_URL_TOKEN_ENDPOINT = "/realms/%s/protocol/openid-connect/token"; @@ -133,37 +138,39 @@ public class SchemaResolverConfig { public static final String AUTH_PASSWORD = "apicurio.auth.password"; /** - * Indicates how long to cache artifacts before auto-eviction. If not included, the artifact will be fetched every time. + * Indicates how long to cache artifacts before auto-eviction. If not included, the artifact will be + * fetched every time. 
*/ public static final String CHECK_PERIOD_MS = "apicurio.registry.check-period-ms"; public static final long CHECK_PERIOD_MS_DEFAULT = 30000; /** - * If a schema can not be retrieved from the Registry, serdes may retry a number of times. - * This configuration option controls the number of retries before failing. - * Valid values are non-negative integers. + * If a schema can not be retrieved from the Registry, serdes may retry a number of times. This + * configuration option controls the number of retries before failing. Valid values are non-negative + * integers. */ public static final String RETRY_COUNT = "apicurio.registry.retry-count"; public static final long RETRY_COUNT_DEFAULT = 3; /** - * If a schema can not be be retrieved from the Registry, serdes may retry a number of times. - * This configuration option controls the delay between the retry attempts, in milliseconds. - * Valid values are non-negative integers. + * If a schema can not be be retrieved from the Registry, serdes may retry a number of times. This + * configuration option controls the delay between the retry attempts, in milliseconds. Valid values are + * non-negative integers. */ public static final String RETRY_BACKOFF_MS = "apicurio.registry.retry-backoff-ms"; public static final long RETRY_BACKOFF_MS_DEFAULT = 300; /** - * Used to indicate the auto-register feature to try to dereference the schema before registering it in Registry. Only supported for Avro. - * Only applicable when {@link SchemaResolverConfig#AUTO_REGISTER_ARTIFACT} is enabled. + * Used to indicate the auto-register feature to try to dereference the schema before registering it in + * Registry. Only supported for Avro. Only applicable when + * {@link SchemaResolverConfig#AUTO_REGISTER_ARTIFACT} is enabled. 
*/ public static final String DEREFERENCE_SCHEMA = "apicurio.registry.dereference-schema"; public static final boolean DEREFERENCE_SCHEMA_DEFAULT = true; /** - * Used to indicate the deserializer to ask Registry to return the schema dereferenced. This is useful to reduce the number of http requests to the server. - * Only applicable to Avro schemas. + * Used to indicate the deserializer to ask Registry to return the schema dereferenced. This is useful to + * reduce the number of http requests to the server. Only applicable to Avro schemas. */ public static final String DESERIALIZER_DEREFERENCE_SCHEMA = "apicurio.registry.deserializer.dereference-schema"; public static final boolean DESERIALIZER_DEREFERENCE_SCHEMA_DEFAULT = false; diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/config/DefaultSchemaResolverConfig.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/config/DefaultSchemaResolverConfig.java index 1004878894..19f8d1e2ec 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/config/DefaultSchemaResolverConfig.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/config/DefaultSchemaResolverConfig.java @@ -8,7 +8,6 @@ import static io.apicurio.registry.resolver.SchemaResolverConfig.*; import static java.util.Map.entry; - public class DefaultSchemaResolverConfig { private static final Map DEFAULTS = Map.ofEntries( @@ -18,12 +17,10 @@ public class DefaultSchemaResolverConfig { entry(CACHE_LATEST, CACHE_LATEST_DEFAULT), entry(FAULT_TOLERANT_REFRESH, FAULT_TOLERANT_REFRESH_DEFAULT), entry(FIND_LATEST_ARTIFACT, FIND_LATEST_ARTIFACT_DEFAULT), - entry(CHECK_PERIOD_MS, CHECK_PERIOD_MS_DEFAULT), - entry(RETRY_COUNT, RETRY_COUNT_DEFAULT), + entry(CHECK_PERIOD_MS, CHECK_PERIOD_MS_DEFAULT), entry(RETRY_COUNT, RETRY_COUNT_DEFAULT), entry(RETRY_BACKOFF_MS, RETRY_BACKOFF_MS_DEFAULT), entry(DEREFERENCE_SCHEMA, DEREFERENCE_SCHEMA_DEFAULT), - entry(DESERIALIZER_DEREFERENCE_SCHEMA, 
DESERIALIZER_DEREFERENCE_SCHEMA_DEFAULT) - ); + entry(DESERIALIZER_DEREFERENCE_SCHEMA, DESERIALIZER_DEREFERENCE_SCHEMA_DEFAULT)); private Map originals; @@ -77,7 +74,8 @@ public boolean autoRegisterArtifact() { } public String autoRegisterArtifactIfExists() { - return getStringOneOf(AUTO_REGISTER_ARTIFACT_IF_EXISTS, "FAIL", "CREATE_VERSION", "FIND_OR_CREATE_VERSION"); + return getStringOneOf(AUTO_REGISTER_ARTIFACT_IF_EXISTS, "FAIL", "CREATE_VERSION", + "FIND_OR_CREATE_VERSION"); } public boolean getCacheLatest() { @@ -186,7 +184,6 @@ private long getLongNonNegative(String key) { return result; } - private String getString(String key) { Object value = getObject(key); if (value == null) { @@ -237,7 +234,7 @@ else if (trimmed.equalsIgnoreCase("false")) } private void reportError(String key, String expectedText, Object value) { - throw new IllegalArgumentException("Invalid configuration property value for '" + key + "'. " + - "Expected " + expectedText + ", but got a '" + value + "'."); + throw new IllegalArgumentException("Invalid configuration property value for '" + key + "'. " + + "Expected " + expectedText + ", but got a '" + value + "'."); } } diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/data/Record.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/data/Record.java index a240148ebc..279d51d593 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/data/Record.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/data/Record.java @@ -1,9 +1,8 @@ package io.apicurio.registry.resolver.data; /** - * Record defines an object that is known as the data or the payload of the record and it's associated metadata. - * A record can be message to be sent or simply an object that can be serialized and deserialized. - * + * Record defines an object that is known as the data or the payload of the record and it's associated + * metadata. 
A record can be message to be sent or simply an object that can be serialized and deserialized. */ public interface Record { diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactCoordinates.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactCoordinates.java index 8b2eb292cc..e238fb9c6c 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactCoordinates.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactCoordinates.java @@ -2,7 +2,6 @@ public class ArtifactCoordinates { - private String groupId; private String artifactId; @@ -10,7 +9,7 @@ public class ArtifactCoordinates { private String version; protected ArtifactCoordinates() { - //empty initialize using setters + // empty initialize using setters } public String getGroupId() { @@ -91,14 +90,13 @@ public boolean equals(Object obj) { */ @Override public String toString() { - return "ArtifactCoordinates [groupId=" + groupId + ", artifactId=" + artifactId + ", version=" + version + "]"; + return "ArtifactCoordinates [groupId=" + groupId + ", artifactId=" + artifactId + ", version=" + + version + "]"; } public static ArtifactCoordinates fromArtifactReference(ArtifactReference artifactReference) { - return builder().artifactId(artifactReference.getArtifactId()) - .groupId(artifactReference.getGroupId()) - .version(artifactReference.getVersion()) - .build(); + return builder().artifactId(artifactReference.getArtifactId()).groupId(artifactReference.getGroupId()) + .version(artifactReference.getVersion()).build(); } public static ArtifactCoordinatesBuilder builder() { diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReference.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReference.java index 1dea8f6542..a6e6dbe111 100644 --- 
a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReference.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReference.java @@ -5,7 +5,6 @@ /** * This class holds the information that reference one Artifact in Apicurio Registry. It will always make * reference to an artifact in a group. Optionally it can reference to a specific version. - * */ public interface ArtifactReference { @@ -48,8 +47,7 @@ public interface ArtifactReference { int hashCode(); /** - * Logical equality. Two artifact references are equal, if they - * MUST refer to the same artifact. + * Logical equality. Two artifact references are equal, if they MUST refer to the same artifact. */ @Override boolean equals(Object obj); @@ -64,7 +62,7 @@ public static ArtifactReference fromGlobalId(Long globalId) { return builder().globalId(globalId).build(); } - public static ArtifactReferenceBuilder builder(){ + public static ArtifactReferenceBuilder builder() { return new ArtifactReferenceBuilder(); } diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceImpl.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceImpl.java index df80bc1b49..4f8b39f3da 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceImpl.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceImpl.java @@ -4,7 +4,6 @@ /** * @see ArtifactReference - * */ public class ArtifactReferenceImpl implements ArtifactReference { @@ -27,7 +26,8 @@ public class ArtifactReferenceImpl implements ArtifactReference { private Long globalId; /** - * Optional, unless the rest of the fields are empty or {@link SchemaResolverConfig#USE_ID} is configured with IdOption.contentId + * Optional, unless the rest of the fields are empty or {@link SchemaResolverConfig#USE_ID} is configured + * with IdOption.contentId */ private Long 
contentId; @@ -37,7 +37,7 @@ public class ArtifactReferenceImpl implements ArtifactReference { private String contentHash; protected ArtifactReferenceImpl() { - //empty initialize using setters + // empty initialize using setters } /** @@ -160,7 +160,7 @@ public boolean equals(Object obj) { ArtifactReferenceImpl other = (ArtifactReferenceImpl) obj; boolean match1 = false; - if(globalId != null && other.globalId != null) { + if (globalId != null && other.globalId != null) { if (!globalId.equals(other.globalId)) { return false; } else { @@ -169,7 +169,7 @@ public boolean equals(Object obj) { } boolean match2 = false; - if(contentId != null && other.contentId != null) { + if (contentId != null && other.contentId != null) { if (!contentId.equals(other.contentId)) { return false; } else { @@ -178,7 +178,7 @@ public boolean equals(Object obj) { } boolean match3 = false; - if(contentHash != null && other.contentHash != null) { + if (contentHash != null && other.contentHash != null) { if (!contentHash.equals(other.contentHash)) { return false; } else { @@ -187,7 +187,7 @@ public boolean equals(Object obj) { } boolean match4 = false; - if(groupId != null && other.groupId != null) { + if (groupId != null && other.groupId != null) { if (!groupId.equals(other.groupId)) { return false; } else { @@ -196,7 +196,7 @@ public boolean equals(Object obj) { } boolean match5 = false; - if(artifactId != null && other.artifactId != null) { + if (artifactId != null && other.artifactId != null) { if (!artifactId.equals(other.artifactId)) { return false; } else { @@ -205,7 +205,7 @@ public boolean equals(Object obj) { } boolean match6 = false; - if(version != null && other.version != null) { + if (version != null && other.version != null) { if (!version.equals(other.version)) { return false; } else { @@ -222,7 +222,8 @@ public boolean equals(Object obj) { @Override public String toString() { return "ArtifactReference [groupId=" + groupId + ", artifactId=" + artifactId + ", version=" + 
version - + ", globalId=" + globalId + ", contentId=" + contentId + ", contentHash=" + contentHash + "]"; + + ", globalId=" + globalId + ", contentId=" + contentId + ", contentHash=" + contentHash + + "]"; } public static class ArtifactReferenceBuilder { diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceResolverStrategy.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceResolverStrategy.java index f39e7475dd..a1b5e20fc0 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceResolverStrategy.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceResolverStrategy.java @@ -4,24 +4,26 @@ import io.apicurio.registry.resolver.data.Record; /** - * This interface is used by the SchemaResolver to determine - * the {@link ArtifactReference} under which the message schemas are located or should be registered - * in the registry. - * + * This interface is used by the SchemaResolver to determine the {@link ArtifactReference} under which the + * message schemas are located or should be registered in the registry. */ public interface ArtifactReferenceResolverStrategy { /** - * For a given Record, returns the {@link ArtifactReference} under which the message schemas are located or should be registered - * in the registry. + * For a given Record, returns the {@link ArtifactReference} under which the message schemas are located + * or should be registered in the registry. 
+ * * @param data record for which we want to resolve the ArtifactReference - * @param parsedSchema the schema of the record being resolved, can be null if {@link ArtifactReferenceResolverStrategy#loadSchema()} is set to false - * @return the {@link ArtifactReference} under which the message schemas are located or should be registered + * @param parsedSchema the schema of the record being resolved, can be null if + * {@link ArtifactReferenceResolverStrategy#loadSchema()} is set to false + * @return the {@link ArtifactReference} under which the message schemas are located or should be + * registered */ ArtifactReference artifactReference(Record data, ParsedSchema parsedSchema); /** - * Whether or not to load and pass the parsed schema to the {@link ArtifactReferenceResolverStrategy#artifactReference(Record, ParsedSchema)} lookup method + * Whether or not to load and pass the parsed schema to the + * {@link ArtifactReferenceResolverStrategy#artifactReference(Record, ParsedSchema)} lookup method */ default boolean loadSchema() { return true; diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/DynamicArtifactReferenceResolverStrategy.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/DynamicArtifactReferenceResolverStrategy.java index fb01938b61..7340316ef7 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/DynamicArtifactReferenceResolverStrategy.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/strategy/DynamicArtifactReferenceResolverStrategy.java @@ -5,18 +5,23 @@ import io.apicurio.registry.resolver.data.Record; /** - * {@link ArtifactReferenceResolverStrategy} implementation that simply returns {@link Metadata#artifactReference()} from the given {@link Record} + * {@link ArtifactReferenceResolverStrategy} implementation that simply returns + * {@link Metadata#artifactReference()} from the given {@link Record} */ -public class 
DynamicArtifactReferenceResolverStrategy implements ArtifactReferenceResolverStrategy { +public class DynamicArtifactReferenceResolverStrategy + implements ArtifactReferenceResolverStrategy { /** - * @see io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, io.apicurio.registry.resolver.ParsedSchema) + * @see io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, + * io.apicurio.registry.resolver.ParsedSchema) */ @Override public ArtifactReference artifactReference(Record data, ParsedSchema parsedSchema) { ArtifactReference reference = data.metadata().artifactReference(); if (reference == null) { - throw new IllegalStateException("Wrong configuration. Missing metadata.artifactReference in Record, it's required by " + this.getClass().getName()); + throw new IllegalStateException( + "Wrong configuration. Missing metadata.artifactReference in Record, it's required by " + + this.getClass().getName()); } return reference; } diff --git a/schema-resolver/src/main/java/io/apicurio/registry/resolver/utils/Utils.java b/schema-resolver/src/main/java/io/apicurio/registry/resolver/utils/Utils.java index afe260bf66..f9ee0a3a6e 100644 --- a/schema-resolver/src/main/java/io/apicurio/registry/resolver/utils/Utils.java +++ b/schema-resolver/src/main/java/io/apicurio/registry/resolver/utils/Utils.java @@ -17,26 +17,26 @@ public static Class loadClass(String javaType) { } } - - //TODO make the instantiation mechanism configurable + // TODO make the instantiation mechanism configurable @SuppressWarnings("unchecked") public static void instantiate(Class type, Object value, Consumer setter) { if (value != null) { if (type.isInstance(value)) { setter.accept(type.cast(value)); } else if (value instanceof Class && type.isAssignableFrom((Class) value)) { - //noinspection unchecked + // noinspection unchecked 
setter.accept(instantiate((Class) value)); } else if (value instanceof String) { Class clazz = loadClass((String) value); setter.accept(instantiate(clazz)); } else { - throw new IllegalArgumentException(String.format("Cannot handle configuration [%s]: %s", type.getName(), value)); + throw new IllegalArgumentException( + String.format("Cannot handle configuration [%s]: %s", type.getName(), value)); } } } - //TODO make the instantiation mechanism configurable + // TODO make the instantiation mechanism configurable public static V instantiate(Class clazz) { try { return clazz.getConstructor().newInstance(); diff --git a/schema-resolver/src/test/java/io/apicurio/registry/resolver/AbstractSchemaResolverTest.java b/schema-resolver/src/test/java/io/apicurio/registry/resolver/AbstractSchemaResolverTest.java index 5ed0dc704f..b6eba4f305 100644 --- a/schema-resolver/src/test/java/io/apicurio/registry/resolver/AbstractSchemaResolverTest.java +++ b/schema-resolver/src/test/java/io/apicurio/registry/resolver/AbstractSchemaResolverTest.java @@ -1,25 +1,27 @@ package io.apicurio.registry.resolver; +import io.apicurio.registry.resolver.data.Record; +import io.apicurio.registry.resolver.strategy.ArtifactReference; +import org.junit.jupiter.api.Test; + import java.util.Collections; import java.util.HashMap; import java.util.Map; -import org.junit.jupiter.api.Test; - -import io.apicurio.registry.resolver.data.Record; -import io.apicurio.registry.resolver.strategy.ArtifactReference; - import static org.junit.jupiter.api.Assertions.*; public class AbstractSchemaResolverTest { @Test void testConfigureInitializesSchemaCache() throws Exception { - Map configs = Collections.singletonMap(SchemaResolverConfig.REGISTRY_URL, "http://localhost"); + Map configs = Collections.singletonMap(SchemaResolverConfig.REGISTRY_URL, + "http://localhost"); try (TestAbstractSchemaResolver resolver = new TestAbstractSchemaResolver<>()) { resolver.configure(configs, null); - assertDoesNotThrow(() -> 
{resolver.schemaCache.checkInitialized();}); + assertDoesNotThrow(() -> { + resolver.schemaCache.checkInitialized(); + }); } } @@ -69,8 +71,9 @@ public SchemaLookupResult resolveSchema(Record data) { @Override public SchemaLookupResult resolveSchemaByArtifactReference(ArtifactReference reference) { - throw new UnsupportedOperationException("Unimplemented method 'resolveSchemaByArtifactReference'"); + throw new UnsupportedOperationException( + "Unimplemented method 'resolveSchemaByArtifactReference'"); } - + } } diff --git a/schema-resolver/src/test/java/io/apicurio/registry/resolver/DefaultSchemaResolverTest.java b/schema-resolver/src/test/java/io/apicurio/registry/resolver/DefaultSchemaResolverTest.java index 2b28b6faf7..03d8d6e325 100644 --- a/schema-resolver/src/test/java/io/apicurio/registry/resolver/DefaultSchemaResolverTest.java +++ b/schema-resolver/src/test/java/io/apicurio/registry/resolver/DefaultSchemaResolverTest.java @@ -1,16 +1,15 @@ package io.apicurio.registry.resolver; -import static org.junit.jupiter.api.Assertions.assertEquals; +import com.microsoft.kiota.RequestAdapter; +import io.apicurio.registry.resolver.strategy.ArtifactReference; +import io.apicurio.registry.rest.client.RegistryClient; +import org.junit.jupiter.api.Test; import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; -import com.microsoft.kiota.RequestAdapter; -import org.junit.jupiter.api.Test; - -import io.apicurio.registry.resolver.strategy.ArtifactReference; -import io.apicurio.registry.rest.client.RegistryClient; +import static org.junit.jupiter.api.Assertions.assertEquals; public class DefaultSchemaResolverTest { @Test @@ -29,7 +28,8 @@ void testCanResolveArtifactByContentHash() { SchemaLookupResult result = resolver.resolveSchemaByArtifactReference(reference); assertEquals(contentHash, result.getContentHash()); - assertEquals(schemaContent, new String(result.getParsedSchema().getRawSchema(), StandardCharsets.UTF_8)); + 
assertEquals(schemaContent, + new String(result.getParsedSchema().getRawSchema(), StandardCharsets.UTF_8)); } @Test @@ -42,16 +42,18 @@ void testCachesArtifactsResolvedByContentHash() { resolver.setClient(client); Map configs = new HashMap<>(); SchemaParser schemaParser = new MockSchemaParser(); - resolver.configure(configs, schemaParser); + resolver.configure(configs, schemaParser); ArtifactReference reference = ArtifactReference.builder().contentHash(contentHash).build(); SchemaLookupResult result1 = resolver.resolveSchemaByArtifactReference(reference); SchemaLookupResult result2 = resolver.resolveSchemaByArtifactReference(reference); assertEquals(contentHash, result1.getContentHash()); - assertEquals(schemaContent, new String(result1.getParsedSchema().getRawSchema(), StandardCharsets.UTF_8)); + assertEquals(schemaContent, + new String(result1.getParsedSchema().getRawSchema(), StandardCharsets.UTF_8)); assertEquals(contentHash, result2.getContentHash()); - assertEquals(schemaContent, new String(result2.getParsedSchema().getRawSchema(), StandardCharsets.UTF_8)); + assertEquals(schemaContent, + new String(result2.getParsedSchema().getRawSchema(), StandardCharsets.UTF_8)); assertEquals(1, adapter.timesGetContentByHashCalled); } diff --git a/schema-resolver/src/test/java/io/apicurio/registry/resolver/ERCacheTest.java b/schema-resolver/src/test/java/io/apicurio/registry/resolver/ERCacheTest.java index 27dc4c2a7c..c5f126a043 100644 --- a/schema-resolver/src/test/java/io/apicurio/registry/resolver/ERCacheTest.java +++ b/schema-resolver/src/test/java/io/apicurio/registry/resolver/ERCacheTest.java @@ -1,18 +1,17 @@ package io.apicurio.registry.resolver; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; +import 
io.apicurio.registry.resolver.strategy.ArtifactCoordinates; +import org.junit.jupiter.api.Test; import java.time.Duration; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; -import org.junit.jupiter.api.Test; - -import io.apicurio.registry.resolver.strategy.ArtifactCoordinates; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; public class ERCacheTest { @@ -26,24 +25,36 @@ void testCheckInitializedPassesWithContentHashKeyExtractor() { @Test void testCheckInitializedFailsWithoutContentHashKeyExtractor() { ERCache cache = new ERCache<>(); - Function globalIdKeyExtractor = (o) -> {return 1L;}; - Function contentIdKeyExtractor = (o) -> {return 2L;}; - Function artifactKeyExtractor = (o) -> {return ArtifactCoordinates.builder().artifactId("artifact id").build();}; - Function contentKeyExtractor = (o) -> {return "content";}; + Function globalIdKeyExtractor = (o) -> { + return 1L; + }; + Function contentIdKeyExtractor = (o) -> { + return 2L; + }; + Function artifactKeyExtractor = (o) -> { + return ArtifactCoordinates.builder().artifactId("artifact id").build(); + }; + Function contentKeyExtractor = (o) -> { + return "content"; + }; cache.configureGlobalIdKeyExtractor(globalIdKeyExtractor); cache.configureContentIdKeyExtractor(contentIdKeyExtractor); cache.configureArtifactCoordinatesKeyExtractor(artifactKeyExtractor); cache.configureContentKeyExtractor(contentKeyExtractor); - assertThrows(IllegalStateException.class, () -> {cache.checkInitialized();}); + assertThrows(IllegalStateException.class, () -> { + cache.checkInitialized(); + }); } @Test void testContainsByContentHash() { String contentHashKey = "some key"; ERCache cache = newCache(contentHashKey); - Function 
staticValueLoader = (key) -> {return "present";}; + Function staticValueLoader = (key) -> { + return "present"; + }; assertFalse(cache.containsByContentHash(contentHashKey)); cache.getByContentHash(contentHashKey, staticValueLoader); @@ -55,8 +66,12 @@ void testContainsByContentHash() { void testGetByContentHash() { String contentHashKey = "content hash key"; ERCache cache = newCache(contentHashKey); - Function staticValueLoader = (key) -> {return "value";}; - Function ensureCachedLoader = (key) -> {throw new IllegalStateException("this should've been cached");}; + Function staticValueLoader = (key) -> { + return "value"; + }; + Function ensureCachedLoader = (key) -> { + throw new IllegalStateException("this should've been cached"); + }; String uncachedValue = cache.getByContentHash(contentHashKey, staticValueLoader); assertEquals("value", uncachedValue); @@ -71,8 +86,12 @@ void testGetByContentHashEnforcesTTL() { String contentHashKey = "content hash ttl key"; ERCache cache = newCache(contentHashKey); cache.configureLifetime(Duration.ZERO); - Function firstLoader = (key) -> {return "a value";}; - Function secondLoader = (key) -> {return "another value";}; + Function firstLoader = (key) -> { + return "a value"; + }; + Function secondLoader = (key) -> { + return "another value"; + }; String firstValue = cache.getByContentHash(contentHashKey, firstLoader); assertEquals("a value", firstValue); @@ -84,7 +103,9 @@ void testGetByContentHashEnforcesTTL() { void testClearEmptiesContentHashIndex() { String contentHashKey = "another key"; ERCache cache = newCache(contentHashKey); - Function staticValueLoader = (key) -> {return "some value";}; + Function staticValueLoader = (key) -> { + return "some value"; + }; cache.getByContentHash(contentHashKey, staticValueLoader); cache.clear(); @@ -96,9 +117,13 @@ void testClearEmptiesContentHashIndex() { void testThrowsLoadExceptionsByDefault() { String contentHashKey = "another key"; ERCache cache = newCache(contentHashKey); - 
Function staticValueLoader = (key) -> {throw new IllegalStateException("load failure");}; + Function staticValueLoader = (key) -> { + throw new IllegalStateException("load failure"); + }; - assertThrows(RuntimeException.class, () -> {cache.getByContentHash(contentHashKey, staticValueLoader);}); + assertThrows(RuntimeException.class, () -> { + cache.getByContentHash(contentHashKey, staticValueLoader); + }); } @Test @@ -109,11 +134,15 @@ void testHoldsLoadExceptionsWhenFaultTolerantRefreshEnabled() { cache.configureFaultTolerantRefresh(true); // Seed a value - Function workingLoader = (key) -> {return "some value";}; + Function workingLoader = (key) -> { + return "some value"; + }; String originalLoadValue = cache.getByContentHash(contentHashKey, workingLoader); // Refresh with a failing loader - Function failingLoader = (key) -> {throw new IllegalStateException("load failure");}; + Function failingLoader = (key) -> { + throw new IllegalStateException("load failure"); + }; String failingLoadValue = cache.getByContentHash(contentHashKey, failingLoader); assertEquals("some value", originalLoadValue); @@ -127,9 +156,7 @@ void testCanCacheLatestWhenEnabled() { cache.configureCacheLatest(true); ArtifactCoordinates latestKey = new ArtifactCoordinates.ArtifactCoordinatesBuilder() - .artifactId("someArtifactId") - .groupId("someGroupId") - .build(); + .artifactId("someArtifactId").groupId("someGroupId").build(); final AtomicInteger loadCount = new AtomicInteger(0); Function countingLoader = (key) -> { loadCount.incrementAndGet(); @@ -152,9 +179,7 @@ void doesNotCacheLatestWhenDisabled() { cache.configureCacheLatest(false); ArtifactCoordinates latestKey = new ArtifactCoordinates.ArtifactCoordinatesBuilder() - .artifactId("someArtifactId") - .groupId("someGroupId") - .build(); + .artifactId("someArtifactId").groupId("someGroupId").build(); final AtomicInteger loadCount = new AtomicInteger(0); Function countingLoader = (key) -> { loadCount.incrementAndGet(); @@ -173,11 +198,21 
@@ void doesNotCacheLatestWhenDisabled() { private ERCache newCache(String contentHashKey) { ERCache cache = new ERCache<>(); cache.configureLifetime(Duration.ofDays(30)); - Function globalIdKeyExtractor = (o) -> {return 1L;}; - Function contentIdKeyExtractor = (o) -> {return 2L;}; - Function contentHashKeyExtractor = (o) -> {return contentHashKey;}; - Function artifactKeyExtractor = (o) -> {return ArtifactCoordinates.builder().artifactId("artifact id").build();}; - Function contentKeyExtractor = (o) -> {return "content";}; + Function globalIdKeyExtractor = (o) -> { + return 1L; + }; + Function contentIdKeyExtractor = (o) -> { + return 2L; + }; + Function contentHashKeyExtractor = (o) -> { + return contentHashKey; + }; + Function artifactKeyExtractor = (o) -> { + return ArtifactCoordinates.builder().artifactId("artifact id").build(); + }; + Function contentKeyExtractor = (o) -> { + return "content"; + }; cache.configureGlobalIdKeyExtractor(globalIdKeyExtractor); cache.configureContentIdKeyExtractor(contentIdKeyExtractor); diff --git a/schema-resolver/src/test/java/io/apicurio/registry/resolver/MockRequestAdapter.java b/schema-resolver/src/test/java/io/apicurio/registry/resolver/MockRequestAdapter.java index a6fc12c336..d09ed6a67a 100644 --- a/schema-resolver/src/test/java/io/apicurio/registry/resolver/MockRequestAdapter.java +++ b/schema-resolver/src/test/java/io/apicurio/registry/resolver/MockRequestAdapter.java @@ -38,40 +38,54 @@ public SerializationWriterFactory getSerializationWriterFactory() { @Nullable @Override - public ModelType send(@Nonnull RequestInformation requestInfo, @Nullable HashMap> errorMappings, @Nonnull ParsableFactory factory) { + public ModelType send(@Nonnull RequestInformation requestInfo, + @Nullable HashMap> errorMappings, + @Nonnull ParsableFactory factory) { throw new UnsupportedOperationException("Unimplemented"); } @Nullable @Override - public List sendCollection(@Nonnull RequestInformation requestInfo, @Nullable HashMap> 
errorMappings, @Nonnull ParsableFactory factory) { + public List sendCollection( + @Nonnull RequestInformation requestInfo, + @Nullable HashMap> errorMappings, + @Nonnull ParsableFactory factory) { assertEquals("{+baseurl}/ids/contentHashes/{contentHash}/references", requestInfo.urlTemplate); return List.of(); } @Nullable @Override - public ModelType sendPrimitive(@Nonnull RequestInformation requestInfo, @Nullable HashMap> errorMappings, @Nonnull Class targetClass) { + public ModelType sendPrimitive(@Nonnull RequestInformation requestInfo, + @Nullable HashMap> errorMappings, + @Nonnull Class targetClass) { assertEquals("{+baseurl}/ids/contentHashes/{contentHash}", requestInfo.urlTemplate); this.timesGetContentByHashCalled++; - return (ModelType)new ByteArrayInputStream(this.schemaContent.getBytes(StandardCharsets.UTF_8)); + return (ModelType) new ByteArrayInputStream(this.schemaContent.getBytes(StandardCharsets.UTF_8)); } @Nullable @Override - public List sendPrimitiveCollection(@Nonnull RequestInformation requestInfo, @Nullable HashMap> errorMappings, @Nonnull Class targetClass) { + public List sendPrimitiveCollection(@Nonnull RequestInformation requestInfo, + @Nullable HashMap> errorMappings, + @Nonnull Class targetClass) { throw new UnsupportedOperationException("Unimplemented"); } @Nullable @Override - public > ModelType sendEnum(@Nonnull RequestInformation requestInfo, @Nullable HashMap> errorMappings, @Nonnull ValuedEnumParser enumParser) { + public > ModelType sendEnum(@Nonnull RequestInformation requestInfo, + @Nullable HashMap> errorMappings, + @Nonnull ValuedEnumParser enumParser) { throw new UnsupportedOperationException("Unimplemented"); } @Nullable @Override - public > List sendEnumCollection(@Nonnull RequestInformation requestInfo, @Nullable HashMap> errorMappings, @Nonnull ValuedEnumParser enumParser) { + public > List sendEnumCollection( + @Nonnull RequestInformation requestInfo, + @Nullable HashMap> errorMappings, + @Nonnull ValuedEnumParser 
enumParser) { throw new UnsupportedOperationException("Unimplemented"); } diff --git a/schema-resolver/src/test/java/io/apicurio/registry/resolver/MockSchemaParser.java b/schema-resolver/src/test/java/io/apicurio/registry/resolver/MockSchemaParser.java index 606b3c8d0d..3a59c85667 100644 --- a/schema-resolver/src/test/java/io/apicurio/registry/resolver/MockSchemaParser.java +++ b/schema-resolver/src/test/java/io/apicurio/registry/resolver/MockSchemaParser.java @@ -1,10 +1,10 @@ package io.apicurio.registry.resolver; +import io.apicurio.registry.resolver.data.Record; + import java.nio.charset.StandardCharsets; import java.util.Map; -import io.apicurio.registry.resolver.data.Record; - public class MockSchemaParser implements SchemaParser { private ParsedSchema dataSchema; private String parsedSchema; diff --git a/schema-resolver/src/test/java/io/apicurio/registry/resolver/SchemaLookupResultTest.java b/schema-resolver/src/test/java/io/apicurio/registry/resolver/SchemaLookupResultTest.java index 6c15a6d43e..dd7891219e 100644 --- a/schema-resolver/src/test/java/io/apicurio/registry/resolver/SchemaLookupResultTest.java +++ b/schema-resolver/src/test/java/io/apicurio/registry/resolver/SchemaLookupResultTest.java @@ -1,10 +1,9 @@ package io.apicurio.registry.resolver; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import io.apicurio.registry.resolver.strategy.ArtifactReference; import org.junit.jupiter.api.Test; -import io.apicurio.registry.resolver.strategy.ArtifactReference; +import static org.junit.jupiter.api.Assertions.assertEquals; public class SchemaLookupResultTest { @Test diff --git a/schema-resolver/src/test/java/io/apicurio/registry/resolver/config/ConfigurationTest.java b/schema-resolver/src/test/java/io/apicurio/registry/resolver/config/ConfigurationTest.java index bfb3883d33..6755347194 100644 --- a/schema-resolver/src/test/java/io/apicurio/registry/resolver/config/ConfigurationTest.java +++ 
b/schema-resolver/src/test/java/io/apicurio/registry/resolver/config/ConfigurationTest.java @@ -83,9 +83,12 @@ void testDefaultConfiguration() { assertEquals(false, config.findLatest()); assertEquals(false, config.getObject("apicurio.registry.find-latest")); - // TODO: Does not match documentation, overridden in `io.apicurio.registry.serde.SerdeConfig.ARTIFACT_RESOLVER_STRATEGY_DEFAULT` - assertEquals("io.apicurio.registry.resolver.strategy.DynamicArtifactReferenceResolverStrategy", config.getArtifactResolverStrategy()); - assertEquals("io.apicurio.registry.resolver.strategy.DynamicArtifactReferenceResolverStrategy", config.getObject("apicurio.registry.artifact-resolver-strategy")); + // TODO: Does not match documentation, overridden in + // `io.apicurio.registry.serde.SerdeConfig.ARTIFACT_RESOLVER_STRATEGY_DEFAULT` + assertEquals("io.apicurio.registry.resolver.strategy.DynamicArtifactReferenceResolverStrategy", + config.getArtifactResolverStrategy()); + assertEquals("io.apicurio.registry.resolver.strategy.DynamicArtifactReferenceResolverStrategy", + config.getObject("apicurio.registry.artifact-resolver-strategy")); key = "apicurio.registry.check-period-ms"; assertEquals(Duration.ofMillis(30000), config.getCheckPeriod()); diff --git a/schema-resolver/src/test/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceImplTest.java b/schema-resolver/src/test/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceImplTest.java index a2492da7c8..6682c92c8d 100644 --- a/schema-resolver/src/test/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceImplTest.java +++ b/schema-resolver/src/test/java/io/apicurio/registry/resolver/strategy/ArtifactReferenceImplTest.java @@ -1,14 +1,16 @@ package io.apicurio.registry.resolver.strategy; -import static org.junit.jupiter.api.Assertions.assertTrue; - import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class ArtifactReferenceImplTest { @Test void 
testEqualsReturnsTrueWhenContentHashMatches() { - ArtifactReference artifact1 = new ArtifactReferenceImpl.ArtifactReferenceBuilder().contentHash("foo").build(); - ArtifactReference artifact2 = new ArtifactReferenceImpl.ArtifactReferenceBuilder().contentHash("foo").build(); + ArtifactReference artifact1 = new ArtifactReferenceImpl.ArtifactReferenceBuilder().contentHash("foo") + .build(); + ArtifactReference artifact2 = new ArtifactReferenceImpl.ArtifactReferenceBuilder().contentHash("foo") + .build(); assertTrue(artifact1.equals(artifact2)); assertTrue(artifact2.equals(artifact1)); @@ -16,8 +18,10 @@ void testEqualsReturnsTrueWhenContentHashMatches() { @Test void testEqualsReturnsFalseWhenContentHashesDontMatch() { - ArtifactReference artifact1 = new ArtifactReferenceImpl.ArtifactReferenceBuilder().contentHash("foo").build(); - ArtifactReference artifact2 = new ArtifactReferenceImpl.ArtifactReferenceBuilder().contentHash("bar").build(); + ArtifactReference artifact1 = new ArtifactReferenceImpl.ArtifactReferenceBuilder().contentHash("foo") + .build(); + ArtifactReference artifact2 = new ArtifactReferenceImpl.ArtifactReferenceBuilder().contentHash("bar") + .build(); assertTrue(!artifact1.equals(artifact2)); assertTrue(!artifact2.equals(artifact1)); diff --git a/schema-util/asyncapi/pom.xml b/schema-util/asyncapi/pom.xml index f05e0ac8dc..2ee8318550 100644 --- a/schema-util/asyncapi/pom.xml +++ b/schema-util/asyncapi/pom.xml @@ -1,52 +1,50 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-schema-util-asyncapi - jar - apicurio-registry-schema-util-asyncapi + apicurio-registry-schema-util-asyncapi + jar + apicurio-registry-schema-util-asyncapi - + - - io.apicurio - apicurio-registry-schema-util-common - - - - io.apicurio - apicurio-registry-schema-util-openapi - + + io.apicurio + apicurio-registry-schema-util-common + - + + 
io.apicurio + apicurio-registry-schema-util-openapi + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/asyncapi/src/main/java/io/apicurio/registry/content/refs/AsyncApiReferenceFinder.java b/schema-util/asyncapi/src/main/java/io/apicurio/registry/content/refs/AsyncApiReferenceFinder.java index 988c242464..116d595106 100644 --- a/schema-util/asyncapi/src/main/java/io/apicurio/registry/content/refs/AsyncApiReferenceFinder.java +++ b/schema-util/asyncapi/src/main/java/io/apicurio/registry/content/refs/AsyncApiReferenceFinder.java @@ -1,7 +1,7 @@ package io.apicurio.registry.content.refs; /** - * OpenAPI implementation of a reference finder. Parses the OpenAPI document, finds all $refs, converts them + * OpenAPI implementation of a reference finder. Parses the OpenAPI document, finds all $refs, converts them * to external references, and returns them. 
*/ public class AsyncApiReferenceFinder extends AbstractDataModelsReferenceFinder { diff --git a/schema-util/avro/pom.xml b/schema-util/avro/pom.xml index a509ab2e0e..855d351a6d 100644 --- a/schema-util/avro/pom.xml +++ b/schema-util/avro/pom.xml @@ -1,76 +1,74 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-schema-util-avro - jar - apicurio-registry-schema-util-avro + apicurio-registry-schema-util-avro + jar + apicurio-registry-schema-util-avro - + - - io.apicurio - apicurio-registry-schema-util-common - + + io.apicurio + apicurio-registry-schema-util-common + - - io.apicurio - apicurio-registry-schema-util-common - ${project.version} - test-jar - test - + + io.apicurio + apicurio-registry-schema-util-common + ${project.version} + test-jar + test + - - com.fasterxml.jackson.core - jackson-databind - + + com.fasterxml.jackson.core + jackson-databind + - - org.apache.avro - avro - + + org.apache.avro + avro + - - org.junit.jupiter - junit-jupiter - test - - - commons-io - commons-io - test - + + org.junit.jupiter + junit-jupiter + test + + + commons-io + commons-io + test + - + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/avro/src/main/java/io/apicurio/registry/content/canon/AvroContentCanonicalizer.java b/schema-util/avro/src/main/java/io/apicurio/registry/content/canon/AvroContentCanonicalizer.java index 59c3aafb1b..e09dd4947b 100644 --- a/schema-util/avro/src/main/java/io/apicurio/registry/content/canon/AvroContentCanonicalizer.java +++ b/schema-util/avro/src/main/java/io/apicurio/registry/content/canon/AvroContentCanonicalizer.java @@ -20,11 +20,11 @@ /** * An Avro implementation of a content Canonicalizer that 
handles avro references. - * */ public class AvroContentCanonicalizer implements ContentCanonicalizer { - private final ObjectMapper mapper = new ObjectMapper().enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); + private final ObjectMapper mapper = new ObjectMapper() + .enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); private final Comparator fieldComparator = (n1, n2) -> { String name1 = n1.get("name").textValue(); @@ -63,7 +63,8 @@ public TypedContent canonicalize(TypedContent content, Map schemaRefs.add(schemaRef); } final Schema schema = parser.parse(content.getContent().content()); - return TypedContent.create(ContentHandle.create(schema.toString(schemaRefs, false)), ContentTypes.APPLICATION_JSON); + return TypedContent.create(ContentHandle.create(schema.toString(schemaRefs, false)), + ContentTypes.APPLICATION_JSON); } } } diff --git a/schema-util/avro/src/main/java/io/apicurio/registry/content/canon/EnhancedAvroContentCanonicalizer.java b/schema-util/avro/src/main/java/io/apicurio/registry/content/canon/EnhancedAvroContentCanonicalizer.java index 215c34273e..54805807a3 100644 --- a/schema-util/avro/src/main/java/io/apicurio/registry/content/canon/EnhancedAvroContentCanonicalizer.java +++ b/schema-util/avro/src/main/java/io/apicurio/registry/content/canon/EnhancedAvroContentCanonicalizer.java @@ -12,8 +12,8 @@ import java.util.Map; /** - * An Avro implementation of a content Canonicalizer that handles avro references. - * A custom version that can be used to check subject compatibilities. It does not reorder fields. + * An Avro implementation of a content Canonicalizer that handles avro references. A custom version that can + * be used to check subject compatibilities. It does not reorder fields. */ public class EnhancedAvroContentCanonicalizer implements ContentCanonicalizer { @@ -33,7 +33,7 @@ public static Schema normalizeSchema(Schema schema) { /** * Normalize a schema. * - * @param schema a schema. + * @param schema a schema. 
* @param alreadyNormalized a Map indicating if the fields in the schema were already normalized. * @return the same schema functionally, in normalized form. */ @@ -53,16 +53,19 @@ private static Schema normalizeSchema(Schema schema, Map alread final Schema result; switch (schema.getType()) { case RECORD: - result = Schema.createRecord(schema.getName(), EMPTY_DOC, schema.getNamespace(), false, normalizeFields(schema.getFields(), alreadyNormalized)); + result = Schema.createRecord(schema.getName(), EMPTY_DOC, schema.getNamespace(), false, + normalizeFields(schema.getFields(), alreadyNormalized)); break; case ENUM: - result = Schema.createEnum(schema.getName(), EMPTY_DOC, schema.getNamespace(), schema.getEnumSymbols()); + result = Schema.createEnum(schema.getName(), EMPTY_DOC, schema.getNamespace(), + schema.getEnumSymbols()); break; case ARRAY: result = Schema.createArray(normalizeSchema(schema.getElementType(), alreadyNormalized)); break; case FIXED: - result = Schema.createFixed(schema.getName(), EMPTY_DOC, schema.getNamespace(), schema.getFixedSize()); + result = Schema.createFixed(schema.getName(), EMPTY_DOC, schema.getNamespace(), + schema.getFixedSize()); break; case UNION: result = Schema.createUnion(normalizeSchemasList(schema.getTypes(), alreadyNormalized)); @@ -77,7 +80,8 @@ private static Schema normalizeSchema(Schema schema, Map alread return result; } - private static List normalizeSchemasList(List schemas, Map alreadyNormalized) { + private static List normalizeSchemasList(List schemas, + Map alreadyNormalized) { final List result = new ArrayList<>(schemas.size()); for (Schema schema : schemas) { result.add(normalizeSchema(schema, alreadyNormalized)); @@ -86,12 +90,15 @@ private static List normalizeSchemasList(List schemas, Map alreadyNormalized) { - final Schema.Field result = new Schema.Field(field.name(), normalizeSchema(field.schema(), alreadyNormalized), EMPTY_DOC, field.defaultVal(), field.order()); + final Schema.Field result = new 
Schema.Field(field.name(), + normalizeSchema(field.schema(), alreadyNormalized), EMPTY_DOC, field.defaultVal(), + field.order()); field.getObjectProps().forEach(result::addProp); return result; } - private static List normalizeFields(List fields, Map alreadyNormalized) { + private static List normalizeFields(List fields, + Map alreadyNormalized) { List result = new ArrayList<>(fields.size()); for (Schema.Field field : fields) { result.add(normalizeField(field, alreadyNormalized)); diff --git a/schema-util/avro/src/main/java/io/apicurio/registry/content/dereference/AvroDereferencer.java b/schema-util/avro/src/main/java/io/apicurio/registry/content/dereference/AvroDereferencer.java index e78ebebb21..95aaa14dd0 100644 --- a/schema-util/avro/src/main/java/io/apicurio/registry/content/dereference/AvroDereferencer.java +++ b/schema-util/avro/src/main/java/io/apicurio/registry/content/dereference/AvroDereferencer.java @@ -18,15 +18,16 @@ public TypedContent dereference(TypedContent content, Map final Schema schema = parser.parse(content.getContent().content()); return TypedContent.create(ContentHandle.create(schema.toString()), ContentTypes.APPLICATION_JSON); } - + /** - * @see io.apicurio.registry.content.dereference.ContentDereferencer#rewriteReferences(io.apicurio.registry.content.TypedContent, java.util.Map) + * @see io.apicurio.registry.content.dereference.ContentDereferencer#rewriteReferences(io.apicurio.registry.content.TypedContent, + * java.util.Map) */ @Override public TypedContent rewriteReferences(TypedContent content, Map resolvedReferenceUrls) { - // Avro does not support rewriting references. A reference in Avro is a QName of a type - // defined in another .avsc file. The location of that other file is not included in the Avro - // specification (in other words there is no "import" statement). So rewriting is meaningless + // Avro does not support rewriting references. A reference in Avro is a QName of a type + // defined in another .avsc file. 
The location of that other file is not included in the Avro + // specification (in other words there is no "import" statement). So rewriting is meaningless // in Avro. return content; } diff --git a/schema-util/avro/src/main/java/io/apicurio/registry/content/extract/AvroContentExtractor.java b/schema-util/avro/src/main/java/io/apicurio/registry/content/extract/AvroContentExtractor.java index 3e703b24d4..575404db5b 100644 --- a/schema-util/avro/src/main/java/io/apicurio/registry/content/extract/AvroContentExtractor.java +++ b/schema-util/avro/src/main/java/io/apicurio/registry/content/extract/AvroContentExtractor.java @@ -1,14 +1,12 @@ package io.apicurio.registry.content.extract; -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; - import io.apicurio.registry.content.ContentHandle; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; /** * Performs meta-data extraction for Avro content. 
diff --git a/schema-util/avro/src/main/java/io/apicurio/registry/content/refs/AvroReferenceFinder.java b/schema-util/avro/src/main/java/io/apicurio/registry/content/refs/AvroReferenceFinder.java index 22635c72e2..a29033e17d 100644 --- a/schema-util/avro/src/main/java/io/apicurio/registry/content/refs/AvroReferenceFinder.java +++ b/schema-util/avro/src/main/java/io/apicurio/registry/content/refs/AvroReferenceFinder.java @@ -20,7 +20,8 @@ public class AvroReferenceFinder implements ReferenceFinder { private static final ObjectMapper mapper = new ObjectMapper(); private static final Logger log = LoggerFactory.getLogger(AvroReferenceFinder.class); - private static final Set PRIMITIVE_TYPES = Set.of("null", "boolean", "int", "long", "float", "double", "bytes", "string"); + private static final Set PRIMITIVE_TYPES = Set.of("null", "boolean", "int", "long", "float", + "double", "bytes", "string"); /** * @see io.apicurio.registry.content.refs.ReferenceFinder#findExternalReferences(TypedContent) @@ -31,7 +32,8 @@ public Set findExternalReferences(TypedContent content) { JsonNode tree = mapper.readTree(content.getContent().content()); Set externalTypes = new HashSet<>(); findExternalTypesIn(tree, externalTypes); - return externalTypes.stream().map(type -> new ExternalReference(type)).collect(Collectors.toSet()); + return externalTypes.stream().map(type -> new ExternalReference(type)) + .collect(Collectors.toSet()); } catch (Exception e) { log.error("Error finding external references in an Avro file.", e); return Collections.emptySet(); @@ -43,7 +45,7 @@ private static void findExternalTypesIn(JsonNode schema, Set externalTyp if (schema == null || schema.isNull()) { return; } - + // Handle primitive/external types if (schema.isTextual()) { String type = schema.asText(); @@ -51,15 +53,16 @@ private static void findExternalTypesIn(JsonNode schema, Set externalTyp externalTypes.add(type); } } - + // Handle unions if (schema.isArray()) { ArrayNode schemas = (ArrayNode) schema; 
schemas.forEach(s -> findExternalTypesIn(s, externalTypes)); } - + // Handle records - if (schema.isObject() && schema.has("type") && !schema.get("type").isNull() && schema.get("type").asText().equals("record")) { + if (schema.isObject() && schema.has("type") && !schema.get("type").isNull() + && schema.get("type").asText().equals("record")) { JsonNode fieldsNode = schema.get("fields"); if (fieldsNode != null && fieldsNode.isArray()) { ArrayNode fields = (ArrayNode) fieldsNode; @@ -72,7 +75,8 @@ private static void findExternalTypesIn(JsonNode schema, Set externalTyp } } // Handle arrays - if (schema.has("type") && !schema.get("type").isNull() && schema.get("type").asText().equals("array")) { + if (schema.has("type") && !schema.get("type").isNull() + && schema.get("type").asText().equals("array")) { JsonNode items = schema.get("items"); findExternalTypesIn(items, externalTypes); } diff --git a/schema-util/avro/src/main/java/io/apicurio/registry/rules/compatibility/AvroCompatibilityChecker.java b/schema-util/avro/src/main/java/io/apicurio/registry/rules/compatibility/AvroCompatibilityChecker.java index b9d18068a6..ec72c03512 100644 --- a/schema-util/avro/src/main/java/io/apicurio/registry/rules/compatibility/AvroCompatibilityChecker.java +++ b/schema-util/avro/src/main/java/io/apicurio/registry/rules/compatibility/AvroCompatibilityChecker.java @@ -15,7 +15,8 @@ public class AvroCompatibilityChecker extends AbstractCompatibilityChecker { @Override - protected Set isBackwardsCompatibleWith(String existing, String proposed, Map resolvedReferences) { + protected Set isBackwardsCompatibleWith(String existing, String proposed, + Map resolvedReferences) { try { Schema.Parser existingParser = new Schema.Parser(); for (TypedContent schema : resolvedReferences.values()) { @@ -29,18 +30,22 @@ protected Set isBackwardsCompatibleWith(String existing, String } final Schema proposedSchema = proposingParser.parse(proposed); - var result = 
SchemaCompatibility.checkReaderWriterCompatibility(proposedSchema, existingSchema).getResult(); + var result = SchemaCompatibility.checkReaderWriterCompatibility(proposedSchema, existingSchema) + .getResult(); switch (result.getCompatibility()) { case COMPATIBLE: return Collections.emptySet(); case INCOMPATIBLE: { - return ImmutableSet.builder().addAll(result.getIncompatibilities()).build(); + return ImmutableSet. builder().addAll(result.getIncompatibilities()) + .build(); } default: - throw new IllegalStateException("Got illegal compatibility result: " + result.getCompatibility()); + throw new IllegalStateException( + "Got illegal compatibility result: " + result.getCompatibility()); } } catch (AvroRuntimeException ex) { - throw new UnprocessableSchemaException("Could not execute compatibility rule on invalid Avro schema", ex); + throw new UnprocessableSchemaException( + "Could not execute compatibility rule on invalid Avro schema", ex); } } diff --git a/schema-util/avro/src/main/java/io/apicurio/registry/rules/validity/AvroContentValidator.java b/schema-util/avro/src/main/java/io/apicurio/registry/rules/validity/AvroContentValidator.java index 8f238522f6..f2f9b7caa5 100644 --- a/schema-util/avro/src/main/java/io/apicurio/registry/rules/validity/AvroContentValidator.java +++ b/schema-util/avro/src/main/java/io/apicurio/registry/rules/validity/AvroContentValidator.java @@ -16,7 +16,7 @@ * A content validator implementation for the Avro content type. 
*/ public class AvroContentValidator implements ContentValidator { - + private static final String DUMMY_AVRO_RECORD = """ { "type": "record", @@ -38,7 +38,8 @@ public AvroContentValidator() { * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) */ @Override - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException { + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException { if (level == ValidityLevel.SYNTAX_ONLY || level == ValidityLevel.FULL) { try { Schema.Parser parser = new Schema.Parser(); @@ -47,16 +48,18 @@ public void validate(ValidityLevel level, TypedContent content, Map references) throws RuleViolationException { + public void validateReferences(TypedContent content, List references) + throws RuleViolationException { try { Schema.Parser parser = new Schema.Parser(); references.forEach(ref -> { @@ -64,7 +67,7 @@ public void validateReferences(TypedContent content, List ref if (refName != null && refName.contains(".")) { int idx = refName.lastIndexOf('.'); String ns = refName.substring(0, idx); - String name = refName.substring(idx+1); + String name = refName.substring(idx + 1); parser.parse(DUMMY_AVRO_RECORD.replace("NAMESPACE", ns).replace("NAME", name)); } }); @@ -74,8 +77,9 @@ public void validateReferences(TypedContent content, List ref // is because of a missing defined type or some OTHER parse exception. 
if (e.getMessage().contains("is not a defined name")) { RuleViolation violation = new RuleViolation("Missing reference detected.", e.getMessage()); - throw new RuleViolationException("Missing reference detected in Avro artifact.", RuleType.INTEGRITY, - IntegrityLevel.ALL_REFS_MAPPED.name(), Collections.singleton(violation)); + throw new RuleViolationException("Missing reference detected in Avro artifact.", + RuleType.INTEGRITY, IntegrityLevel.ALL_REFS_MAPPED.name(), + Collections.singleton(violation)); } } } diff --git a/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/AvroCompatibilityChecker.java b/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/AvroCompatibilityChecker.java index d685c3e4ee..00973653b4 100644 --- a/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/AvroCompatibilityChecker.java +++ b/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/AvroCompatibilityChecker.java @@ -17,7 +17,8 @@ public class AvroCompatibilityChecker { public static final AvroCompatibilityChecker FORWARD_TRANSITIVE_CHECKER; public static final AvroCompatibilityChecker FULL_TRANSITIVE_CHECKER; public static final AvroCompatibilityChecker NO_OP_CHECKER; - private static final SchemaValidator BACKWARD_VALIDATOR = (new SchemaValidatorBuilder()).canReadStrategy().validateLatest(); + private static final SchemaValidator BACKWARD_VALIDATOR = (new SchemaValidatorBuilder()).canReadStrategy() + .validateLatest(); private static final SchemaValidator FORWARD_VALIDATOR; private static final SchemaValidator FULL_VALIDATOR; private static final SchemaValidator BACKWARD_TRANSITIVE_VALIDATOR; diff --git a/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/AvroCompatibilityTest.java b/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/AvroCompatibilityTest.java index b4b804e007..aea713446a 100644 --- a/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/AvroCompatibilityTest.java +++ 
b/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/AvroCompatibilityTest.java @@ -15,57 +15,54 @@ class AvroCompatibilityTest { private final ContentCanonicalizer avroCanonicalizer = new EnhancedAvroContentCanonicalizer(); - private final String schemaString1 = "{\"type\":\"record\"," - + "\"name\":\"myrecord\"," - + "\"fields\":" + private final String schemaString1 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":\"f1\"}]}"; - private final Schema schema1 = new Schema.Parser().parse(avroCanonicalizer.canonicalize(TypedContent.create(ContentHandle.create(schemaString1), ContentTypes.APPLICATION_JSON), new HashMap<>()).getContent().content()); + private final Schema schema1 = new Schema.Parser().parse(avroCanonicalizer.canonicalize( + TypedContent.create(ContentHandle.create(schemaString1), ContentTypes.APPLICATION_JSON), + new HashMap<>()).getContent().content()); - private final String schemaString2 = "{\"type\":\"record\"," - + "\"name\":\"myrecord\"," - + "\"fields\":" + private final String schemaString2 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":\"f1\"}," + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}]}"; - private final Schema schema2 = new Schema.Parser().parse(avroCanonicalizer.canonicalize(TypedContent.create(ContentHandle.create(schemaString2), ContentTypes.APPLICATION_JSON), new HashMap<>()).getContent().content()); + private final Schema schema2 = new Schema.Parser().parse(avroCanonicalizer.canonicalize( + TypedContent.create(ContentHandle.create(schemaString2), ContentTypes.APPLICATION_JSON), + new HashMap<>()).getContent().content()); - private final String schemaString3 = "{\"type\":\"record\"," - + "\"name\":\"myrecord\"," - + "\"fields\":" - + "[{\"type\":\"string\",\"name\":\"f1\"}," - + " {\"type\":\"string\",\"name\":\"f2\"}]}"; - private final Schema schema3 = new 
Schema.Parser().parse(avroCanonicalizer.canonicalize(TypedContent.create(ContentHandle.create(schemaString3), ContentTypes.APPLICATION_JSON), new HashMap<>()).getContent().content()); + private final String schemaString3 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}," + " {\"type\":\"string\",\"name\":\"f2\"}]}"; + private final Schema schema3 = new Schema.Parser().parse(avroCanonicalizer.canonicalize( + TypedContent.create(ContentHandle.create(schemaString3), ContentTypes.APPLICATION_JSON), + new HashMap<>()).getContent().content()); - private final String schemaString4 = "{\"type\":\"record\"," - + "\"name\":\"myrecord\"," - + "\"fields\":" + private final String schemaString4 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":\"f1_new\", \"aliases\": [\"f1\"]}]}"; - private final Schema schema4 = new Schema.Parser().parse(avroCanonicalizer.canonicalize(TypedContent.create(ContentHandle.create(schemaString4), ContentTypes.APPLICATION_JSON), new HashMap<>()).getContent().content()); - - private final String schemaString6 = "{\"type\":\"record\"," - + "\"name\":\"myrecord\"," - + "\"fields\":" - + "[{\"type\":[\"null\", \"string\"],\"name\":\"f1\"," - + " \"doc\":\"doc of f1\"}]}"; - private final Schema schema6 = new Schema.Parser().parse(avroCanonicalizer.canonicalize(TypedContent.create(ContentHandle.create(schemaString6), ContentTypes.APPLICATION_JSON), new HashMap<>()).getContent().content()); - - private final String schemaString7 = "{\"type\":\"record\"," - + "\"name\":\"myrecord\"," - + "\"fields\":" - + "[{\"type\":[\"null\", \"string\", \"int\"],\"name\":\"f1\"," - + " \"doc\":\"doc of f1\"}]}"; - private final Schema schema7 = new Schema.Parser().parse(avroCanonicalizer.canonicalize(TypedContent.create(ContentHandle.create(schemaString7), ContentTypes.APPLICATION_JSON), new HashMap<>()).getContent().content()); - - private final String 
schemaString8 = "{\"type\":\"record\"," - + "\"name\":\"myrecord\"," - + "\"fields\":" + private final Schema schema4 = new Schema.Parser().parse(avroCanonicalizer.canonicalize( + TypedContent.create(ContentHandle.create(schemaString4), ContentTypes.APPLICATION_JSON), + new HashMap<>()).getContent().content()); + + private final String schemaString6 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":[\"null\", \"string\"],\"name\":\"f1\"," + " \"doc\":\"doc of f1\"}]}"; + private final Schema schema6 = new Schema.Parser().parse(avroCanonicalizer.canonicalize( + TypedContent.create(ContentHandle.create(schemaString6), ContentTypes.APPLICATION_JSON), + new HashMap<>()).getContent().content()); + + private final String schemaString7 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + + "[{\"type\":[\"null\", \"string\", \"int\"],\"name\":\"f1\"," + " \"doc\":\"doc of f1\"}]}"; + private final Schema schema7 = new Schema.Parser().parse(avroCanonicalizer.canonicalize( + TypedContent.create(ContentHandle.create(schemaString7), ContentTypes.APPLICATION_JSON), + new HashMap<>()).getContent().content()); + + private final String schemaString8 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" + "[{\"type\":\"string\",\"name\":\"f1\"}," + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}," + " {\"type\":\"string\",\"name\":\"f3\", \"default\": \"bar\"}]}"; - private final Schema schema8 = new Schema.Parser().parse(avroCanonicalizer.canonicalize(TypedContent.create(ContentHandle.create(schemaString8), ContentTypes.APPLICATION_JSON), new HashMap<>()).getContent().content()); + private final Schema schema8 = new Schema.Parser().parse(avroCanonicalizer.canonicalize( + TypedContent.create(ContentHandle.create(schemaString8), ContentTypes.APPLICATION_JSON), + new HashMap<>()).getContent().content()); /* - * Backward compatibility: A new schema is backward compatible if it can be used to read the 
data - * written in the previous schema. + * Backward compatibility: A new schema is backward compatible if it can be used to read the data written + * in the previous schema. */ @Test void testBasicBackwardsCompatibility() { @@ -91,8 +88,8 @@ void testBasicBackwardsCompatibility() { } /* - * Backward transitive compatibility: A new schema is backward compatible if it can be used to read the data - * written in all previous schemas. + * Backward transitive compatibility: A new schema is backward compatible if it can be used to read the + * data written in all previous schemas. */ @Test void testBasicBackwardsTransitiveCompatibility() { @@ -111,8 +108,8 @@ void testBasicBackwardsTransitiveCompatibility() { } /* - * Forward compatibility: A new schema is forward compatible if the previous schema can read data written in this - * schema. + * Forward compatibility: A new schema is forward compatible if the previous schema can read data written + * in this schema. */ @Test void testBasicForwardsCompatibility() { @@ -132,8 +129,8 @@ void testBasicForwardsCompatibility() { } /* - * Forward transitive compatibility: A new schema is forward compatible if all previous schemas can read data written - * in this schema. + * Forward transitive compatibility: A new schema is forward compatible if all previous schemas can read + * data written in this schema. */ @Test void testBasicForwardsTransitiveCompatibility() { @@ -169,8 +166,8 @@ void testBasicFullCompatibility() { } /* - * Full transitive compatibility: A new schema is fully compatible if it’s both transitively backward - * and transitively forward compatible with the entire schema history. + * Full transitive compatibility: A new schema is fully compatible if it’s both transitively backward and + * transitively forward compatible with the entire schema history. 
*/ @Test void testBasicFullTransitiveCompatibility() { diff --git a/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/SchemaNormalizerTest.java b/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/SchemaNormalizerTest.java index dc57c6f7df..70230e4bf1 100644 --- a/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/SchemaNormalizerTest.java +++ b/schema-util/avro/src/test/java/io/apicurio/registry/content/canon/SchemaNormalizerTest.java @@ -27,291 +27,219 @@ private TypedContent toTypedContent(String content) { @Test void parseSchema_SchemasWithOptionalAttributesInRoot_Equal() { // prepare - String schemaStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": []\n" + - "}"; - String schemaWithOptionalStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"doc\": \"some description\",\n" + // optional attribute - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": []\n" + - "}"; + String schemaStr = "{\n" + " \"type\": \"record\",\n" + " \"name\": \"schemaName\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": []\n" + "}"; + String schemaWithOptionalStr = "{\n" + " \"type\": \"record\",\n" + " \"name\": \"schemaName\",\n" + + " \"doc\": \"some description\",\n" + // optional attribute + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": []\n" + "}"; // act EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); TypedContent schema = canonicalizer.canonicalize(toTypedContent(schemaStr), new HashMap<>()); - TypedContent schemaWithOptional = canonicalizer.canonicalize(toTypedContent(schemaWithOptionalStr), new HashMap<>()); + TypedContent schemaWithOptional = canonicalizer.canonicalize(toTypedContent(schemaWithOptionalStr), + new HashMap<>()); // assert 
assertEquals(schema.getContent().content(), schemaWithOptional.getContent().content()); - assertEquals(Schema.parseJsonToObject(schema.getContent().content()), Schema.parseJsonToObject(schemaWithOptional.getContent().content())); + assertEquals(Schema.parseJsonToObject(schema.getContent().content()), + Schema.parseJsonToObject(schemaWithOptional.getContent().content())); } @Test void parseSchema_SchemaWithNamespaceInNameAndInNamespaceField_Equal() { // prepare - String schemaStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"com.example.client.example.schema.schemaName\",\n" + - " \"doc\": \"some description\",\n" + - " \"fields\": []\n" + - "}"; - String schemaWithNamespaceFieldStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"doc\": \"some description\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": []\n" + - "}"; + String schemaStr = "{\n" + " \"type\": \"record\",\n" + + " \"name\": \"com.example.client.example.schema.schemaName\",\n" + + " \"doc\": \"some description\",\n" + " \"fields\": []\n" + "}"; + String schemaWithNamespaceFieldStr = "{\n" + " \"type\": \"record\",\n" + + " \"name\": \"schemaName\",\n" + " \"doc\": \"some description\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": []\n" + "}"; // act EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); TypedContent schema = canonicalizer.canonicalize(toTypedContent(schemaStr), new HashMap<>()); - TypedContent schemaWithNamespaceField = canonicalizer.canonicalize(toTypedContent(schemaWithNamespaceFieldStr), new HashMap<>()); + TypedContent schemaWithNamespaceField = canonicalizer + .canonicalize(toTypedContent(schemaWithNamespaceFieldStr), new HashMap<>()); // assert assertEquals(schema.getContent().content(), schemaWithNamespaceField.getContent().content()); - assertEquals(Schema.parseJsonToObject(schema.getContent().content()), 
Schema.parseJsonToObject(schemaWithNamespaceField.getContent().content())); + assertEquals(Schema.parseJsonToObject(schema.getContent().content()), + Schema.parseJsonToObject(schemaWithNamespaceField.getContent().content())); } @Test void parseSchema_SchemaWithDifferentNamespaceInNameAndInNamespaceField_NotEqual() { // prepare - String schemaStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"com.different.client.example.schema.schemaName\",\n" + - " \"doc\": \"some description\",\n" + - " \"fields\": []\n" + - "}"; - String schemaWithNamespaceFieldStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"doc\": \"some description\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": []\n" + - "}"; + String schemaStr = "{\n" + " \"type\": \"record\",\n" + + " \"name\": \"com.different.client.example.schema.schemaName\",\n" + + " \"doc\": \"some description\",\n" + " \"fields\": []\n" + "}"; + String schemaWithNamespaceFieldStr = "{\n" + " \"type\": \"record\",\n" + + " \"name\": \"schemaName\",\n" + " \"doc\": \"some description\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": []\n" + "}"; // act EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); TypedContent schema = canonicalizer.canonicalize(toTypedContent(schemaStr), new HashMap<>()); - TypedContent schemaWithNamespaceField = canonicalizer.canonicalize(toTypedContent(schemaWithNamespaceFieldStr), new HashMap<>()); + TypedContent schemaWithNamespaceField = canonicalizer + .canonicalize(toTypedContent(schemaWithNamespaceFieldStr), new HashMap<>()); // assert assertNotEquals(schema.getContent().content(), schemaWithNamespaceField.getContent().content()); - assertNotEquals(Schema.parseJsonToObject(schema.getContent().content()), Schema.parseJsonToObject(schemaWithNamespaceField.getContent().content())); + 
assertNotEquals(Schema.parseJsonToObject(schema.getContent().content()), + Schema.parseJsonToObject(schemaWithNamespaceField.getContent().content())); } - + @Test void parseSchema_SchemasWithDifferenceAttributesOrderInRoot_Equal() { // prepare - String schemaStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"doc\": \"some description\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": []\n" + - "}"; + String schemaStr = "{\n" + " \"type\": \"record\",\n" + " \"name\": \"schemaName\",\n" + + " \"doc\": \"some description\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": []\n" + "}"; String schemaWithDifferenceAttributesOrderStr = "{\n" + // reverse order of keys - " \"fields\": [],\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"doc\": \"some description\",\n" + - " \"name\": \"schemaName\",\n" + - " \"type\": \"record\"\n" + - "}"; + " \"fields\": [],\n" + " \"namespace\": \"com.example.client.example.schema\",\n" + + " \"doc\": \"some description\",\n" + " \"name\": \"schemaName\",\n" + + " \"type\": \"record\"\n" + "}"; // act EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); TypedContent schema = canonicalizer.canonicalize(toTypedContent(schemaStr), new HashMap<>()); - TypedContent schemaWithDifferenceAttributesOrder = canonicalizer.canonicalize(toTypedContent(schemaWithDifferenceAttributesOrderStr), new HashMap<>()); + TypedContent schemaWithDifferenceAttributesOrder = canonicalizer + .canonicalize(toTypedContent(schemaWithDifferenceAttributesOrderStr), new HashMap<>()); // assert - assertEquals(schema.getContent().content(), schemaWithDifferenceAttributesOrder.getContent().content()); - assertEquals(Schema.parseJsonToObject(schema.getContent().content()), Schema.parseJsonToObject(schemaWithDifferenceAttributesOrder.getContent().content())); + assertEquals(schema.getContent().content(), + 
schemaWithDifferenceAttributesOrder.getContent().content()); + assertEquals(Schema.parseJsonToObject(schema.getContent().content()), + Schema.parseJsonToObject(schemaWithDifferenceAttributesOrder.getContent().content())); } @Test void parseSchema_SchemasWithOptionalAttributesInField_Equal() { // prepare - String schemaStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"name\": \"timestamp\",\n" + - " \"type\": \"long\"\n" + // without 'doc' attribute - " }]\n" + - "}"; - - String schemaWithOptionalAttributesInFieldStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"name\": \"timestamp\",\n" + - " \"type\": \"long\",\n" + - " \"doc\": \"Timestamp of the event\"\n" + // added optional field - " }]\n" + - "}"; + String schemaStr = "{\n" + " \"type\": \"record\",\n" + " \"name\": \"schemaName\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": [\n" + + " {\n" + " \"name\": \"timestamp\",\n" + " \"type\": \"long\"\n" + // without + // 'doc' + // attribute + " }]\n" + "}"; + + String schemaWithOptionalAttributesInFieldStr = "{\n" + " \"type\": \"record\",\n" + + " \"name\": \"schemaName\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": [\n" + + " {\n" + " \"name\": \"timestamp\",\n" + " \"type\": \"long\",\n" + + " \"doc\": \"Timestamp of the event\"\n" + // added optional field + " }]\n" + "}"; // act EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); TypedContent schema = canonicalizer.canonicalize(toTypedContent(schemaStr), new HashMap<>()); - TypedContent schemaWithOptionalAttributesInField = canonicalizer.canonicalize(toTypedContent(schemaWithOptionalAttributesInFieldStr), new HashMap<>()); + TypedContent 
schemaWithOptionalAttributesInField = canonicalizer + .canonicalize(toTypedContent(schemaWithOptionalAttributesInFieldStr), new HashMap<>()); // assert - assertEquals(schema.getContent().content(), schemaWithOptionalAttributesInField.getContent().content()); - assertEquals(Schema.parseJsonToObject(schema.getContent().content()), Schema.parseJsonToObject(schemaWithOptionalAttributesInField.getContent().content())); + assertEquals(schema.getContent().content(), + schemaWithOptionalAttributesInField.getContent().content()); + assertEquals(Schema.parseJsonToObject(schema.getContent().content()), + Schema.parseJsonToObject(schemaWithOptionalAttributesInField.getContent().content())); } @Test void parseSchema_SchemasWithDifferenceAttributesOrderInField_Equal() { // prepare - String schemaStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"name\": \"message\",\n" + // `name` 1st `type` 2nd - " \"type\": \"string\"\n" + - " }]\n" + - "}"; - - String schemasWithDifferenceAttributesOrderInFieldStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"type\": \"string\",\n" + // `type` 1st `name` 2nd - " \"name\": \"message\"\n" + - " }]\n" + - "}"; + String schemaStr = "{\n" + " \"type\": \"record\",\n" + " \"name\": \"schemaName\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": [\n" + + " {\n" + " \"name\": \"message\",\n" + // `name` 1st `type` 2nd + " \"type\": \"string\"\n" + " }]\n" + "}"; + + String schemasWithDifferenceAttributesOrderInFieldStr = "{\n" + " \"type\": \"record\",\n" + + " \"name\": \"schemaName\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": [\n" + + " {\n" + " \"type\": \"string\",\n" + // `type` 1st `name` 2nd + " \"name\": 
\"message\"\n" + " }]\n" + "}"; // Act EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); TypedContent schema = canonicalizer.canonicalize(toTypedContent(schemaStr), new HashMap<>()); - TypedContent schemasWithDifferenceAttributesOrderInField = canonicalizer.canonicalize(toTypedContent(schemasWithDifferenceAttributesOrderInFieldStr), new HashMap<>()); + TypedContent schemasWithDifferenceAttributesOrderInField = canonicalizer.canonicalize( + toTypedContent(schemasWithDifferenceAttributesOrderInFieldStr), new HashMap<>()); // Assert - assertEquals(schema.getContent().content(), schemasWithDifferenceAttributesOrderInField.getContent().content()); - assertEquals(Schema.parseJsonToObject(schema.getContent().content()), Schema.parseJsonToObject(schemasWithDifferenceAttributesOrderInField.getContent().content())); + assertEquals(schema.getContent().content(), + schemasWithDifferenceAttributesOrderInField.getContent().content()); + assertEquals(Schema.parseJsonToObject(schema.getContent().content()), + Schema.parseJsonToObject(schemasWithDifferenceAttributesOrderInField.getContent().content())); } @Test void parseSchema_SchemasWithFieldsInDifferentOrder_NotEqual() { // prepare - String schemaStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"name\": \"message\",\n" + - " \"type\": \"string\"\n" + - " },\n" + - " {\n" + - " \"name\": \"sender\",\n" + - " \"type\": \"string\"\n" + - " }]\n" + - "}"; - - String schemaWithFieldsInDifferentOrderStr = "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"schemaName\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"name\": \"sender\",\n" + - " \"type\": \"string\"\n" + - " },\n" + - " {\n" + - " \"name\": \"message\",\n" + - " \"type\": \"string\"\n" + - " }]\n" + - "}"; + String schemaStr = "{\n" + " 
\"type\": \"record\",\n" + " \"name\": \"schemaName\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": [\n" + + " {\n" + " \"name\": \"message\",\n" + " \"type\": \"string\"\n" + " },\n" + + " {\n" + " \"name\": \"sender\",\n" + " \"type\": \"string\"\n" + " }]\n" + + "}"; + + String schemaWithFieldsInDifferentOrderStr = "{\n" + " \"type\": \"record\",\n" + + " \"name\": \"schemaName\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": [\n" + + " {\n" + " \"name\": \"sender\",\n" + " \"type\": \"string\"\n" + " },\n" + + " {\n" + " \"name\": \"message\",\n" + " \"type\": \"string\"\n" + " }]\n" + + "}"; // Act EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); TypedContent schema = canonicalizer.canonicalize(toTypedContent(schemaStr), new HashMap<>()); - TypedContent schemaWithFieldsInDifferentOrder = canonicalizer.canonicalize(toTypedContent(schemaWithFieldsInDifferentOrderStr), new HashMap<>()); + TypedContent schemaWithFieldsInDifferentOrder = canonicalizer + .canonicalize(toTypedContent(schemaWithFieldsInDifferentOrderStr), new HashMap<>()); // Assert - assertNotEquals(schema.getContent().content(), schemaWithFieldsInDifferentOrder.getContent().content()); - assertNotEquals(Schema.parseJsonToObject(schema.getContent().content()), Schema.parseJsonToObject(schemaWithFieldsInDifferentOrder.getContent().content())); + assertNotEquals(schema.getContent().content(), + schemaWithFieldsInDifferentOrder.getContent().content()); + assertNotEquals(Schema.parseJsonToObject(schema.getContent().content()), + Schema.parseJsonToObject(schemaWithFieldsInDifferentOrder.getContent().content())); } @Test void parseSchema_NestedSchemasWithDifferenceAttributesOrderInField_Equal() { - String nestedSchemaStr = - "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"Schema\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " 
\"name\": \"name\",\n" + - " \"type\": \"string\"\n" + - " },\n" + - " {\n" + - " \"name\": \"innerSchema\",\n" + - " \"type\": {\n" + - " \"type\": \"record\",\n" + - " \"name\": \"NestedSchema\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"name\": \"innerName\",\n" + // `name` 1st `type` 2nd - " \"type\": \"string\"\n" + - " }\n" + - " ]\n" + - " }\n" + - " }\n" + - " ]\n" + - "}"; - - String schemaWithDifferenceAttributesOrderInNestedSchemaStr = - "{\n" + - " \"type\": \"record\",\n" + - " \"name\": \"Schema\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"name\": \"name\",\n" + - " \"type\": \"string\"\n" + - " },\n" + - " {\n" + - " \"name\": \"innerSchema\",\n" + - " \"type\": {\n" + - " \"type\": \"record\",\n" + - " \"name\": \"NestedSchema\",\n" + - " \"namespace\": \"com.example.client.example.schema\",\n" + - " \"fields\": [\n" + - " {\n" + - " \"type\": \"string\",\n" + // `type` 1st `name` 2nd - " \"name\": \"innerName\"\n" + - " }\n" + - " ]\n" + - " }\n" + - " }\n" + - " ]\n" + - "}"; - + String nestedSchemaStr = "{\n" + " \"type\": \"record\",\n" + " \"name\": \"Schema\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + " \"fields\": [\n" + + " {\n" + " \"name\": \"name\",\n" + " \"type\": \"string\"\n" + " },\n" + + " {\n" + " \"name\": \"innerSchema\",\n" + " \"type\": {\n" + + " \"type\": \"record\",\n" + " \"name\": \"NestedSchema\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + + " \"fields\": [\n" + " {\n" + " \"name\": \"innerName\",\n" + // `name` + // 1st + // `type` + // 2nd + " \"type\": \"string\"\n" + " }\n" + " ]\n" + " }\n" + + " }\n" + " ]\n" + "}"; + + String schemaWithDifferenceAttributesOrderInNestedSchemaStr = "{\n" + " \"type\": \"record\",\n" + + " \"name\": \"Schema\",\n" + " \"namespace\": \"com.example.client.example.schema\",\n" + + " \"fields\": [\n" + " {\n" 
+ " \"name\": \"name\",\n" + + " \"type\": \"string\"\n" + " },\n" + " {\n" + + " \"name\": \"innerSchema\",\n" + " \"type\": {\n" + + " \"type\": \"record\",\n" + " \"name\": \"NestedSchema\",\n" + + " \"namespace\": \"com.example.client.example.schema\",\n" + + " \"fields\": [\n" + " {\n" + " \"type\": \"string\",\n" + // `type` + // 1st + // `name` + // 2nd + " \"name\": \"innerName\"\n" + " }\n" + " ]\n" + " }\n" + + " }\n" + " ]\n" + "}"; // Act EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); - TypedContent nestedSchema = canonicalizer.canonicalize(toTypedContent(nestedSchemaStr), new HashMap<>()); - TypedContent schemaWithDifferenceAttributesOrderInNestedSchema = canonicalizer.canonicalize(toTypedContent(schemaWithDifferenceAttributesOrderInNestedSchemaStr), new HashMap<>()); + TypedContent nestedSchema = canonicalizer.canonicalize(toTypedContent(nestedSchemaStr), + new HashMap<>()); + TypedContent schemaWithDifferenceAttributesOrderInNestedSchema = canonicalizer.canonicalize( + toTypedContent(schemaWithDifferenceAttributesOrderInNestedSchemaStr), new HashMap<>()); // Assert - assertEquals(nestedSchema.getContent().content(), schemaWithDifferenceAttributesOrderInNestedSchema.getContent().content()); - assertEquals(Schema.parseJsonToObject(nestedSchema.getContent().content()), Schema.parseJsonToObject(schemaWithDifferenceAttributesOrderInNestedSchema.getContent().content())); + assertEquals(nestedSchema.getContent().content(), + schemaWithDifferenceAttributesOrderInNestedSchema.getContent().content()); + assertEquals(Schema.parseJsonToObject(nestedSchema.getContent().content()), Schema + .parseJsonToObject(schemaWithDifferenceAttributesOrderInNestedSchema.getContent().content())); } @Test @@ -334,8 +262,10 @@ void parseSchema_NestedSchemasOfSameType() throws Exception { }); // and the parsed schema should still be the same - assertEquals(normalizedSchemas.get(0).getContent().content(), 
normalizedSchemas.get(1).getContent().content()); - assertEquals(Schema.parseJsonToObject(normalizedSchemas.get(0).getContent().content()), Schema.parseJsonToObject(normalizedSchemas.get(1).getContent().content())); + assertEquals(normalizedSchemas.get(0).getContent().content(), + normalizedSchemas.get(1).getContent().content()); + assertEquals(Schema.parseJsonToObject(normalizedSchemas.get(0).getContent().content()), + Schema.parseJsonToObject(normalizedSchemas.get(1).getContent().content())); } @Test @@ -346,7 +276,8 @@ void parseSchema_unionOfNullAndSelf() throws Exception { // the schema should be parsed with a non-null result EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); - final TypedContent parsed = canonicalizer.canonicalize(toTypedContent(schemaWithNullUnion), new HashMap<>()); + final TypedContent parsed = canonicalizer.canonicalize(toTypedContent(schemaWithNullUnion), + new HashMap<>()); assertNotNull(parsed); } @@ -359,7 +290,8 @@ void parseSchema_withJavaType() throws Exception { // the schema should be parsed with a non-null result EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); - final TypedContent parsed = canonicalizer.canonicalize(toTypedContent(schemaWithJavaType), new HashMap<>()); + final TypedContent parsed = canonicalizer.canonicalize(toTypedContent(schemaWithJavaType), + new HashMap<>()); assertNotNull(parsed); } @@ -367,12 +299,14 @@ void parseSchema_withJavaType() throws Exception { @Test void parseSchema_withLogicalType() throws Exception { // given a schema containing a custom date type with logicalType - final String schemaWithCustomType = getSchemaFromResource("avro/advanced/schema-with-logicaltype.avsc"); + final String schemaWithCustomType = getSchemaFromResource( + "avro/advanced/schema-with-logicaltype.avsc"); assertNotNull(schemaWithCustomType); // the schema should be parsed with a non-null result EnhancedAvroContentCanonicalizer canonicalizer = 
new EnhancedAvroContentCanonicalizer(); - final TypedContent parsed = canonicalizer.canonicalize(toTypedContent(schemaWithCustomType), new HashMap<>()); + final TypedContent parsed = canonicalizer.canonicalize(toTypedContent(schemaWithCustomType), + new HashMap<>()); assertNotNull(parsed); } @@ -380,12 +314,14 @@ void parseSchema_withLogicalType() throws Exception { @Test void parseSchema_withNestedEnumAndDefault() throws Exception { // given a schema containing a custom date type with logicalType - final String schemaWithCustomType = getSchemaFromResource("avro/advanced/schema-deeply-nested-enum-default.avsc"); + final String schemaWithCustomType = getSchemaFromResource( + "avro/advanced/schema-deeply-nested-enum-default.avsc"); assertNotNull(schemaWithCustomType); // the schema should be parsed with a non-null result EnhancedAvroContentCanonicalizer canonicalizer = new EnhancedAvroContentCanonicalizer(); - final TypedContent parsed = canonicalizer.canonicalize(toTypedContent(schemaWithCustomType), new HashMap<>()); + final TypedContent parsed = canonicalizer.canonicalize(toTypedContent(schemaWithCustomType), + new HashMap<>()); assertNotNull(parsed); } diff --git a/schema-util/common/pom.xml b/schema-util/common/pom.xml index a554ddb473..5702774e6d 100644 --- a/schema-util/common/pom.xml +++ b/schema-util/common/pom.xml @@ -1,111 +1,109 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - - - apicurio-registry-schema-util-common - jar - apicurio-registry-schema-util-common - - - - - io.apicurio - apicurio-registry-common - - - - org.projectlombok - lombok - compile - - - - commons-codec - commons-codec - - - - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - - - snakeyaml - org.yaml - - - - - - org.yaml - snakeyaml - ${snakeyaml.version} - - - - com.fasterxml.jackson.module - jackson-module-parameter-names - - - - com.fasterxml.jackson.datatype - jackson-datatype-jdk8 - - - - com.fasterxml.jackson.datatype - 
jackson-datatype-jsr310 - - - - org.slf4j - slf4j-api - - - - org.jboss.slf4j - slf4j-jboss-logging - ${jboss-slf4j.version} - - - - com.fasterxml.jackson.datatype - jackson-datatype-json-org - - - - com.google.guava - guava - - - - - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + + + apicurio-registry-schema-util-common + jar + apicurio-registry-schema-util-common + + + + + io.apicurio + apicurio-registry-common + + + + org.projectlombok + lombok + compile + + + + commons-codec + commons-codec + + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + + + org.yaml + snakeyaml + + + + + + org.yaml + snakeyaml + ${snakeyaml.version} + + + + com.fasterxml.jackson.module + jackson-module-parameter-names + + + + com.fasterxml.jackson.datatype + jackson-datatype-jdk8 + + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + + + + org.slf4j + slf4j-api + + + + org.jboss.slf4j + slf4j-jboss-logging + ${jboss-slf4j.version} + + + + com.fasterxml.jackson.datatype + jackson-datatype-json-org + + + + com.google.guava + guava + + + + + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/AbstractContentHandle.java b/schema-util/common/src/main/java/io/apicurio/registry/content/AbstractContentHandle.java index 74ba2b1099..550a86b95c 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/AbstractContentHandle.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/AbstractContentHandle.java @@ -18,8 +18,10 @@ public InputStream stream() { @Override public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof ContentHandle)) return false; + if (this == o) + return true; + if (!(o instanceof ContentHandle)) + return 
false; ContentHandle that = (ContentHandle) o; return Arrays.equals(bytes(), that.bytes()); } diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/TypedContent.java b/schema-util/common/src/main/java/io/apicurio/registry/content/TypedContent.java index f6288d09b6..f01375eef1 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/TypedContent.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/TypedContent.java @@ -3,18 +3,15 @@ public interface TypedContent { static TypedContent create(ContentHandle content, String contentType) { - return TypedContentImpl.builder() - .contentType(contentType) - .content(content) - .build(); + return TypedContentImpl.builder().contentType(contentType).content(content).build(); } + static TypedContent create(String content, String contentType) { - return TypedContentImpl.builder() - .contentType(contentType) - .content(ContentHandle.create(content)) + return TypedContentImpl.builder().contentType(contentType).content(ContentHandle.create(content)) .build(); } ContentHandle getContent(); + String getContentType(); } diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/canon/ContentCanonicalizer.java b/schema-util/common/src/main/java/io/apicurio/registry/content/canon/ContentCanonicalizer.java index 38416f9f33..52f4371612 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/canon/ContentCanonicalizer.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/canon/ContentCanonicalizer.java @@ -5,15 +5,14 @@ import java.util.Map; /** - * Canonicalize some content! This means converting content to its canonical form for - * the purpose of comparison. Should remove things like formatting and should sort - * fields when ordering is not important. - * + * Canonicalize some content! This means converting content to its canonical form for the purpose of + * comparison. 
Should remove things like formatting and should sort fields when ordering is not important. */ public interface ContentCanonicalizer { - + /** * Called to convert the given content to its canonical form. + * * @param content */ public TypedContent canonicalize(TypedContent content, Map resolvedReferences); diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/canon/NoOpContentCanonicalizer.java b/schema-util/common/src/main/java/io/apicurio/registry/content/canon/NoOpContentCanonicalizer.java index bfeab8fd14..373ef4ab14 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/canon/NoOpContentCanonicalizer.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/canon/NoOpContentCanonicalizer.java @@ -8,7 +8,7 @@ * A canonicalizer that passes through the content unchanged. */ public class NoOpContentCanonicalizer implements ContentCanonicalizer { - + /** * @see ContentCanonicalizer#canonicalize(TypedContent, Map) */ diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/dereference/ContentDereferencer.java b/schema-util/common/src/main/java/io/apicurio/registry/content/dereference/ContentDereferencer.java index cfdf7a308c..71609fb506 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/dereference/ContentDereferencer.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/dereference/ContentDereferencer.java @@ -5,22 +5,25 @@ import java.util.Map; /** - * Dereference some content! This means replacing any reference inside the content by the full referenced content. - * The result is an artifact content that can be used on its own. + * Dereference some content! This means replacing any reference inside the content by the full referenced + * content. The result is an artifact content that can be used on its own. 
*/ public interface ContentDereferencer { /** * Called to dereference the given content to its dereferenced form + * * @param content */ TypedContent dereference(TypedContent content, Map resolvedReferences); - + /** - * Called to rewrite any references in the content so that they point to valid Registry API URLs rather than - * "logical" values. For example, if an OpenAPI document has a $ref property with - * a value of ./common-types.json#/defs/FooType this method will rewrite that property - * to something like https://registry.example.com/apis/registry/v3/groups/Example/artifacts/CommonTypes/versions/1.0. + * Called to rewrite any references in the content so that they point to valid Registry API URLs rather + * than "logical" values. For example, if an OpenAPI document has a $ref property with a + * value of ./common-types.json#/defs/FooType this method will rewrite that property to + * something like + * https://registry.example.com/apis/registry/v3/groups/Example/artifacts/CommonTypes/versions/1.0. + * * @param content * @param resolvedReferenceUrls */ diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/extract/ContentExtractor.java b/schema-util/common/src/main/java/io/apicurio/registry/content/extract/ContentExtractor.java index 2fc7c99dd3..4a62daef87 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/extract/ContentExtractor.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/extract/ContentExtractor.java @@ -9,8 +9,7 @@ */ public interface ContentExtractor { /** - * Extract metadata from content. - * Return null if no content is extracted. + * Extract metadata from content. Return null if no content is extracted. 
* * @param content the content * @return extracted metadata or null if none diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/extract/ExtractedMetaData.java b/schema-util/common/src/main/java/io/apicurio/registry/content/extract/ExtractedMetaData.java index f7816cd5ee..58015092f4 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/extract/ExtractedMetaData.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/extract/ExtractedMetaData.java @@ -7,7 +7,7 @@ public class ExtractedMetaData { private String name; private String description; private Map labels; - + /** * Constructor. */ diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/refs/ExternalReference.java b/schema-util/common/src/main/java/io/apicurio/registry/content/refs/ExternalReference.java index 4c421c0522..f5330d74d1 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/refs/ExternalReference.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/refs/ExternalReference.java @@ -3,8 +3,8 @@ import java.util.Objects; /** - * Models a reference from one artifact to another. This represents the information found in the content - * of an artifact, and is very type-specific. For example, a JSON schema reference might look like this: + * Models a reference from one artifact to another. This represents the information found in the content of an + * artifact, and is very type-specific. For example, a JSON schema reference might look like this: * *
  * {
@@ -13,13 +13,11 @@
  * 
* * In this case, the fields of this type will be: - * *
    - *
  • fullReference: types/data-types.json#/$defs/FooType
  • - *
  • resource: types/data-types.json
  • - *
  • component: #/$defs/FooType
  • + *
  • fullReference: types/data-types.json#/$defs/FooType
  • + *
  • resource: types/data-types.json
  • + *
  • component: #/$defs/FooType
  • *
- * * For an Avro artifact a reference might look like this: * *
@@ -30,22 +28,21 @@
  * 
* * In this case, the fields of this type will be: - * *
    - *
  • fullReference: com.kubetrade.schema.common.Exchange
  • - *
  • resource: com.kubetrade.schema.common.Exchange
  • - *
  • component: null
  • + *
  • fullReference: com.kubetrade.schema.common.Exchange
  • + *
  • resource: com.kubetrade.schema.common.Exchange
  • + *
  • component: null
  • *
- * */ public class ExternalReference { private String fullReference; private String resource; private String component; - + /** * Constructor. + * * @param fullReference * @param resource * @param component @@ -55,10 +52,11 @@ public ExternalReference(String fullReference, String resource, String component this.resource = resource; this.component = component; } - + /** - * Constructor. This variant is useful if there is no component part of an external reference. In this + * Constructor. This variant is useful if there is no component part of an external reference. In this * case the full reference is also the resource (and the component is null). + * * @param reference */ public ExternalReference(String reference) { @@ -129,7 +127,7 @@ public boolean equals(Object obj) { ExternalReference other = (ExternalReference) obj; return Objects.equals(fullReference, other.fullReference); } - + /** * @see java.lang.Object#toString() */ diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/refs/JsonPointerExternalReference.java b/schema-util/common/src/main/java/io/apicurio/registry/content/refs/JsonPointerExternalReference.java index 8d64e539b2..baf883ac04 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/refs/JsonPointerExternalReference.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/refs/JsonPointerExternalReference.java @@ -4,6 +4,7 @@ public class JsonPointerExternalReference extends ExternalReference { /** * Constructor. 
+ * * @param jsonPointer */ public JsonPointerExternalReference(String jsonPointer) { diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/refs/NoOpReferenceFinder.java b/schema-util/common/src/main/java/io/apicurio/registry/content/refs/NoOpReferenceFinder.java index 4181ab7b1a..9634da5ac5 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/refs/NoOpReferenceFinder.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/refs/NoOpReferenceFinder.java @@ -6,11 +6,11 @@ import java.util.Set; public class NoOpReferenceFinder implements ReferenceFinder { - + public static final ReferenceFinder INSTANCE = new NoOpReferenceFinder(); /** - * @see io.apicurio.registry.content.refs.ReferenceFinder#findExternalReferences(TypedContent) + * @see io.apicurio.registry.content.refs.ReferenceFinder#findExternalReferences(TypedContent) */ @Override public Set findExternalReferences(TypedContent content) { diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/refs/ReferenceFinder.java b/schema-util/common/src/main/java/io/apicurio/registry/content/refs/ReferenceFinder.java index 162f3ab624..682615aac1 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/refs/ReferenceFinder.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/refs/ReferenceFinder.java @@ -8,6 +8,7 @@ public interface ReferenceFinder { /** * Finds the set of external references in a piece of content. 
+ * * @param content */ public Set findExternalReferences(TypedContent content); diff --git a/schema-util/common/src/main/java/io/apicurio/registry/content/util/ContentTypeUtil.java b/schema-util/common/src/main/java/io/apicurio/registry/content/util/ContentTypeUtil.java index 02e11dc819..c0569e82a2 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/content/util/ContentTypeUtil.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/content/util/ContentTypeUtil.java @@ -9,13 +9,14 @@ import org.xml.sax.InputSource; import org.xml.sax.helpers.DefaultHandler; -import javax.xml.parsers.SAXParser; -import javax.xml.parsers.SAXParserFactory; import java.io.IOException; import java.io.StringReader; +import javax.xml.parsers.SAXParser; +import javax.xml.parsers.SAXParserFactory; + public final class ContentTypeUtil { - + public static final String CT_APPLICATION_JSON = "application/json"; public static final String CT_APPLICATION_CREATE_EXTENDED = "application/create.extended+json"; public static final String CT_APPLICATION_GET_EXTENDED = "application/get.extended+json"; diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/RuleViolation.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/RuleViolation.java index 73504bc303..12bad7305b 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/RuleViolation.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/RuleViolation.java @@ -15,6 +15,7 @@ public RuleViolation() { /** * Constructor. 
+ * * @param description * @param context */ diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/RuleViolationException.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/RuleViolationException.java index 6dfc8d2898..f14863d115 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/RuleViolationException.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/RuleViolationException.java @@ -9,9 +9,7 @@ import java.util.Set; /** - * Exception thrown when a configured rule is violated, rejecting an artifact content - * update. - * + * Exception thrown when a configured rule is violated, rejecting an artifact content update. */ public class RuleViolationException extends RegistryException { @@ -34,7 +32,8 @@ public class RuleViolationException extends RegistryException { * @param ruleConfiguration * @param cause */ - public RuleViolationException(String message, RuleType ruleType, String ruleConfiguration, Throwable cause) { + public RuleViolationException(String message, RuleType ruleType, String ruleConfiguration, + Throwable cause) { super(message, cause); this.ruleType = ruleType; this.ruleConfiguration = Optional.ofNullable(ruleConfiguration); @@ -49,7 +48,8 @@ public RuleViolationException(String message, RuleType ruleType, String ruleConf * @param ruleConfiguration * @param causes */ - public RuleViolationException(String message, RuleType ruleType, String ruleConfiguration, Set causes) { + public RuleViolationException(String message, RuleType ruleType, String ruleConfiguration, + Set causes) { super(message); this.ruleType = ruleType; this.ruleConfiguration = Optional.ofNullable(ruleConfiguration); @@ -66,7 +66,7 @@ public RuleViolationException(String message, RuleType ruleType, String ruleConf * @param cause */ public RuleViolationException(String message, RuleType ruleType, String ruleConfiguration, - Set causes, Throwable cause) { + Set causes, Throwable cause) { super(message, cause); 
this.ruleType = ruleType; this.ruleConfiguration = Optional.ofNullable(ruleConfiguration); @@ -75,9 +75,8 @@ public RuleViolationException(String message, RuleType ruleType, String ruleConf @Override public String getMessage() { - return super.getMessage() + causes.stream() - .map(rv -> rv.getDescription() + (rv.getContext() != null && !rv.getContext().isBlank() ? " at " + rv.getContext() : "")) - .reduce((left, right) -> left + ", " + right) - .map(s -> " Causes: " + s).orElse(""); + return super.getMessage() + causes.stream().map(rv -> rv.getDescription() + + (rv.getContext() != null && !rv.getContext().isBlank() ? " at " + rv.getContext() : "")) + .reduce((left, right) -> left + ", " + right).map(s -> " Causes: " + s).orElse(""); } } diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/UnprocessableSchemaException.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/UnprocessableSchemaException.java index a283924422..1f4dfa68f3 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/UnprocessableSchemaException.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/UnprocessableSchemaException.java @@ -4,7 +4,6 @@ /** * Similar to "UnprocessableEntityException" but bound to the artifact type utils tools - * */ public class UnprocessableSchemaException extends RegistryException { diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/AbstractCompatibilityChecker.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/AbstractCompatibilityChecker.java index 217bf2521e..fce34b0c39 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/AbstractCompatibilityChecker.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/AbstractCompatibilityChecker.java @@ -15,8 +15,9 @@ public abstract class AbstractCompatibilityChecker implements CompatibilityChecker { @Override - public 
CompatibilityExecutionResult testCompatibility(CompatibilityLevel compatibilityLevel, List existingArtifacts, - TypedContent proposedArtifact, Map resolvedReferences) { + public CompatibilityExecutionResult testCompatibility(CompatibilityLevel compatibilityLevel, + List existingArtifacts, TypedContent proposedArtifact, + Map resolvedReferences) { requireNonNull(compatibilityLevel, "compatibilityLevel MUST NOT be null"); requireNonNull(existingArtifacts, "existingSchemas MUST NOT be null"); requireNonNull(proposedArtifact, "proposedSchema MUST NOT be null"); @@ -28,61 +29,72 @@ public CompatibilityExecutionResult testCompatibility(CompatibilityLevel compati final String proposedArtifactContent = proposedArtifact.getContent().content(); Set incompatibleDiffs = new HashSet<>(); - String lastExistingSchema = existingArtifacts.get(existingArtifacts.size() - 1).getContent().content(); + String lastExistingSchema = existingArtifacts.get(existingArtifacts.size() - 1).getContent() + .content(); switch (compatibilityLevel) { case BACKWARD: - incompatibleDiffs = isBackwardsCompatibleWith(lastExistingSchema, proposedArtifactContent, resolvedReferences); + incompatibleDiffs = isBackwardsCompatibleWith(lastExistingSchema, proposedArtifactContent, + resolvedReferences); break; case BACKWARD_TRANSITIVE: - incompatibleDiffs = transitively(existingArtifacts, proposedArtifactContent, (existing, proposed) -> isBackwardsCompatibleWith(existing, proposed, resolvedReferences)); + incompatibleDiffs = transitively(existingArtifacts, proposedArtifactContent, (existing, + proposed) -> isBackwardsCompatibleWith(existing, proposed, resolvedReferences)); break; case FORWARD: - incompatibleDiffs = isBackwardsCompatibleWith(proposedArtifactContent, lastExistingSchema, resolvedReferences); + incompatibleDiffs = isBackwardsCompatibleWith(proposedArtifactContent, lastExistingSchema, + resolvedReferences); break; case FORWARD_TRANSITIVE: - incompatibleDiffs = transitively(existingArtifacts, 
proposedArtifactContent, (existing, proposed) -> isBackwardsCompatibleWith(proposed, existing, resolvedReferences)); + incompatibleDiffs = transitively(existingArtifacts, proposedArtifactContent, (existing, + proposed) -> isBackwardsCompatibleWith(proposed, existing, resolvedReferences)); break; case FULL: - incompatibleDiffs = ImmutableSet.builder() - .addAll(isBackwardsCompatibleWith(lastExistingSchema, proposedArtifactContent, resolvedReferences)) - .addAll(isBackwardsCompatibleWith(proposedArtifactContent, lastExistingSchema, resolvedReferences)) + incompatibleDiffs = ImmutableSet. builder() + .addAll(isBackwardsCompatibleWith(lastExistingSchema, proposedArtifactContent, + resolvedReferences)) + .addAll(isBackwardsCompatibleWith(proposedArtifactContent, lastExistingSchema, + resolvedReferences)) .build(); break; case FULL_TRANSITIVE: - incompatibleDiffs = ImmutableSet.builder() - .addAll(transitively(existingArtifacts, proposedArtifactContent, (existing, proposed) -> isBackwardsCompatibleWith(existing, proposed, resolvedReferences))) // Backward - .addAll(transitively(existingArtifacts, proposedArtifactContent, (existing, proposed) -> isBackwardsCompatibleWith(proposed, existing, resolvedReferences))) // Backward + incompatibleDiffs = ImmutableSet. 
builder() + .addAll(transitively(existingArtifacts, proposedArtifactContent, + (existing, proposed) -> isBackwardsCompatibleWith(existing, proposed, + resolvedReferences))) // Backward + .addAll(transitively(existingArtifacts, proposedArtifactContent, + (existing, proposed) -> isBackwardsCompatibleWith(proposed, existing, + resolvedReferences))) // Backward .build(); break; case NONE: break; } - Set diffs = - incompatibleDiffs - .stream() - .map(this::transform) - .collect(Collectors.toSet()); + Set diffs = incompatibleDiffs.stream().map(this::transform) + .collect(Collectors.toSet()); return CompatibilityExecutionResult.incompatibleOrEmpty(diffs); } /** - * Given a proposed schema, walk the existing schemas in reverse order (i.e. newest to oldest), - * and for each pair (existing, proposed) call the check function. + * Given a proposed schema, walk the existing schemas in reverse order (i.e. newest to oldest), and for + * each pair (existing, proposed) call the check function. * * @return The collected set of differences. */ private Set transitively(List existingSchemas, String proposedSchema, - BiFunction> checkExistingProposed) { + BiFunction> checkExistingProposed) { Set result = new HashSet<>(); - for (int i = existingSchemas.size() - 1; i >= 0; i--) { // TODO This may become too slow, more wide refactoring needed. - Set current = checkExistingProposed.apply(existingSchemas.get(i).getContent().content(), proposedSchema); + for (int i = existingSchemas.size() - 1; i >= 0; i--) { // TODO This may become too slow, more wide + // refactoring needed. 
+ Set current = checkExistingProposed.apply(existingSchemas.get(i).getContent().content(), + proposedSchema); result.addAll(current); } return result; } - protected abstract Set isBackwardsCompatibleWith(String existing, String proposed, Map resolvedReferences); + protected abstract Set isBackwardsCompatibleWith(String existing, String proposed, + Map resolvedReferences); protected abstract CompatibilityDifference transform(D original); } diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityChecker.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityChecker.java index caa4387080..dd7c20d97b 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityChecker.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityChecker.java @@ -6,21 +6,21 @@ import java.util.Map; /** - * An interface that is used to determine whether a proposed artifact's content is compatible and return a set of - * incompatible differences - * with older version(s) of the same content, based on a given compatibility level. - * + * An interface that is used to determine whether a proposed artifact's content is compatible and return a set + * of incompatible differences with older version(s) of the same content, based on a given compatibility + * level. */ public interface CompatibilityChecker { /** * @param compatibilityLevel MUST NOT be null - * @param existingArtifacts MUST NOT be null and MUST NOT contain null elements, - * but may be empty if the rule is executed and the artifact does not exist - * (e.g. a global COMPATIBILITY rule with io.apicurio.registry.rules.RuleApplicationType#CREATE) - * @param proposedArtifact MUST NOT be null + * @param existingArtifacts MUST NOT be null and MUST NOT contain null elements, but may be empty if the + * rule is executed and the artifact does not exist (e.g. 
a global COMPATIBILITY rule with + * io.apicurio.registry.rules.RuleApplicationType#CREATE) + * @param proposedArtifact MUST NOT be null */ - CompatibilityExecutionResult testCompatibility(CompatibilityLevel compatibilityLevel, List existingArtifacts, - TypedContent proposedArtifact, Map resolvedReferences); + CompatibilityExecutionResult testCompatibility(CompatibilityLevel compatibilityLevel, + List existingArtifacts, TypedContent proposedArtifact, + Map resolvedReferences); } \ No newline at end of file diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityDifference.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityDifference.java index 76a8a6ab9e..4124f93d7d 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityDifference.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityDifference.java @@ -3,9 +3,8 @@ import io.apicurio.registry.rules.RuleViolation; /** - * Represents a single compatibility difference. These are generated when doing compatibility checking - * between two versions of an artifact. A non-zero collection of these indicates a compatibility violation. - * + * Represents a single compatibility difference. These are generated when doing compatibility checking between + * two versions of an artifact. A non-zero collection of these indicates a compatibility violation. 
*/ public interface CompatibilityDifference { diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityExecutionResult.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityExecutionResult.java index 2081f97478..3b32a14bb6 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityExecutionResult.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityExecutionResult.java @@ -6,9 +6,8 @@ /** * Created by aohana *

- * Holds the result for a compatibility check - * incompatibleDifferences - will contain values in case the schema type has difference type information in case the - * new schema is not compatible (only JSON schema as of now) + * Holds the result for a compatibility check incompatibleDifferences - will contain values in case the schema + * type has difference type information in case the new schema is not compatible (only JSON schema as of now) */ public class CompatibilityExecutionResult { @@ -31,18 +30,19 @@ public static CompatibilityExecutionResult compatible() { } /** - * Creates an instance of {@link CompatibilityExecutionResult} that represents "incompatible" results. This - * variant takes the set of {@link CompatibilityDifference}s as the basis of the result. A non-zero number - * of differences indicates incompatibility. + * Creates an instance of {@link CompatibilityExecutionResult} that represents "incompatible" results. + * This variant takes the set of {@link CompatibilityDifference}s as the basis of the result. A non-zero + * number of differences indicates incompatibility. */ - public static CompatibilityExecutionResult incompatibleOrEmpty(Set incompatibleDifferences) { + public static CompatibilityExecutionResult incompatibleOrEmpty( + Set incompatibleDifferences) { return new CompatibilityExecutionResult(incompatibleDifferences); } /** - * Creates an instance of {@link CompatibilityExecutionResult} that represents "incompatible" results. This - * variant takes an Exception and converts that into a set of differences. Ideally this would never be used, - * but some artifact types do not have the level of granularity to report individual differences. + * Creates an instance of {@link CompatibilityExecutionResult} that represents "incompatible" results. + * This variant takes an Exception and converts that into a set of differences. 
Ideally this would never + * be used, but some artifact types do not have the level of granularity to report individual differences. */ public static CompatibilityExecutionResult incompatible(Exception e) { CompatibilityDifference diff = new SimpleCompatibilityDifference(e.getMessage()); @@ -50,8 +50,8 @@ public static CompatibilityExecutionResult incompatible(Exception e) { } /** - * Creates an instance of {@link CompatibilityExecutionResult} that represents "incompatible" results. This - * variant takes a message. + * Creates an instance of {@link CompatibilityExecutionResult} that represents "incompatible" results. + * This variant takes a message. */ public static CompatibilityExecutionResult incompatible(String message) { CompatibilityDifference diff = new SimpleCompatibilityDifference(message); diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityLevel.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityLevel.java index cae3d6e116..2481570099 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityLevel.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/CompatibilityLevel.java @@ -1,11 +1,5 @@ package io.apicurio.registry.rules.compatibility; public enum CompatibilityLevel { - BACKWARD, - BACKWARD_TRANSITIVE, - FORWARD, - FORWARD_TRANSITIVE, - FULL, - FULL_TRANSITIVE, - NONE + BACKWARD, BACKWARD_TRANSITIVE, FORWARD, FORWARD_TRANSITIVE, FULL, FULL_TRANSITIVE, NONE } diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/NoopCompatibilityChecker.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/NoopCompatibilityChecker.java index 07cefcb897..1754d09b9d 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/NoopCompatibilityChecker.java +++ 
b/schema-util/common/src/main/java/io/apicurio/registry/rules/compatibility/NoopCompatibilityChecker.java @@ -11,11 +11,12 @@ public class NoopCompatibilityChecker implements CompatibilityChecker { public static CompatibilityChecker INSTANCE = new NoopCompatibilityChecker(); /** - * @see CompatibilityChecker#testCompatibility(CompatibilityLevel, List, TypedContent, Map) + * @see CompatibilityChecker#testCompatibility(CompatibilityLevel, List, TypedContent, Map) */ @Override - public CompatibilityExecutionResult testCompatibility(CompatibilityLevel compatibilityLevel, List existingArtifacts, - TypedContent proposedArtifact, Map resolvedReferences) { + public CompatibilityExecutionResult testCompatibility(CompatibilityLevel compatibilityLevel, + List existingArtifacts, TypedContent proposedArtifact, + Map resolvedReferences) { requireNonNull(compatibilityLevel, "compatibilityLevel MUST NOT be null"); requireNonNull(existingArtifacts, "existingSchemas MUST NOT be null"); requireNonNull(proposedArtifact, "proposedSchema MUST NOT be null"); diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/integrity/IntegrityLevel.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/integrity/IntegrityLevel.java index a9c148909b..ca7c19a74c 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/integrity/IntegrityLevel.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/integrity/IntegrityLevel.java @@ -6,5 +6,5 @@ public enum IntegrityLevel { NONE, REFS_EXIST, ALL_REFS_MAPPED, NO_DUPLICATES, FULL; - + } diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/validity/ContentValidator.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/validity/ContentValidator.java index 0c58d3f682..17cf6df1db 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/validity/ContentValidator.java +++ 
b/schema-util/common/src/main/java/io/apicurio/registry/rules/validity/ContentValidator.java @@ -8,22 +8,23 @@ import java.util.Map; /** - * Validates content. Syntax and semantic validations are possible based on configuration. An - * implementation of this interface should exist for each content-type supported by the registry. - * - * Also provides validation of references. + * Validates content. Syntax and semantic validations are possible based on configuration. An implementation + * of this interface should exist for each content-type supported by the registry. Also provides validation of + * references. */ public interface ContentValidator { /** * Called to validate the given content. */ - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException; - + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException; + /** - * Ensures that all references in the content are represented in the list of passed references. This is used - * to ensure that the content does not have any references that are unmapped. + * Ensures that all references in the content are represented in the list of passed references. This is + * used to ensure that the content does not have any references that are unmapped. 
*/ - public void validateReferences(TypedContent content, List references) throws RuleViolationException; + public void validateReferences(TypedContent content, List references) + throws RuleViolationException; } diff --git a/schema-util/common/src/main/java/io/apicurio/registry/rules/validity/ValidityLevel.java b/schema-util/common/src/main/java/io/apicurio/registry/rules/validity/ValidityLevel.java index 4f45047c05..73c056357d 100644 --- a/schema-util/common/src/main/java/io/apicurio/registry/rules/validity/ValidityLevel.java +++ b/schema-util/common/src/main/java/io/apicurio/registry/rules/validity/ValidityLevel.java @@ -7,5 +7,5 @@ public enum ValidityLevel { // TODO definitions NONE, SYNTAX_ONLY, FULL; - + } diff --git a/schema-util/common/src/test/java/io/apicurio/registry/rules/compatibility/CompatibilityTestExecutor.java b/schema-util/common/src/test/java/io/apicurio/registry/rules/compatibility/CompatibilityTestExecutor.java index 0629d80c0e..97f306664b 100644 --- a/schema-util/common/src/test/java/io/apicurio/registry/rules/compatibility/CompatibilityTestExecutor.java +++ b/schema-util/common/src/test/java/io/apicurio/registry/rules/compatibility/CompatibilityTestExecutor.java @@ -54,16 +54,18 @@ public Set execute(String testData) throws Exception { log.info("Running test case: {}", caseId); var original = testCaseData.get("original").toString(); - var originalCT = testCaseData.has("originalContentType") ? testCaseData.get("originalContentType").toString() : ContentTypes.APPLICATION_JSON; + var originalCT = testCaseData.has("originalContentType") + ? testCaseData.get("originalContentType").toString() : ContentTypes.APPLICATION_JSON; var originalTypedContent = TypedContent.create(original, originalCT); var updated = testCaseData.get("updated").toString(); - var updatedCT = testCaseData.has("updatedContentType") ? testCaseData.get("updatedContentType").toString() : ContentTypes.APPLICATION_JSON; + var updatedCT = testCaseData.has("updatedContentType") + ? 
testCaseData.get("updatedContentType").toString() : ContentTypes.APPLICATION_JSON; var updatedTypedContent = TypedContent.create(updated, updatedCT); - var resultBackward = checker.testCompatibility(CompatibilityLevel.BACKWARD, List.of(originalTypedContent), - updatedTypedContent, Collections.emptyMap()); - var resultForward = checker.testCompatibility(CompatibilityLevel.FORWARD, List.of(originalTypedContent), - updatedTypedContent, Collections.emptyMap()); + var resultBackward = checker.testCompatibility(CompatibilityLevel.BACKWARD, + List.of(originalTypedContent), updatedTypedContent, Collections.emptyMap()); + var resultForward = checker.testCompatibility(CompatibilityLevel.FORWARD, + List.of(originalTypedContent), updatedTypedContent, Collections.emptyMap()); switch (testCaseData.getString("compatibility")) { case "backward": @@ -107,13 +109,13 @@ public Set execute(String testData) throws Exception { public static void throwOnFailure(Set failed) { if (!failed.isEmpty()) { - throw new RuntimeException(failed.size() + " test cases failed: " + failed.stream() - .reduce("", (a, s) -> a + "\n" + s)); + throw new RuntimeException(failed.size() + " test cases failed: " + + failed.stream().reduce("", (a, s) -> a + "\n" + s)); } } private static void logFail(String caseId, CompatibilityExecutionResult resultBackward, - CompatibilityExecutionResult resultForward) { + CompatibilityExecutionResult resultForward) { log.error("\nFailed caseId: {}\nBackward {}: {}\nForward {}: {}\n", caseId, resultBackward.isCompatible(), resultBackward.getIncompatibleDifferences(), resultForward.isCompatible(), resultForward.getIncompatibleDifferences()); diff --git a/schema-util/graphql/pom.xml b/schema-util/graphql/pom.xml index 017293f088..fe62e549b2 100644 --- a/schema-util/graphql/pom.xml +++ b/schema-util/graphql/pom.xml @@ -1,53 +1,51 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT 
+ ../../pom.xml + - apicurio-registry-schema-util-graphql - jar - apicurio-registry-schema-util-graphql + apicurio-registry-schema-util-graphql + jar + apicurio-registry-schema-util-graphql - + - - io.apicurio - apicurio-registry-schema-util-common - + + io.apicurio + apicurio-registry-schema-util-common + - - - com.graphql-java - graphql-java - - - + + + com.graphql-java + graphql-java + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/graphql/src/main/java/io/apicurio/registry/content/canon/GraphQLContentCanonicalizer.java b/schema-util/graphql/src/main/java/io/apicurio/registry/content/canon/GraphQLContentCanonicalizer.java index 3d8fc3c90b..d36c41b5c4 100644 --- a/schema-util/graphql/src/main/java/io/apicurio/registry/content/canon/GraphQLContentCanonicalizer.java +++ b/schema-util/graphql/src/main/java/io/apicurio/registry/content/canon/GraphQLContentCanonicalizer.java @@ -16,12 +16,13 @@ * A canonicalizer that handles GraphQL (SDL) formatted content. 
*/ public class GraphQLContentCanonicalizer implements ContentCanonicalizer { - + private static final SchemaParser sparser = new SchemaParser(); private static final SchemaGenerator schemaGenerator = new SchemaGenerator(); private static final RuntimeWiring wiring = RuntimeWiring.newRuntimeWiring().build(); - private static final SchemaPrinter printer = new SchemaPrinter(Options.defaultOptions().includeDirectives(false)); - + private static final SchemaPrinter printer = new SchemaPrinter( + Options.defaultOptions().includeDirectives(false)); + /** * @see ContentCanonicalizer#canonicalize(TypedContent, Map) */ diff --git a/schema-util/graphql/src/main/java/io/apicurio/registry/rules/validity/GraphQLContentValidator.java b/schema-util/graphql/src/main/java/io/apicurio/registry/rules/validity/GraphQLContentValidator.java index 2e041d793d..5518651a70 100644 --- a/schema-util/graphql/src/main/java/io/apicurio/registry/rules/validity/GraphQLContentValidator.java +++ b/schema-util/graphql/src/main/java/io/apicurio/registry/rules/validity/GraphQLContentValidator.java @@ -24,13 +24,15 @@ public GraphQLContentValidator() { * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) */ @Override - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException { + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException { if (level == ValidityLevel.SYNTAX_ONLY || level == ValidityLevel.FULL) { try { new SchemaParser().parse(content.getContent().content()); } catch (Exception e) { e.printStackTrace(); - throw new RuleViolationException("Syntax violation for GraphQL artifact.", RuleType.VALIDITY, level.name(), e); + throw new RuleViolationException("Syntax violation for GraphQL artifact.", RuleType.VALIDITY, + level.name(), e); } } } @@ -39,7 +41,8 @@ public void validate(ValidityLevel level, TypedContent content, Map 
references) throws RuleViolationException { + public void validateReferences(TypedContent content, List references) + throws RuleViolationException { // Note: not yet implemented! } diff --git a/schema-util/json/pom.xml b/schema-util/json/pom.xml index afb4f2e992..cdca364c5e 100644 --- a/schema-util/json/pom.xml +++ b/schema-util/json/pom.xml @@ -1,99 +1,97 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-schema-util-json - jar - apicurio-registry-schema-util-json + apicurio-registry-schema-util-json + jar + apicurio-registry-schema-util-json - - - - io.apicurio - apicurio-registry-schema-util-common - + + + + io.apicurio + apicurio-registry-schema-util-common + - - io.apicurio - apicurio-registry-schema-util-common - ${project.version} - test-jar - test - + + io.apicurio + apicurio-registry-schema-util-common + ${project.version} + test-jar + test + - - io.apicurio - apicurio-common-app-components-logging - + + io.apicurio + apicurio-common-app-components-logging + - - com.google.guava - guava - - - com.github.everit-org.json-schema - org.everit.json.schema - - - com.fasterxml.jackson.datatype - jackson-datatype-json-org - + + com.google.guava + guava + + + com.github.everit-org.json-schema + org.everit.json.schema + + + com.fasterxml.jackson.datatype + jackson-datatype-json-org + - - org.projectlombok - lombok - compile - + + org.projectlombok + lombok + compile + - - com.fasterxml.jackson.module - jackson-module-parameter-names - + + com.fasterxml.jackson.module + jackson-module-parameter-names + - - com.fasterxml.jackson.datatype - jackson-datatype-jdk8 - + + com.fasterxml.jackson.datatype + jackson-datatype-jdk8 + - - com.fasterxml.jackson.datatype - jackson-datatype-jsr310 - + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + - - org.junit.jupiter - junit-jupiter - test - - + + org.junit.jupiter + junit-jupiter 
+ test + + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/json/src/main/java/io/apicurio/registry/content/canon/JsonContentCanonicalizer.java b/schema-util/json/src/main/java/io/apicurio/registry/content/canon/JsonContentCanonicalizer.java index 8b7b8aa56a..30324b2043 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/content/canon/JsonContentCanonicalizer.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/content/canon/JsonContentCanonicalizer.java @@ -11,14 +11,13 @@ import java.util.Map; /** - * A common JSON content canonicalizer. This will remove any extra formatting such as whitespace - * and also sort all fields/properties for all objects (because ordering of properties does not - * matter in JSON). - * + * A common JSON content canonicalizer. This will remove any extra formatting such as whitespace and also sort + * all fields/properties for all objects (because ordering of properties does not matter in JSON). */ public class JsonContentCanonicalizer implements ContentCanonicalizer { - private final ObjectMapper mapper = new ObjectMapper().enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); + private final ObjectMapper mapper = new ObjectMapper() + .enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); /** * @see ContentCanonicalizer#canonicalize(TypedContent, Map) @@ -36,8 +35,8 @@ public TypedContent canonicalize(TypedContent content, Map } /** - * Perform any additional processing on the JSON node. The base JSON canonicalizer - * does nothing extra. + * Perform any additional processing on the JSON node. The base JSON canonicalizer does nothing extra. 
+ * * @param node */ protected void processJsonNode(JsonNode node) { diff --git a/schema-util/json/src/main/java/io/apicurio/registry/content/dereference/JsonSchemaDereferencer.java b/schema-util/json/src/main/java/io/apicurio/registry/content/dereference/JsonSchemaDereferencer.java index 370dfd2bbb..4eba43cf58 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/content/dereference/JsonSchemaDereferencer.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/content/dereference/JsonSchemaDereferencer.java @@ -35,11 +35,13 @@ public class JsonSchemaDereferencer implements ContentDereferencer { @Override public TypedContent dereference(TypedContent content, Map resolvedReferences) { - throw new DereferencingNotSupportedException("Content dereferencing is not supported for JSON Schema"); + throw new DereferencingNotSupportedException( + "Content dereferencing is not supported for JSON Schema"); } /** - * @see io.apicurio.registry.content.dereference.ContentDereferencer#rewriteReferences(io.apicurio.registry.content.TypedContent, java.util.Map) + * @see io.apicurio.registry.content.dereference.ContentDereferencer#rewriteReferences(io.apicurio.registry.content.TypedContent, + * java.util.Map) */ @Override public TypedContent rewriteReferences(TypedContent content, Map resolvedReferenceUrls) { diff --git a/schema-util/json/src/main/java/io/apicurio/registry/content/extract/JsonContentExtractor.java b/schema-util/json/src/main/java/io/apicurio/registry/content/extract/JsonContentExtractor.java index 000bf48005..cd8a294265 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/content/extract/JsonContentExtractor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/content/extract/JsonContentExtractor.java @@ -1,14 +1,12 @@ package io.apicurio.registry.content.extract; -import java.io.IOException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.fasterxml.jackson.databind.JsonNode; import 
com.fasterxml.jackson.databind.ObjectMapper; - import io.apicurio.registry.content.ContentHandle; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; /** * Performs meta-data extraction for JSON Schema content. diff --git a/schema-util/json/src/main/java/io/apicurio/registry/content/refs/JsonSchemaReferenceFinder.java b/schema-util/json/src/main/java/io/apicurio/registry/content/refs/JsonSchemaReferenceFinder.java index ac31f2410d..e4827edf8f 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/content/refs/JsonSchemaReferenceFinder.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/content/refs/JsonSchemaReferenceFinder.java @@ -31,10 +31,8 @@ public Set findExternalReferences(TypedContent content) { Set externalTypes = new HashSet<>(); findExternalTypesIn(tree, externalTypes); - return externalTypes.stream() - .map(type -> new JsonPointerExternalReference(type)) - .filter(ref -> ref.getResource() != null) - .collect(Collectors.toSet()); + return externalTypes.stream().map(type -> new JsonPointerExternalReference(type)) + .filter(ref -> ref.getResource() != null).collect(Collectors.toSet()); } catch (Exception e) { log.error("Error finding external references in an Avro file.", e); return Collections.emptySet(); @@ -46,7 +44,8 @@ private static void findExternalTypesIn(JsonNode schema, Set externalTyp if (schema.has("$ref")) { String ref = schema.get("$ref").asText(null); if (ref != null) { - // TODO: the value of the ref should be resolved against the $id in this schema if it has one + // TODO: the value of the ref should be resolved against the $id in this schema if it has + // one externalTypes.add(ref); } } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/JsonSchemaCompatibilityChecker.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/JsonSchemaCompatibilityChecker.java index f6f12a0ef3..aab1bb70e5 100644 --- 
a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/JsonSchemaCompatibilityChecker.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/JsonSchemaCompatibilityChecker.java @@ -10,7 +10,8 @@ public class JsonSchemaCompatibilityChecker extends AbstractCompatibilityChecker { @Override - protected Set isBackwardsCompatibleWith(String existing, String proposed, Map resolvedReferences) { + protected Set isBackwardsCompatibleWith(String existing, String proposed, + Map resolvedReferences) { return JsonSchemaDiffLibrary.getIncompatibleDifferences(existing, proposed, resolvedReferences); } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/JsonSchemaCompatibilityDifference.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/JsonSchemaCompatibilityDifference.java index a97fc33347..fbcab18169 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/JsonSchemaCompatibilityDifference.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/JsonSchemaCompatibilityDifference.java @@ -20,7 +20,8 @@ public class JsonSchemaCompatibilityDifference implements CompatibilityDifferenc /** * @see CompatibilityDifference#asRuleViolation() */ - @Override public RuleViolation asRuleViolation() { + @Override + public RuleViolation asRuleViolation() { return new RuleViolation(difference.getDiffType().getDescription(), difference.getPathUpdated()); } } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaDiffLibrary.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaDiffLibrary.java index abc6933396..206220cd66 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaDiffLibrary.java +++ 
b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaDiffLibrary.java @@ -26,13 +26,14 @@ public class JsonSchemaDiffLibrary { /** * Find and analyze differences between two JSON schemas. * - * @param original Original/Previous/First/Left JSON schema representation - * @param updated Updated/Next/Second/Right JSON schema representation + * @param original Original/Previous/First/Left JSON schema representation + * @param updated Updated/Next/Second/Right JSON schema representation * @param resolvedReferences * @return an object to access the found differences: Original -> Updated * @throws IllegalArgumentException if the input is not a valid representation of a JsonSchema */ - public static DiffContext findDifferences(String original, String updated, Map resolvedReferences) { + public static DiffContext findDifferences(String original, String updated, + Map resolvedReferences) { try { JsonNode originalNode = MAPPER.readTree(original); JsonNode updatedNode = MAPPER.readTree(updated); @@ -44,9 +45,7 @@ public static DiffContext findDifferences(String original, String updated, Map resolvedReferences, SchemaLoader.SchemaLoaderBuilder schemaLoaderBuilder) { + private static void loadReferences(JsonNode jsonNode, Map resolvedReferences, + SchemaLoader.SchemaLoaderBuilder schemaLoaderBuilder) { SpecificationVersion spec = SpecificationVersion.DRAFT_7; if (jsonNode.has(SCHEMA_KEYWORD)) { String schema = jsonNode.get(SCHEMA_KEYWORD).asText(); if (schema != null) { - spec = SpecificationVersion.lookupByMetaSchemaUrl(schema).orElse(SpecificationVersion.DRAFT_7); + spec = SpecificationVersion.lookupByMetaSchemaUrl(schema) + .orElse(SpecificationVersion.DRAFT_7); } } @@ -80,7 +81,8 @@ private static void loadReferences(JsonNode jsonNode, Map for (Map.Entry stringStringEntry : resolvedReferences.entrySet()) { URI child = ReferenceResolver.resolve(idUri, stringStringEntry.getKey()); - schemaLoaderBuilder.registerSchemaByURI(child, new 
JSONObject(stringStringEntry.getValue().getContent().content())); + schemaLoaderBuilder.registerSchemaByURI(child, + new JSONObject(stringStringEntry.getValue().getContent().content())); } } @@ -90,11 +92,13 @@ public static DiffContext findDifferences(Schema originalSchema, Schema updatedS return rootContext; } - public static boolean isCompatible(String original, String updated, Map resolvedReferences) { + public static boolean isCompatible(String original, String updated, + Map resolvedReferences) { return findDifferences(original, updated, resolvedReferences).foundAllDifferencesAreCompatible(); } - public static Set getIncompatibleDifferences(String original, String updated, Map resolvedReferences) { + public static Set getIncompatibleDifferences(String original, String updated, + Map resolvedReferences) { return findDifferences(original, updated, resolvedReferences).getIncompatibleDifferences(); } } \ No newline at end of file diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaWrapperVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaWrapperVisitor.java index 8048c6b44e..cb13017510 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaWrapperVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaWrapperVisitor.java @@ -91,7 +91,6 @@ public void visitArraySchema(ArraySchemaWrapper arraySchema) { } } - public void visitItemSchemas(List itemSchemas) { } @@ -194,7 +193,6 @@ public void visitReferredSchema(SchemaWrapper schema) { } - public void visitObjectSchema(ObjectSchemaWrapper objectSchema) { visitSchema(objectSchema); visitRequiredProperties(objectSchema.getRequiredProperties()); @@ -237,7 +235,6 @@ public void visitAllPropertyDependencies(Map> propertyDepend } } - public void visitRequiredProperties(List requiredProperties) { for (String 
requiredPropName : requiredProperties) { visitRequiredPropertyName(requiredPropName); @@ -323,8 +320,8 @@ public void visitCombinedSchema(CombinedSchemaWrapper combinedSchema) { } else if (CombinedSchema.ONE_CRITERION == criterion) { visitOneOfCombinedSchema(combinedSchema); } else { - throw new IllegalStateException("Could not determine if the combined schema is " + - "'allOf', 'anyOf', or 'oneOf': " + combinedSchema); + throw new IllegalStateException("Could not determine if the combined schema is " + + "'allOf', 'anyOf', or 'oneOf': " + combinedSchema); } } @@ -349,7 +346,6 @@ public void visitAllOfCombinedSchema(CombinedSchemaWrapper schema) { } - public void visitConditionalSchema(ConditionalSchemaWrapper conditionalSchema) { visitSchema(conditionalSchema); conditionalSchema.getIfSchema().ifPresent(this::visitIfSchema); diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonUtil.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonUtil.java index fbaa46bee4..9756d3ab1a 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonUtil.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonUtil.java @@ -48,11 +48,13 @@ public static Schema readSchema(String content) throws JsonProcessingException { return readSchema(content, Collections.emptyMap(), true); } - public static Schema readSchema(String content, Map resolvedReferences) throws JsonProcessingException { + public static Schema readSchema(String content, Map resolvedReferences) + throws JsonProcessingException { return readSchema(content, resolvedReferences, true); } - public static Schema readSchema(String content, Map resolvedReferences, boolean validateDangling) throws JsonProcessingException { + public static Schema readSchema(String content, Map resolvedReferences, + boolean validateDangling) throws JsonProcessingException { JsonNode 
jsonNode = MAPPER.readTree(content); Schema schemaObj; // Extract the $schema to use for determining the id keyword @@ -74,14 +76,10 @@ public static Schema readSchema(String content, Map resolv } // First extract all references var refNodes = jsonNode.findValues("$ref"); - var refStrings = refNodes.stream() - .filter(JsonNode::isTextual) - .map(TextNode.class::cast) - .map(TextNode::textValue) - .collect(Collectors.toList()); + var refStrings = refNodes.stream().filter(JsonNode::isTextual).map(TextNode.class::cast) + .map(TextNode::textValue).collect(Collectors.toList()); - SchemaLoader.SchemaLoaderBuilder builder = SchemaLoader.builder() - .useDefaults(true).draftV7Support(); + SchemaLoader.SchemaLoaderBuilder builder = SchemaLoader.builder().useDefaults(true).draftV7Support(); var resolvedReferencesCopy = new HashMap<>(resolvedReferences); var referenceURIs = new ArrayList<>(); @@ -90,27 +88,26 @@ public static Schema readSchema(String content, Map resolv referenceURIs.add(referenceURI); var resolvedReference = resolvedReferencesCopy.remove(referenceURI.toString()); if (resolvedReference != null) { - builder.registerSchemaByURI(referenceURI, new JSONObject(resolvedReference.getContent().content())); + builder.registerSchemaByURI(referenceURI, + new JSONObject(resolvedReference.getContent().content())); } else { - /* Since we do not have the referenced content, - * we insert a placeholder schema, that will accept any JSON, - * to the reference lookup table of the library. - * This prevents the library from attempting to download the schema if `http://`, - * or trying to open a file if `file://`. - * This avoids potential security issues - * by us having to explicitly provide referenced content. - * For validation, we do not care about the reference format, - * while still requiring a valid URI. + /* + * Since we do not have the referenced content, we insert a placeholder schema, that will + * accept any JSON, to the reference lookup table of the library. 
This prevents the library + * from attempting to download the schema if `http://`, or trying to open a file if `file://`. + * This avoids potential security issues by us having to explicitly provide referenced + * content. For validation, we do not care about the reference format, while still requiring a + * valid URI. */ builder.registerSchemaByURI(referenceURI, new JSONObject()); } } // Check for dangling references. Do we want to do this as a separate rule? if (validateDangling && !resolvedReferencesCopy.isEmpty()) { - var msg = "There are unused references recorded for this content. " + - "Make sure you have not made a typo, otherwise remove the unused reference record(s). " + - "References in the content: " + referenceURIs + ", " + - "Unused reference records: " + new ArrayList<>(resolvedReferencesCopy.keySet()); + var msg = "There are unused references recorded for this content. " + + "Make sure you have not made a typo, otherwise remove the unused reference record(s). " + + "References in the content: " + referenceURIs + ", " + "Unused reference records: " + + new ArrayList<>(resolvedReferencesCopy.keySet()); throw new RegistryException(msg); } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ArraySchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ArraySchemaDiffVisitor.java index e38440ea97..b8bf81ed73 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ArraySchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ArraySchemaDiffVisitor.java @@ -9,20 +9,20 @@ import java.util.List; import java.util.Optional; +import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ADDITIONAL_ITEMS_BOOLEAN_UNCHANGED; import static 
io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ADDITIONAL_ITEMS_EXTENDED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ADDITIONAL_ITEMS_FALSE_TO_TRUE; -import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ADDITIONAL_ITEMS_BOOLEAN_UNCHANGED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ADDITIONAL_ITEMS_NARROWED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ADDITIONAL_ITEMS_TRUE_TO_FALSE; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ALL_ITEM_SCHEMA_ADDED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ALL_ITEM_SCHEMA_REMOVED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_ADDED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_REMOVED; -import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ITEM_SCHEMA_ADDED; -import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ITEM_SCHEMA_REMOVED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ITEM_SCHEMAS_CHANGED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ITEM_SCHEMAS_EXTENDED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ITEM_SCHEMAS_NARROWED; +import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ITEM_SCHEMA_ADDED; +import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_ITEM_SCHEMA_REMOVED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_MAX_ITEMS_ADDED; import static 
io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_MAX_ITEMS_DECREASED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_MAX_ITEMS_INCREASED; @@ -33,8 +33,8 @@ import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_MIN_ITEMS_REMOVED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_CHANGED; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_UNCHANGED; -import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_UNIQUE_ITEMS_FALSE_TO_TRUE; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_UNIQUE_ITEMS_BOOLEAN_UNCHANGED; +import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_UNIQUE_ITEMS_FALSE_TO_TRUE; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffType.ARRAY_TYPE_UNIQUE_ITEMS_TRUE_TO_FALSE; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffUtil.diffBooleanTransition; import static io.apicurio.registry.rules.compatibility.jsonschema.diff.DiffUtil.diffInteger; @@ -47,7 +47,6 @@ public class ArraySchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private final DiffContext ctx; private final ArraySchema original; private ArraySchemaWrapper schema; @@ -66,30 +65,23 @@ public void visitArraySchema(ArraySchemaWrapper arraySchema) { @Override public void visitMinItems(Integer minItems) { - diffInteger(ctx.sub("minItems"), original.getMinItems(), minItems, - ARRAY_TYPE_MIN_ITEMS_ADDED, - ARRAY_TYPE_MIN_ITEMS_REMOVED, - ARRAY_TYPE_MIN_ITEMS_INCREASED, - ARRAY_TYPE_MIN_ITEMS_DECREASED); + diffInteger(ctx.sub("minItems"), original.getMinItems(), minItems, ARRAY_TYPE_MIN_ITEMS_ADDED, + ARRAY_TYPE_MIN_ITEMS_REMOVED, ARRAY_TYPE_MIN_ITEMS_INCREASED, ARRAY_TYPE_MIN_ITEMS_DECREASED); 
super.visitMinItems(minItems); } @Override public void visitMaxItems(Integer maxItems) { - diffInteger(ctx.sub("maxItems"), original.getMaxItems(), maxItems, - ARRAY_TYPE_MAX_ITEMS_ADDED, - ARRAY_TYPE_MAX_ITEMS_REMOVED, - ARRAY_TYPE_MAX_ITEMS_INCREASED, - ARRAY_TYPE_MAX_ITEMS_DECREASED); + diffInteger(ctx.sub("maxItems"), original.getMaxItems(), maxItems, ARRAY_TYPE_MAX_ITEMS_ADDED, + ARRAY_TYPE_MAX_ITEMS_REMOVED, ARRAY_TYPE_MAX_ITEMS_INCREASED, ARRAY_TYPE_MAX_ITEMS_DECREASED); super.visitMaxItems(maxItems); } @Override public void visitUniqueItems(boolean uniqueItems) { - diffBooleanTransition(ctx.sub("uniqueItems"), original.needsUniqueItems(), uniqueItems,false, - ARRAY_TYPE_UNIQUE_ITEMS_FALSE_TO_TRUE, - ARRAY_TYPE_UNIQUE_ITEMS_TRUE_TO_FALSE, - ARRAY_TYPE_UNIQUE_ITEMS_BOOLEAN_UNCHANGED); + diffBooleanTransition(ctx.sub("uniqueItems"), original.needsUniqueItems(), uniqueItems, false, + ARRAY_TYPE_UNIQUE_ITEMS_FALSE_TO_TRUE, ARRAY_TYPE_UNIQUE_ITEMS_TRUE_TO_FALSE, + ARRAY_TYPE_UNIQUE_ITEMS_BOOLEAN_UNCHANGED); super.visitUniqueItems(uniqueItems); } @@ -98,8 +90,7 @@ public void visitAllItemSchema(SchemaWrapper allItemSchema) { ctx.log("visitAllItemSchema: " + allItemSchema + " orig.: " + original.getAllItemSchema()); DiffContext subCtx = ctx.sub("allItemSchema"); if (diffSubschemaAddedRemoved(subCtx, original.getAllItemSchema(), allItemSchema, - ARRAY_TYPE_ALL_ITEM_SCHEMA_ADDED, - ARRAY_TYPE_ALL_ITEM_SCHEMA_REMOVED)) { + ARRAY_TYPE_ALL_ITEM_SCHEMA_ADDED, ARRAY_TYPE_ALL_ITEM_SCHEMA_REMOVED)) { allItemSchema.accept(new SchemaDiffVisitor(subCtx, original.getAllItemSchema())); } super.visitAllItemSchema(allItemSchema); @@ -108,15 +99,14 @@ public void visitAllItemSchema(SchemaWrapper allItemSchema) { @Override public void visitAdditionalItems(boolean additionalItems) { ctx.log("visitAdditionalItems: " + additionalItems); - if (diffBooleanTransition(ctx.sub("additionalItems"), original.permitsAdditionalItems(), additionalItems, true, - 
ARRAY_TYPE_ADDITIONAL_ITEMS_FALSE_TO_TRUE, - ARRAY_TYPE_ADDITIONAL_ITEMS_TRUE_TO_FALSE, - ARRAY_TYPE_ADDITIONAL_ITEMS_BOOLEAN_UNCHANGED)) { + if (diffBooleanTransition(ctx.sub("additionalItems"), original.permitsAdditionalItems(), + additionalItems, true, ARRAY_TYPE_ADDITIONAL_ITEMS_FALSE_TO_TRUE, + ARRAY_TYPE_ADDITIONAL_ITEMS_TRUE_TO_FALSE, ARRAY_TYPE_ADDITIONAL_ITEMS_BOOLEAN_UNCHANGED)) { if (additionalItems) { // both original and updated permit additionalItems - Schema updatedSchemaOfAdditionalItems = - schema.getSchemaOfAdditionalItems() == null ? null : schema.getSchemaOfAdditionalItems().getWrapped(); + Schema updatedSchemaOfAdditionalItems = schema.getSchemaOfAdditionalItems() == null ? null + : schema.getSchemaOfAdditionalItems().getWrapped(); diffSchemaOrTrue(ctx.sub("schemaOfAdditionalItems"), original.getSchemaOfAdditionalItems(), updatedSchemaOfAdditionalItems, ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_UNCHANGED, ARRAY_TYPE_ADDITIONAL_ITEMS_EXTENDED, ARRAY_TYPE_ADDITIONAL_ITEMS_NARROWED, @@ -138,15 +128,16 @@ public void visitItemSchemas(List itemSchemas) { if (updatedSize > size) { // adding items diffSubSchemasAdded(ctx.sub("addItemSchema"), itemSchemas.subList(size, updatedSize), - original.permitsAdditionalItems(), wrap(original.getSchemaOfAdditionalItems()), - schema.permitsAdditionalItems(), ARRAY_TYPE_ITEM_SCHEMAS_EXTENDED, - ARRAY_TYPE_ITEM_SCHEMAS_NARROWED, ARRAY_TYPE_ITEM_SCHEMAS_CHANGED); + original.permitsAdditionalItems(), wrap(original.getSchemaOfAdditionalItems()), + schema.permitsAdditionalItems(), ARRAY_TYPE_ITEM_SCHEMAS_EXTENDED, + ARRAY_TYPE_ITEM_SCHEMAS_NARROWED, ARRAY_TYPE_ITEM_SCHEMAS_CHANGED); } if (originalSize > size) { // removing items - diffSubSchemasRemoved(ctx.sub("removeItemSchema"), wrap(original.getItemSchemas().subList(size, originalSize)), - schema.permitsAdditionalItems(), schema.getSchemaOfAdditionalItems(), - original.permitsAdditionalItems(), ARRAY_TYPE_ITEM_SCHEMAS_NARROWED, - ARRAY_TYPE_ITEM_SCHEMAS_EXTENDED, 
ARRAY_TYPE_ITEM_SCHEMAS_CHANGED); + diffSubSchemasRemoved(ctx.sub("removeItemSchema"), + wrap(original.getItemSchemas().subList(size, originalSize)), + schema.permitsAdditionalItems(), schema.getSchemaOfAdditionalItems(), + original.permitsAdditionalItems(), ARRAY_TYPE_ITEM_SCHEMAS_NARROWED, + ARRAY_TYPE_ITEM_SCHEMAS_EXTENDED, ARRAY_TYPE_ITEM_SCHEMAS_CHANGED); } super.visitItemSchemas(itemSchemas); @@ -157,9 +148,8 @@ public void visitItemSchema(int index, SchemaWrapper itemSchema) { ctx.log("visitItemSchema: " + itemSchema); DiffContext subCtx = ctx.sub("items/" + index); if (diffSubschemaAddedRemoved(subCtx, - getExceptionally(subCtx, () -> original.getItemSchemas().get(index)), itemSchema, - ARRAY_TYPE_ITEM_SCHEMA_ADDED, - ARRAY_TYPE_ITEM_SCHEMA_REMOVED)) { + getExceptionally(subCtx, () -> original.getItemSchemas().get(index)), itemSchema, + ARRAY_TYPE_ITEM_SCHEMA_ADDED, ARRAY_TYPE_ITEM_SCHEMA_REMOVED)) { itemSchema.accept(new SchemaDiffVisitor(subCtx, original.getItemSchemas().get(index))); } super.visitItemSchema(index, itemSchema); @@ -176,8 +166,7 @@ public void visitContainedItemSchema(SchemaWrapper containedItemSchema) { ctx.log("visitContainedItemSchema: " + containedItemSchema); DiffContext subCtx = ctx.sub("containedItemSchema"); if (diffSubschemaAddedRemoved(subCtx, original.getContainedItemSchema(), containedItemSchema, - ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_ADDED, - ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_REMOVED)) { + ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_ADDED, ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_REMOVED)) { containedItemSchema.accept(new SchemaDiffVisitor(subCtx, original.getContainedItemSchema())); } super.visitContainedItemSchema(containedItemSchema); diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/CombinedSchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/CombinedSchemaDiffVisitor.java index a7217b6df3..0416882707 100644 --- 
a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/CombinedSchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/CombinedSchemaDiffVisitor.java @@ -33,7 +33,6 @@ public class CombinedSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private final DiffContext ctx; private final CombinedSchema original; @@ -46,8 +45,9 @@ public CombinedSchemaDiffVisitor(DiffContext ctx, CombinedSchema original) { public void visitCombinedSchema(CombinedSchemaWrapper schema) { // Check if the criterion has changed if (diffObjectIdentity(ctx.sub("[criterion]"), original.getCriterion(), schema.getCriterion(), - CombinedSchema.ANY_CRITERION, UNDEFINED_UNUSED, UNDEFINED_UNUSED, COMBINED_TYPE_CRITERION_EXTENDED, - COMBINED_TYPE_CRITERION_NARROWED, COMBINED_TYPE_CRITERION_CHANGED)) { + CombinedSchema.ANY_CRITERION, UNDEFINED_UNUSED, UNDEFINED_UNUSED, + COMBINED_TYPE_CRITERION_EXTENDED, COMBINED_TYPE_CRITERION_NARROWED, + COMBINED_TYPE_CRITERION_CHANGED)) { // prevent further analysis if it did super.visitCombinedSchema(schema); } @@ -71,17 +71,16 @@ public void visitAllOfCombinedSchema(CombinedSchemaWrapper schema) { super.visitAllOfCombinedSchema(schema); } - private void processSubschemas(CombinedSchemaWrapper schema, DiffType sizeIncreased, DiffType sizeDecreased) { + private void processSubschemas(CombinedSchemaWrapper schema, DiffType sizeIncreased, + DiffType sizeDecreased) { List originalSubschemas = new ArrayList<>(original.getSubschemas()); - List updatedSubschemas = new LinkedList<>(schema.getSubschemas()); // better for insert/remove + List updatedSubschemas = new LinkedList<>(schema.getSubschemas()); // better for + // insert/remove Map> compatibilityMap = new HashMap<>(); - diffInteger(ctx.sub("[size]"), originalSubschemas.size(), updatedSubschemas.size(), - UNDEFINED_UNUSED, - UNDEFINED_UNUSED, - sizeIncreased, - sizeDecreased); + diffInteger(ctx.sub("[size]"), 
originalSubschemas.size(), updatedSubschemas.size(), UNDEFINED_UNUSED, + UNDEFINED_UNUSED, sizeIncreased, sizeDecreased); if (originalSubschemas.size() <= updatedSubschemas.size()) { // try to match them for (Schema o : originalSubschemas) { @@ -100,8 +99,8 @@ private void processSubschemas(CombinedSchemaWrapper schema, DiffType sizeIncrea } } - Optional>> first = compatibilityMap.entrySet().stream() - .min(comparingInt(a -> a.getValue().size())); + Optional>> first = compatibilityMap.entrySet() + .stream().min(comparingInt(a -> a.getValue().size())); while (first.isPresent()) { // remove a value from the first set Optional val = first.get().getValue().stream().findAny(); @@ -116,8 +115,7 @@ private void processSubschemas(CombinedSchemaWrapper schema, DiffType sizeIncrea if (first.get().getValue().isEmpty()) compatibilityMap.remove(first.get().getKey()); - first = compatibilityMap.entrySet().stream() - .min(comparingInt(a -> a.getValue().size())); + first = compatibilityMap.entrySet().stream().min(comparingInt(a -> a.getValue().size())); } } } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ConditionalSchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ConditionalSchemaDiffVisitor.java index 933b4f8c54..90c211a209 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ConditionalSchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ConditionalSchemaDiffVisitor.java @@ -28,7 +28,6 @@ public class ConditionalSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private final DiffContext ctx; private final ConditionalSchema original; @@ -45,39 +44,33 @@ public void visitConditionalSchema(ConditionalSchemaWrapper schema) { @Override public void visitIfSchema(SchemaWrapper ifSchema) { Schema o = original.getIfSchema().orElse(null); - 
compareSchema(ctx.sub("if"), o, ifSchema.getWrapped(), - CONDITIONAL_TYPE_IF_SCHEMA_ADDED, - CONDITIONAL_TYPE_IF_SCHEMA_REMOVED, - CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_BOTH, - CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD, - CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD, - CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_NONE); + compareSchema(ctx.sub("if"), o, ifSchema.getWrapped(), CONDITIONAL_TYPE_IF_SCHEMA_ADDED, + CONDITIONAL_TYPE_IF_SCHEMA_REMOVED, CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_BOTH, + CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD, + CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD, + CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_NONE); super.visitIfSchema(ifSchema); } @Override public void visitThenSchema(SchemaWrapper thenSchema) { Schema o = original.getThenSchema().orElse(null); - compareSchema(ctx.sub("then"), o, thenSchema.getWrapped(), - CONDITIONAL_TYPE_THEN_SCHEMA_ADDED, - CONDITIONAL_TYPE_THEN_SCHEMA_REMOVED, - CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_BOTH, - CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD, - CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD, - CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_NONE); + compareSchema(ctx.sub("then"), o, thenSchema.getWrapped(), CONDITIONAL_TYPE_THEN_SCHEMA_ADDED, + CONDITIONAL_TYPE_THEN_SCHEMA_REMOVED, CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_BOTH, + CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD, + CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD, + CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_NONE); super.visitThenSchema(thenSchema); } @Override public void visitElseSchema(SchemaWrapper elseSchema) { Schema o = original.getElseSchema().orElse(null); - compareSchema(ctx.sub("else"), o, elseSchema.getWrapped(), - CONDITIONAL_TYPE_ELSE_SCHEMA_ADDED, - CONDITIONAL_TYPE_ELSE_SCHEMA_REMOVED, - CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_BOTH, - CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD, - 
CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD, - CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_NONE); + compareSchema(ctx.sub("else"), o, elseSchema.getWrapped(), CONDITIONAL_TYPE_ELSE_SCHEMA_ADDED, + CONDITIONAL_TYPE_ELSE_SCHEMA_REMOVED, CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_BOTH, + CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD, + CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD, + CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_NONE); super.visitElseSchema(elseSchema); } } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ConstSchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ConstSchemaDiffVisitor.java index f7ebc178d4..09b40bf2b5 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ConstSchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ConstSchemaDiffVisitor.java @@ -10,7 +10,6 @@ public class ConstSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private final DiffContext ctx; private final ConstSchema original; @@ -26,10 +25,8 @@ public void visitConstSchema(ConstSchemaWrapper schema) { @Override public void visitConstValue(Object value) { - diffObject(ctx.sub("const"), original.getPermittedValue(), value, - UNDEFINED_UNUSED, - UNDEFINED_UNUSED, - CONST_TYPE_VALUE_CHANGED); + diffObject(ctx.sub("const"), original.getPermittedValue(), value, UNDEFINED_UNUSED, UNDEFINED_UNUSED, + CONST_TYPE_VALUE_CHANGED); super.visitConstValue(value); } } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffContext.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffContext.java index fd1e0ad7d8..dd7030ebde 100644 --- 
a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffContext.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffContext.java @@ -25,20 +25,18 @@ public class DiffContext { final Set visited = new HashSet<>(); - - private DiffContext(DiffContext rootContext, DiffContext parentContext, String pathUpdated, Set visited) { + private DiffContext(DiffContext rootContext, DiffContext parentContext, String pathUpdated, + Set visited) { this.rootContext = rootContext; this.parentContext = parentContext; this.pathUpdated = pathUpdated; this.visited.addAll(visited); } - public DiffContext sub(String pathFragmentUpdated) { return new DiffContext(rootContext, this, pathUpdated + "/" + pathFragmentUpdated, this.visited); } - private void initRootContext(DiffContext rootContext) { if (this.rootContext != null || parentContext != null) throw new IllegalStateException(); @@ -47,7 +45,7 @@ private void initRootContext(DiffContext rootContext) { } public static DiffContext createRootContext(String basePathFragmentUpdated, Set visited) { - if(visited == null) + if (visited == null) visited = new HashSet<>(); DiffContext rootContext = new DiffContext(null, null, basePathFragmentUpdated, visited); rootContext.initRootContext(rootContext); @@ -58,25 +56,19 @@ public static DiffContext createRootContext() { return createRootContext("", null); } - private void addToDifferenceSets(Difference difference) { diff.add(difference); if (rootContext != this) parentContext.addToDifferenceSets(difference); } - public void addDifference(DiffType type, Object originalSubchema, Object updatedSubchema) { - Difference difference = Difference.builder() - .diffType(type) - .pathOriginal("") - .pathUpdated(pathUpdated) - .subSchemaOriginal(Objects.toString(originalSubchema)) // make sure toString is good enough - .subSchemaUpdated(Objects.toString(updatedSubchema)) - .build(); + Difference difference = 
Difference.builder().diffType(type).pathOriginal("").pathUpdated(pathUpdated) + .subSchemaOriginal(Objects.toString(originalSubchema)) // make sure toString is good enough + .subSchemaUpdated(Objects.toString(updatedSubchema)).build(); addToDifferenceSets(difference); -// if(!type.isBackwardsCompatible()) -// log.warn("New incompatible difference found: " + difference); + // if(!type.isBackwardsCompatible()) + // log.warn("New incompatible difference found: " + difference); } public void log(String message) { @@ -87,7 +79,6 @@ public Set getDiff() { return new HashSet<>(diff); } - /** * Return true, if this context contains an incompatible difference. */ @@ -96,7 +87,8 @@ public boolean foundIncompatibleDifference() { } public Set getIncompatibleDifferences() { - return diff.stream().filter(d -> !d.getDiffType().isBackwardsCompatible()).collect(Collectors.toSet()); + return diff.stream().filter(d -> !d.getDiffType().isBackwardsCompatible()) + .collect(Collectors.toSet()); } public boolean foundAllDifferencesAreCompatible() { @@ -105,10 +97,7 @@ public boolean foundAllDifferencesAreCompatible() { @Override public String toString() { - return "DiffContext {" + - " foundAllDifferencesAreCompatible = " + foundAllDifferencesAreCompatible() + - ", diff = " + diff + - ", pathAtUpdated = '" + pathUpdated + "'" + - " }"; + return "DiffContext {" + " foundAllDifferencesAreCompatible = " + foundAllDifferencesAreCompatible() + + ", diff = " + diff + ", pathAtUpdated = '" + pathUpdated + "'" + " }"; } } \ No newline at end of file diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffType.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffType.java index 31b1cfcc5a..2601c775d0 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffType.java +++ 
b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffType.java @@ -2,232 +2,175 @@ public enum DiffType { - SUBSCHEMA_TYPE_CHANGED(false), - SUBSCHEMA_TYPE_CHANGED_TO_EMPTY_OR_TRUE(true), - - OBJECT_TYPE_REQUIRED_PROPERTIES_ADDED(false), - OBJECT_TYPE_REQUIRED_PROPERTIES_REMOVED(true), - OBJECT_TYPE_REQUIRED_PROPERTIES_CHANGED(true), - OBJECT_TYPE_REQUIRED_PROPERTIES_MEMBER_ADDED(false), - OBJECT_TYPE_REQUIRED_PROPERTIES_MEMBER_REMOVED(true), - - OBJECT_TYPE_PROPERTY_SCHEMA_ADDED(false), - OBJECT_TYPE_PROPERTY_SCHEMA_REMOVED(true), - - OBJECT_TYPE_MIN_PROPERTIES_ADDED(false), - OBJECT_TYPE_MIN_PROPERTIES_REMOVED(true), - OBJECT_TYPE_MIN_PROPERTIES_INCREASED(false), - OBJECT_TYPE_MIN_PROPERTIES_DECREASED(true), - - OBJECT_TYPE_MAX_PROPERTIES_ADDED(false), - OBJECT_TYPE_MAX_PROPERTIES_REMOVED(true), - OBJECT_TYPE_MAX_PROPERTIES_INCREASED(true), - OBJECT_TYPE_MAX_PROPERTIES_DECREASED(false), - - OBJECT_TYPE_ADDITIONAL_PROPERTIES_FALSE_TO_TRUE(true), - OBJECT_TYPE_ADDITIONAL_PROPERTIES_TRUE_TO_FALSE(false), - OBJECT_TYPE_ADDITIONAL_PROPERTIES_BOOLEAN_UNCHANGED(true), - OBJECT_TYPE_ADDITIONAL_PROPERTIES_EXTENDED(true), - OBJECT_TYPE_ADDITIONAL_PROPERTIES_NARROWED(false), - OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_UNCHANGED(true), - OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_CHANGED(false), - - OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_ADDED(false), - OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_REMOVED(true), - - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_ADDED(false), - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_REMOVED(true), - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_CHANGED(true), - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_MEMBER_ADDED(false), - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_MEMBER_REMOVED(true), - - OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_CHANGED(true), - OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_ADDED(false), - OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_REMOVED(true), - - OBJECT_TYPE_SCHEMA_DEPENDENCIES_ADDED(false), 
- OBJECT_TYPE_SCHEMA_DEPENDENCIES_REMOVED(true), - OBJECT_TYPE_SCHEMA_DEPENDENCIES_CHANGED(true), - OBJECT_TYPE_SCHEMA_DEPENDENCIES_MEMBER_ADDED(false), - OBJECT_TYPE_SCHEMA_DEPENDENCIES_MEMBER_REMOVED(true), - - OBJECT_TYPE_PROPERTY_SCHEMAS_ADDED(false), - OBJECT_TYPE_PROPERTY_SCHEMAS_REMOVED(true), - OBJECT_TYPE_PROPERTY_SCHEMAS_CHANGED(false), - OBJECT_TYPE_PROPERTY_SCHEMAS_MEMBER_ADDED(false), - OBJECT_TYPE_PROPERTY_SCHEMAS_MEMBER_REMOVED(true), - OBJECT_TYPE_PROPERTY_SCHEMAS_EXTENDED(true), - OBJECT_TYPE_PROPERTY_SCHEMAS_NARROWED(false), - - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_ADDED(false), - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_REMOVED(true), - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_CHANGED(true), - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_MEMBER_ADDED(false), - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_MEMBER_REMOVED(true), - - - ARRAY_TYPE_MIN_ITEMS_ADDED(false), - ARRAY_TYPE_MIN_ITEMS_REMOVED(true), - ARRAY_TYPE_MIN_ITEMS_INCREASED(false), - ARRAY_TYPE_MIN_ITEMS_DECREASED(true), - - ARRAY_TYPE_MAX_ITEMS_ADDED(false), - ARRAY_TYPE_MAX_ITEMS_REMOVED(true), - ARRAY_TYPE_MAX_ITEMS_INCREASED(true), - ARRAY_TYPE_MAX_ITEMS_DECREASED(false), - - ARRAY_TYPE_UNIQUE_ITEMS_FALSE_TO_TRUE(false), - ARRAY_TYPE_UNIQUE_ITEMS_TRUE_TO_FALSE(true), - ARRAY_TYPE_UNIQUE_ITEMS_BOOLEAN_UNCHANGED(true), - - ARRAY_TYPE_ADDITIONAL_ITEMS_FALSE_TO_TRUE(true), - ARRAY_TYPE_ADDITIONAL_ITEMS_TRUE_TO_FALSE(false), - ARRAY_TYPE_ADDITIONAL_ITEMS_BOOLEAN_UNCHANGED(true), - ARRAY_TYPE_ADDITIONAL_ITEMS_EXTENDED(true), - ARRAY_TYPE_ADDITIONAL_ITEMS_NARROWED(false), - ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_UNCHANGED(true), - ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_CHANGED(false), - - ARRAY_TYPE_ALL_ITEM_SCHEMA_ADDED(false), - ARRAY_TYPE_ALL_ITEM_SCHEMA_REMOVED(true), - - ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_ADDED(false), - ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_REMOVED(true), - - ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_ADDED(false), - ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_REMOVED(true), - - 
ARRAY_TYPE_ITEMS_SCHEMAS_LENGTH_INCREASED(false), - ARRAY_TYPE_ITEMS_SCHEMAS_LENGTH_DECREASED(false), - - ARRAY_TYPE_ITEM_SCHEMA_ADDED(false), - ARRAY_TYPE_ITEM_SCHEMA_REMOVED(false), // TODO where - - ARRAY_TYPE_ITEM_SCHEMAS_EXTENDED(true), - ARRAY_TYPE_ITEM_SCHEMAS_NARROWED(false), - ARRAY_TYPE_ITEM_SCHEMAS_CHANGED(false), - - STRING_TYPE_MIN_LENGTH_ADDED(false), - STRING_TYPE_MIN_LENGTH_REMOVED(true), - STRING_TYPE_MIN_LENGTH_INCREASED(false), - STRING_TYPE_MIN_LENGTH_DECREASED(true), - - STRING_TYPE_MAX_LENGTH_ADDED(false), - STRING_TYPE_MAX_LENGTH_REMOVED(true), - STRING_TYPE_MAX_LENGTH_INCREASED(true), - STRING_TYPE_MAX_LENGTH_DECREASED(false), - - STRING_TYPE_PATTERN_ADDED(false), - STRING_TYPE_PATTERN_REMOVED(true), - STRING_TYPE_PATTERN_CHANGED(false), - - STRING_TYPE_FORMAT_ADDED(false), - STRING_TYPE_FORMAT_REMOVED(true), - STRING_TYPE_FORMAT_CHANGED(false), - - STRING_TYPE_CONTENT_ENCODING_ADDED(false), - STRING_TYPE_CONTENT_ENCODING_REMOVED(true), - STRING_TYPE_CONTENT_ENCODING_CHANGED(false), - - STRING_TYPE_CONTENT_MEDIA_TYPE_ADDED(false), - STRING_TYPE_CONTENT_MEDIA_TYPE_REMOVED(true), - STRING_TYPE_CONTENT_MEDIA_TYPE_CHANGED(false), + SUBSCHEMA_TYPE_CHANGED(false), SUBSCHEMA_TYPE_CHANGED_TO_EMPTY_OR_TRUE(true), + OBJECT_TYPE_REQUIRED_PROPERTIES_ADDED(false), OBJECT_TYPE_REQUIRED_PROPERTIES_REMOVED( + true), OBJECT_TYPE_REQUIRED_PROPERTIES_CHANGED( + true), OBJECT_TYPE_REQUIRED_PROPERTIES_MEMBER_ADDED( + false), OBJECT_TYPE_REQUIRED_PROPERTIES_MEMBER_REMOVED(true), - CONST_TYPE_VALUE_CHANGED(false), + OBJECT_TYPE_PROPERTY_SCHEMA_ADDED(false), OBJECT_TYPE_PROPERTY_SCHEMA_REMOVED(true), + OBJECT_TYPE_MIN_PROPERTIES_ADDED(false), OBJECT_TYPE_MIN_PROPERTIES_REMOVED( + true), OBJECT_TYPE_MIN_PROPERTIES_INCREASED(false), OBJECT_TYPE_MIN_PROPERTIES_DECREASED(true), - ENUM_TYPE_VALUES_CHANGED(true), - ENUM_TYPE_VALUES_MEMBER_ADDED(true), - ENUM_TYPE_VALUES_MEMBER_REMOVED(false), + OBJECT_TYPE_MAX_PROPERTIES_ADDED(false), OBJECT_TYPE_MAX_PROPERTIES_REMOVED( 
+ true), OBJECT_TYPE_MAX_PROPERTIES_INCREASED(true), OBJECT_TYPE_MAX_PROPERTIES_DECREASED(false), + OBJECT_TYPE_ADDITIONAL_PROPERTIES_FALSE_TO_TRUE(true), OBJECT_TYPE_ADDITIONAL_PROPERTIES_TRUE_TO_FALSE( + false), OBJECT_TYPE_ADDITIONAL_PROPERTIES_BOOLEAN_UNCHANGED( + true), OBJECT_TYPE_ADDITIONAL_PROPERTIES_EXTENDED( + true), OBJECT_TYPE_ADDITIONAL_PROPERTIES_NARROWED( + false), OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_UNCHANGED( + true), OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_CHANGED(false), - NUMBER_TYPE_MINIMUM_ADDED(false), - NUMBER_TYPE_MINIMUM_REMOVED(true), - NUMBER_TYPE_MINIMUM_INCREASED(false), - NUMBER_TYPE_MINIMUM_DECREASED(true), + OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_ADDED(false), OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_REMOVED( + true), - NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_FALSE_TO_TRUE(false), - NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_TRUE_TO_FALSE(true), - NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_UNCHANGED(true), + OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_ADDED(false), OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_REMOVED( + true), OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_CHANGED( + true), OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_MEMBER_ADDED( + false), OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_MEMBER_REMOVED(true), - NUMBER_TYPE_EXCLUSIVE_MINIMUM_ADDED(false), - NUMBER_TYPE_EXCLUSIVE_MINIMUM_REMOVED(true), - NUMBER_TYPE_EXCLUSIVE_MINIMUM_INCREASED(false), - NUMBER_TYPE_EXCLUSIVE_MINIMUM_DECREASED(true), + OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_CHANGED( + true), OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_ADDED( + false), OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_REMOVED(true), - NUMBER_TYPE_MAXIMUM_ADDED(false), - NUMBER_TYPE_MAXIMUM_REMOVED(true), - NUMBER_TYPE_MAXIMUM_INCREASED(true), - NUMBER_TYPE_MAXIMUM_DECREASED(false), + OBJECT_TYPE_SCHEMA_DEPENDENCIES_ADDED(false), OBJECT_TYPE_SCHEMA_DEPENDENCIES_REMOVED( + true), OBJECT_TYPE_SCHEMA_DEPENDENCIES_CHANGED( + true), OBJECT_TYPE_SCHEMA_DEPENDENCIES_MEMBER_ADDED( + false), 
OBJECT_TYPE_SCHEMA_DEPENDENCIES_MEMBER_REMOVED(true), - NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_FALSE_TO_TRUE(false), - NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_TRUE_TO_FALSE(true), - NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_UNCHANGED(true), + OBJECT_TYPE_PROPERTY_SCHEMAS_ADDED(false), OBJECT_TYPE_PROPERTY_SCHEMAS_REMOVED( + true), OBJECT_TYPE_PROPERTY_SCHEMAS_CHANGED(false), OBJECT_TYPE_PROPERTY_SCHEMAS_MEMBER_ADDED( + false), OBJECT_TYPE_PROPERTY_SCHEMAS_MEMBER_REMOVED( + true), OBJECT_TYPE_PROPERTY_SCHEMAS_EXTENDED( + true), OBJECT_TYPE_PROPERTY_SCHEMAS_NARROWED(false), - NUMBER_TYPE_EXCLUSIVE_MAXIMUM_ADDED(false), - NUMBER_TYPE_EXCLUSIVE_MAXIMUM_REMOVED(true), - NUMBER_TYPE_EXCLUSIVE_MAXIMUM_INCREASED(true), - NUMBER_TYPE_EXCLUSIVE_MAXIMUM_DECREASED(false), + OBJECT_TYPE_PATTERN_PROPERTY_KEYS_ADDED(false), OBJECT_TYPE_PATTERN_PROPERTY_KEYS_REMOVED( + true), OBJECT_TYPE_PATTERN_PROPERTY_KEYS_CHANGED( + true), OBJECT_TYPE_PATTERN_PROPERTY_KEYS_MEMBER_ADDED( + false), OBJECT_TYPE_PATTERN_PROPERTY_KEYS_MEMBER_REMOVED(true), - NUMBER_TYPE_MULTIPLE_OF_ADDED(false), - NUMBER_TYPE_MULTIPLE_OF_REMOVED(true), - NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_DIVISIBLE(true), - NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_NOT_DIVISIBLE(false), + ARRAY_TYPE_MIN_ITEMS_ADDED(false), ARRAY_TYPE_MIN_ITEMS_REMOVED(true), ARRAY_TYPE_MIN_ITEMS_INCREASED( + false), ARRAY_TYPE_MIN_ITEMS_DECREASED(true), - NUMBER_TYPE_INTEGER_REQUIRED_FALSE_TO_TRUE(false), - NUMBER_TYPE_INTEGER_REQUIRED_TRUE_TO_FALSE(true), - NUMBER_TYPE_INTEGER_REQUIRED_UNCHANGED(true), + ARRAY_TYPE_MAX_ITEMS_ADDED(false), ARRAY_TYPE_MAX_ITEMS_REMOVED(true), ARRAY_TYPE_MAX_ITEMS_INCREASED( + true), ARRAY_TYPE_MAX_ITEMS_DECREASED(false), - COMBINED_TYPE_CRITERION_EXTENDED(true), - COMBINED_TYPE_CRITERION_NARROWED(false), - COMBINED_TYPE_CRITERION_CHANGED(false), + ARRAY_TYPE_UNIQUE_ITEMS_FALSE_TO_TRUE(false), ARRAY_TYPE_UNIQUE_ITEMS_TRUE_TO_FALSE( + true), ARRAY_TYPE_UNIQUE_ITEMS_BOOLEAN_UNCHANGED(true), - COMBINED_TYPE_ONE_OF_SIZE_INCREASED(true), // As long 
as the existing subschemas maintain compatibility, checked separately. - COMBINED_TYPE_ONE_OF_SIZE_DECREASED(false), + ARRAY_TYPE_ADDITIONAL_ITEMS_FALSE_TO_TRUE(true), ARRAY_TYPE_ADDITIONAL_ITEMS_TRUE_TO_FALSE( + false), ARRAY_TYPE_ADDITIONAL_ITEMS_BOOLEAN_UNCHANGED(true), ARRAY_TYPE_ADDITIONAL_ITEMS_EXTENDED( + true), ARRAY_TYPE_ADDITIONAL_ITEMS_NARROWED( + false), ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_UNCHANGED( + true), ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_CHANGED(false), - COMBINED_TYPE_ALL_OF_SIZE_INCREASED(false), - COMBINED_TYPE_ALL_OF_SIZE_DECREASED(true), + ARRAY_TYPE_ALL_ITEM_SCHEMA_ADDED(false), ARRAY_TYPE_ALL_ITEM_SCHEMA_REMOVED(true), - COMBINED_TYPE_ANY_OF_SIZE_INCREASED(true), // As long as the existing subschemas maintain compatibility, checked separately. - COMBINED_TYPE_ANY_OF_SIZE_DECREASED(false), + ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_ADDED(false), ARRAY_TYPE_SCHEMA_OF_ADDITIONAL_ITEMS_REMOVED(true), - COMBINED_TYPE_SUBSCHEMA_NOT_COMPATIBLE(false), + ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_ADDED(false), ARRAY_TYPE_CONTAINED_ITEM_SCHEMA_REMOVED(true), + ARRAY_TYPE_ITEMS_SCHEMAS_LENGTH_INCREASED(false), ARRAY_TYPE_ITEMS_SCHEMAS_LENGTH_DECREASED(false), - CONDITIONAL_TYPE_IF_SCHEMA_ADDED(false), - CONDITIONAL_TYPE_IF_SCHEMA_REMOVED(false), - CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_BOTH(true), - CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD(false), - CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD(false), - CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_NONE(false), + ARRAY_TYPE_ITEM_SCHEMA_ADDED(false), ARRAY_TYPE_ITEM_SCHEMA_REMOVED(false), // TODO where - CONDITIONAL_TYPE_THEN_SCHEMA_ADDED(false), - CONDITIONAL_TYPE_THEN_SCHEMA_REMOVED(true), - CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_BOTH(true), - CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD(true), - CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD(false), - CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_NONE(false), + ARRAY_TYPE_ITEM_SCHEMAS_EXTENDED(true), 
ARRAY_TYPE_ITEM_SCHEMAS_NARROWED( + false), ARRAY_TYPE_ITEM_SCHEMAS_CHANGED(false), - CONDITIONAL_TYPE_ELSE_SCHEMA_ADDED(false), - CONDITIONAL_TYPE_ELSE_SCHEMA_REMOVED(true), - CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_BOTH(true), - CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD(true), - CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD(false), - CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_NONE(false), + STRING_TYPE_MIN_LENGTH_ADDED(false), STRING_TYPE_MIN_LENGTH_REMOVED( + true), STRING_TYPE_MIN_LENGTH_INCREASED(false), STRING_TYPE_MIN_LENGTH_DECREASED(true), - REFERENCE_TYPE_TARGET_SCHEMA_ADDED(false), - REFERENCE_TYPE_TARGET_SCHEMA_REMOVED(false), // TODO Would this cause validation error? + STRING_TYPE_MAX_LENGTH_ADDED(false), STRING_TYPE_MAX_LENGTH_REMOVED( + true), STRING_TYPE_MAX_LENGTH_INCREASED(true), STRING_TYPE_MAX_LENGTH_DECREASED(false), + STRING_TYPE_PATTERN_ADDED(false), STRING_TYPE_PATTERN_REMOVED(true), STRING_TYPE_PATTERN_CHANGED(false), - NOT_TYPE_SCHEMA_COMPATIBLE_BOTH(true), - NOT_TYPE_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD(false), - NOT_TYPE_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD(true), - NOT_TYPE_SCHEMA_COMPATIBLE_NONE(false), + STRING_TYPE_FORMAT_ADDED(false), STRING_TYPE_FORMAT_REMOVED(true), STRING_TYPE_FORMAT_CHANGED(false), + STRING_TYPE_CONTENT_ENCODING_ADDED(false), STRING_TYPE_CONTENT_ENCODING_REMOVED( + true), STRING_TYPE_CONTENT_ENCODING_CHANGED(false), - UNDEFINED_UNUSED(false); // Should not be used. 
+ STRING_TYPE_CONTENT_MEDIA_TYPE_ADDED(false), STRING_TYPE_CONTENT_MEDIA_TYPE_REMOVED( + true), STRING_TYPE_CONTENT_MEDIA_TYPE_CHANGED(false), + + CONST_TYPE_VALUE_CHANGED(false), + + ENUM_TYPE_VALUES_CHANGED(true), ENUM_TYPE_VALUES_MEMBER_ADDED(true), ENUM_TYPE_VALUES_MEMBER_REMOVED( + false), + + NUMBER_TYPE_MINIMUM_ADDED(false), NUMBER_TYPE_MINIMUM_REMOVED(true), NUMBER_TYPE_MINIMUM_INCREASED( + false), NUMBER_TYPE_MINIMUM_DECREASED(true), + + NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_FALSE_TO_TRUE(false), NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_TRUE_TO_FALSE( + true), NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_UNCHANGED(true), + + NUMBER_TYPE_EXCLUSIVE_MINIMUM_ADDED(false), NUMBER_TYPE_EXCLUSIVE_MINIMUM_REMOVED( + true), NUMBER_TYPE_EXCLUSIVE_MINIMUM_INCREASED( + false), NUMBER_TYPE_EXCLUSIVE_MINIMUM_DECREASED(true), + + NUMBER_TYPE_MAXIMUM_ADDED(false), NUMBER_TYPE_MAXIMUM_REMOVED(true), NUMBER_TYPE_MAXIMUM_INCREASED( + true), NUMBER_TYPE_MAXIMUM_DECREASED(false), + + NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_FALSE_TO_TRUE(false), NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_TRUE_TO_FALSE( + true), NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_UNCHANGED(true), + NUMBER_TYPE_EXCLUSIVE_MAXIMUM_ADDED(false), NUMBER_TYPE_EXCLUSIVE_MAXIMUM_REMOVED( + true), NUMBER_TYPE_EXCLUSIVE_MAXIMUM_INCREASED( + true), NUMBER_TYPE_EXCLUSIVE_MAXIMUM_DECREASED(false), + + NUMBER_TYPE_MULTIPLE_OF_ADDED(false), NUMBER_TYPE_MULTIPLE_OF_REMOVED( + true), NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_DIVISIBLE( + true), NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_NOT_DIVISIBLE(false), + + NUMBER_TYPE_INTEGER_REQUIRED_FALSE_TO_TRUE(false), NUMBER_TYPE_INTEGER_REQUIRED_TRUE_TO_FALSE( + true), NUMBER_TYPE_INTEGER_REQUIRED_UNCHANGED(true), + + COMBINED_TYPE_CRITERION_EXTENDED(true), COMBINED_TYPE_CRITERION_NARROWED( + false), COMBINED_TYPE_CRITERION_CHANGED(false), + + COMBINED_TYPE_ONE_OF_SIZE_INCREASED(true), // As long as the existing subschemas maintain compatibility, + // checked separately. 
+ COMBINED_TYPE_ONE_OF_SIZE_DECREASED(false), + + COMBINED_TYPE_ALL_OF_SIZE_INCREASED(false), COMBINED_TYPE_ALL_OF_SIZE_DECREASED(true), + + COMBINED_TYPE_ANY_OF_SIZE_INCREASED(true), // As long as the existing subschemas maintain compatibility, + // checked separately. + COMBINED_TYPE_ANY_OF_SIZE_DECREASED(false), + + COMBINED_TYPE_SUBSCHEMA_NOT_COMPATIBLE(false), + + CONDITIONAL_TYPE_IF_SCHEMA_ADDED(false), CONDITIONAL_TYPE_IF_SCHEMA_REMOVED( + false), CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_BOTH( + true), CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD( + false), CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD( + false), CONDITIONAL_TYPE_IF_SCHEMA_COMPATIBLE_NONE(false), + + CONDITIONAL_TYPE_THEN_SCHEMA_ADDED(false), CONDITIONAL_TYPE_THEN_SCHEMA_REMOVED( + true), CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_BOTH( + true), CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD( + true), CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD( + false), CONDITIONAL_TYPE_THEN_SCHEMA_COMPATIBLE_NONE(false), + + CONDITIONAL_TYPE_ELSE_SCHEMA_ADDED(false), CONDITIONAL_TYPE_ELSE_SCHEMA_REMOVED( + true), CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_BOTH( + true), CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD( + true), CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD( + false), CONDITIONAL_TYPE_ELSE_SCHEMA_COMPATIBLE_NONE(false), + + REFERENCE_TYPE_TARGET_SCHEMA_ADDED(false), REFERENCE_TYPE_TARGET_SCHEMA_REMOVED(false), // TODO Would this + // cause + // validation + // error? + + NOT_TYPE_SCHEMA_COMPATIBLE_BOTH(true), NOT_TYPE_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD( + false), NOT_TYPE_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD( + true), NOT_TYPE_SCHEMA_COMPATIBLE_NONE(false), + + UNDEFINED_UNUSED(false); // Should not be used. 
private String description; diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffUtil.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffUtil.java index 9134d9e62a..b49f8c88e9 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffUtil.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/DiffUtil.java @@ -21,7 +21,7 @@ public class DiffUtil { * @return true if both objects are present */ public static boolean diffAddedRemoved(DiffContext ctx, Object original, Object updated, - DiffType addedType, DiffType removedType) { + DiffType addedType, DiffType removedType) { if (original == null && updated != null) { ctx.addDifference(addedType, original, updated); } else if (original != null && updated == null) { @@ -33,8 +33,8 @@ public static boolean diffAddedRemoved(DiffContext ctx, Object original, Object } public static void diffSetChanged(DiffContext ctx, Set original, Set updated, - DiffType addedType, DiffType removedType, DiffType changedType, - DiffType addedMemberType, DiffType removedMemberType) { + DiffType addedType, DiffType removedType, DiffType changedType, DiffType addedMemberType, + DiffType removedMemberType) { if (diffAddedRemoved(ctx, original, updated, addedType, removedType)) { boolean changed = false; Set copyUpdated = new HashSet<>(updated); @@ -56,12 +56,11 @@ public static void diffSetChanged(DiffContext ctx, Set original, Set u } } - /** * @return true if both objects are present */ public static boolean diffSubschemaAddedRemoved(DiffContext ctx, Object original, Object updated, - DiffType addedType, DiffType removedType) { + DiffType addedType, DiffType removedType) { if (diffAddedRemoved(ctx, original, updated, addedType, removedType)) { return true; } @@ -73,9 +72,8 @@ public static boolean diffSubschemaAddedRemoved(DiffContext ctx, Object original * 
* @return true if the integers are defined and equal */ - public static boolean diffInteger(DiffContext ctx, Integer original, Integer updated, - DiffType addedType, DiffType removedType, - DiffType increasedType, DiffType decreasedType) { + public static boolean diffInteger(DiffContext ctx, Integer original, Integer updated, DiffType addedType, + DiffType removedType, DiffType increasedType, DiffType decreasedType) { if (diffAddedRemoved(ctx, original, updated, addedType, removedType)) { if (original < updated) { ctx.addDifference(increasedType, original, updated); @@ -93,9 +91,8 @@ public static boolean diffInteger(DiffContext ctx, Integer original, Integer upd * * @return true if the numbers are the same */ - public static boolean diffNumber(DiffContext ctx, Number original, Number updated, - DiffType addedType, DiffType removedType, - DiffType increasedType, DiffType decreasedType) { + public static boolean diffNumber(DiffContext ctx, Number original, Number updated, DiffType addedType, + DiffType removedType, DiffType increasedType, DiffType decreasedType) { if (diffAddedRemoved(ctx, original, updated, addedType, removedType)) { BigDecimal o = new BigDecimal(original.toString()); // Not pretty but it works:/ BigDecimal u = new BigDecimal(updated.toString()); @@ -111,7 +108,7 @@ public static boolean diffNumber(DiffContext ctx, Number original, Number update } public static void diffNumberOriginalMultipleOfUpdated(DiffContext ctx, Number original, Number updated, - DiffType multipleOfType, DiffType notMultipleOfType) { + DiffType multipleOfType, DiffType notMultipleOfType) { requireNonNull(original); requireNonNull(updated); BigDecimal o = new BigDecimal(original.toString()); // Not pretty but it works:/ @@ -127,8 +124,9 @@ public static void diffNumberOriginalMultipleOfUpdated(DiffContext ctx, Number o /** * */ - public static boolean diffBooleanTransition(DiffContext ctx, Boolean original, Boolean updated, Boolean defaultValue, - DiffType changeFalseToTrue, 
DiffType changeTrueToFalse, DiffType unchanged) { + public static boolean diffBooleanTransition(DiffContext ctx, Boolean original, Boolean updated, + Boolean defaultValue, DiffType changeFalseToTrue, DiffType changeTrueToFalse, + DiffType unchanged) { if (original == null) original = defaultValue; if (updated == null) @@ -147,10 +145,9 @@ public static boolean diffBooleanTransition(DiffContext ctx, Boolean original, B /** * added/removed/changed (using equals) */ - public static void diffObject(DiffContext ctx, Object original, Object updated, - DiffType addedType, DiffType removedType, DiffType changedType) { - if (diffAddedRemoved(ctx, original, updated, addedType, removedType) - && !original.equals(updated)) { + public static void diffObject(DiffContext ctx, Object original, Object updated, DiffType addedType, + DiffType removedType, DiffType changedType) { + if (diffAddedRemoved(ctx, original, updated, addedType, removedType) && !original.equals(updated)) { ctx.addDifference(changedType, original, updated); } } @@ -158,14 +155,13 @@ public static void diffObject(DiffContext ctx, Object original, Object updated, /** * added/removed/changed (using equals), with a default value specified */ - public static void diffObjectDefault(DiffContext ctx, Object original, Object updated, Object defaultValue, - DiffType addedType, DiffType removedType, DiffType changedType) { + public static void diffObjectDefault(DiffContext ctx, Object original, Object updated, + Object defaultValue, DiffType addedType, DiffType removedType, DiffType changedType) { if (Objects.equals(defaultValue, original)) original = null; if (Objects.equals(defaultValue, updated)) updated = null; - if (diffAddedRemoved(ctx, original, updated, addedType, removedType) - && !original.equals(updated)) { + if (diffAddedRemoved(ctx, original, updated, addedType, removedType) && !original.equals(updated)) { ctx.addDifference(changedType, original, updated); } } @@ -176,10 +172,9 @@ public static void 
diffObjectDefault(DiffContext ctx, Object original, Object up * @return true if they are equal */ public static boolean diffObjectIdentity(DiffContext ctx, Object original, Object updated, Object target, - DiffType addedType, DiffType removedType, DiffType extendedType, - DiffType narrowedType, DiffType changedType) { - if (diffAddedRemoved(ctx, original, updated, addedType, removedType) - && original != updated) { + DiffType addedType, DiffType removedType, DiffType extendedType, DiffType narrowedType, + DiffType changedType) { + if (diffAddedRemoved(ctx, original, updated, addedType, removedType) && original != updated) { if (updated == target) { ctx.addDifference(extendedType, original, updated); } else if (original == target) { @@ -193,9 +188,9 @@ public static boolean diffObjectIdentity(DiffContext ctx, Object original, Objec } public static void diffSubSchemasAdded(DiffContext ctx, List addedSchemas, - boolean originalPermitsAdditional, SchemaWrapper originalSchemaOfAdditional, - boolean updatedPermitsAdditional, DiffType extendedType, - DiffType narrowedType, DiffType changedType) { + boolean originalPermitsAdditional, SchemaWrapper originalSchemaOfAdditional, + boolean updatedPermitsAdditional, DiffType extendedType, DiffType narrowedType, + DiffType changedType) { if (!originalPermitsAdditional) { // original schema: additional = false ctx.addDifference(extendedType, null, addedSchemas); @@ -205,11 +200,11 @@ public static void diffSubSchemasAdded(DiffContext ctx, List adde ctx.addDifference(narrowedType, true, addedSchemas); } else { // original schema: additional = schema - if (!updatedPermitsAdditional && - areListOfSchemasCompatible(ctx, addedSchemas, originalSchemaOfAdditional, false)) { + if (!updatedPermitsAdditional + && areListOfSchemasCompatible(ctx, addedSchemas, originalSchemaOfAdditional, false)) { ctx.addDifference(narrowedType, originalSchemaOfAdditional, addedSchemas); - } else if (updatedPermitsAdditional && - 
areListOfSchemasCompatible(ctx, addedSchemas, originalSchemaOfAdditional, true)) { + } else if (updatedPermitsAdditional + && areListOfSchemasCompatible(ctx, addedSchemas, originalSchemaOfAdditional, true)) { ctx.addDifference(extendedType, originalSchemaOfAdditional, addedSchemas); } else { ctx.addDifference(changedType, originalSchemaOfAdditional, addedSchemas); @@ -219,9 +214,9 @@ public static void diffSubSchemasAdded(DiffContext ctx, List adde } public static void diffSubSchemasRemoved(DiffContext ctx, List removedSchemas, - boolean updatedPermitsAdditional, SchemaWrapper updatedSchemaOfAdditional, - boolean originalPermitsAdditional, DiffType narrowedType, - DiffType extendedType, DiffType changedType) { + boolean updatedPermitsAdditional, SchemaWrapper updatedSchemaOfAdditional, + boolean originalPermitsAdditional, DiffType narrowedType, DiffType extendedType, + DiffType changedType) { if (!updatedPermitsAdditional) { // updated schema: additional = false ctx.addDifference(narrowedType, removedSchemas, null); @@ -231,11 +226,11 @@ public static void diffSubSchemasRemoved(DiffContext ctx, List re ctx.addDifference(extendedType, removedSchemas, true); } else { // updated schema: additional = schema - if (!originalPermitsAdditional && - areListOfSchemasCompatible(ctx, removedSchemas, updatedSchemaOfAdditional, false)) { + if (!originalPermitsAdditional && areListOfSchemasCompatible(ctx, removedSchemas, + updatedSchemaOfAdditional, false)) { ctx.addDifference(extendedType, removedSchemas, updatedSchemaOfAdditional); - } else if (originalPermitsAdditional && - areListOfSchemasCompatible(ctx, removedSchemas, updatedSchemaOfAdditional, true)) { + } else if (originalPermitsAdditional + && areListOfSchemasCompatible(ctx, removedSchemas, updatedSchemaOfAdditional, true)) { ctx.addDifference(narrowedType, removedSchemas, updatedSchemaOfAdditional); } else { ctx.addDifference(changedType, removedSchemas, updatedSchemaOfAdditional); @@ -245,7 +240,7 @@ public static void 
diffSubSchemasRemoved(DiffContext ctx, List re } public static void diffSchemaOrTrue(DiffContext ctx, Schema original, Schema updated, DiffType bothType, - DiffType extendedType, DiffType narrowedType, DiffType noneType) { + DiffType extendedType, DiffType narrowedType, DiffType noneType) { if (original != null && updated == null) { // schema => true ctx.addDifference(extendedType, original, updated); @@ -258,20 +253,17 @@ public static void diffSchemaOrTrue(DiffContext ctx, Schema original, Schema upd } } - public static void compareSchema(DiffContext ctx, Schema original, Schema updated, - DiffType addedType, DiffType removedType, - DiffType bothType, - DiffType backwardNotForwardType, - DiffType forwardNotBackwardType, - DiffType noneType) { + public static void compareSchema(DiffContext ctx, Schema original, Schema updated, DiffType addedType, + DiffType removedType, DiffType bothType, DiffType backwardNotForwardType, + DiffType forwardNotBackwardType, DiffType noneType) { if (diffAddedRemoved(ctx, original, updated, addedType, removedType)) { - compareSchemaWhenExist(ctx, original, updated, bothType, backwardNotForwardType, forwardNotBackwardType, - noneType); + compareSchemaWhenExist(ctx, original, updated, bothType, backwardNotForwardType, + forwardNotBackwardType, noneType); } } - public static void compareSchemaWhenExist(DiffContext ctx, Schema original, Schema updated, DiffType bothType, - DiffType backwardType, DiffType forwardType, DiffType noneType) { + public static void compareSchemaWhenExist(DiffContext ctx, Schema original, Schema updated, + DiffType bothType, DiffType backwardType, DiffType forwardType, DiffType noneType) { boolean backward = isSchemaCompatible(ctx, original, updated, true); boolean forward = isSchemaCompatible(ctx, original, updated, false); @@ -289,17 +281,19 @@ public static void compareSchemaWhenExist(DiffContext ctx, Schema original, Sche } } - public static boolean areListOfSchemasCompatible(DiffContext ctx, List 
itemSchemas, SchemaWrapper additionalSchema, - boolean notReverse) { - for (SchemaWrapper itemSchema: itemSchemas) { - if (!isSchemaCompatible(ctx, itemSchema.getWrapped(), additionalSchema.getWrapped(), notReverse)) { + public static boolean areListOfSchemasCompatible(DiffContext ctx, List itemSchemas, + SchemaWrapper additionalSchema, boolean notReverse) { + for (SchemaWrapper itemSchema : itemSchemas) { + if (!isSchemaCompatible(ctx, itemSchema.getWrapped(), additionalSchema.getWrapped(), + notReverse)) { return false; } } return true; } - public static boolean isSchemaCompatible(DiffContext ctx, Schema original, Schema updated, boolean backward) { + public static boolean isSchemaCompatible(DiffContext ctx, Schema original, Schema updated, + boolean backward) { DiffContext rootCtx = DiffContext.createRootContext("", ctx.visited); if (backward) { new SchemaDiffVisitor(rootCtx, original).visit(wrap(updated)); diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/EnumSchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/EnumSchemaDiffVisitor.java index 19190d85f3..67d065328c 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/EnumSchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/EnumSchemaDiffVisitor.java @@ -14,7 +14,6 @@ public class EnumSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private final DiffContext ctx; private final EnumSchema original; @@ -30,13 +29,8 @@ public void visitEnumSchema(EnumSchemaWrapper schema) { @Override public void visitEnumValues(Set values) { - diffSetChanged(ctx.sub("enum"), - original.getPossibleValues(), - values, - UNDEFINED_UNUSED, - UNDEFINED_UNUSED, - ENUM_TYPE_VALUES_CHANGED, - ENUM_TYPE_VALUES_MEMBER_ADDED, - ENUM_TYPE_VALUES_MEMBER_REMOVED); + diffSetChanged(ctx.sub("enum"), 
original.getPossibleValues(), values, UNDEFINED_UNUSED, + UNDEFINED_UNUSED, ENUM_TYPE_VALUES_CHANGED, ENUM_TYPE_VALUES_MEMBER_ADDED, + ENUM_TYPE_VALUES_MEMBER_REMOVED); } } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/NotSchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/NotSchemaDiffVisitor.java index 719a261b5d..86e58e13f1 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/NotSchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/NotSchemaDiffVisitor.java @@ -14,7 +14,6 @@ public class NotSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private DiffContext ctx; private final NotSchema original; @@ -28,13 +27,10 @@ public void visitNotSchema(NotSchemaWrapper notSchema) { } public void visitSchemaMustNotMatch(SchemaWrapper mustNotMatch) { - compareSchema(ctx.sub("not"), original.getMustNotMatch(), mustNotMatch.getWrapped(), - UNDEFINED_UNUSED, - UNDEFINED_UNUSED, - NOT_TYPE_SCHEMA_COMPATIBLE_BOTH, - NOT_TYPE_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD, - NOT_TYPE_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD, - NOT_TYPE_SCHEMA_COMPATIBLE_NONE); + compareSchema(ctx.sub("not"), original.getMustNotMatch(), mustNotMatch.getWrapped(), UNDEFINED_UNUSED, + UNDEFINED_UNUSED, NOT_TYPE_SCHEMA_COMPATIBLE_BOTH, + NOT_TYPE_SCHEMA_COMPATIBLE_BACKWARD_NOT_FORWARD, + NOT_TYPE_SCHEMA_COMPATIBLE_FORWARD_NOT_BACKWARD, NOT_TYPE_SCHEMA_COMPATIBLE_NONE); super.visitSchemaMustNotMatch(mustNotMatch); } } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/NumberSchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/NumberSchemaDiffVisitor.java index ecab95aaa1..8706b2d25c 100644 --- 
a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/NumberSchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/NumberSchemaDiffVisitor.java @@ -34,7 +34,6 @@ public class NumberSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private final DiffContext ctx; private final NumberSchema original; @@ -56,37 +55,38 @@ public void visitRequiredInteger(boolean requiresInteger) { boolean originalRequiresInteger = original.requiresInteger(); if (original.getMultipleOf() != null) { - BigDecimal multipleOf = new BigDecimal(original.getMultipleOf().toString()); // Not pretty but it works:/ + BigDecimal multipleOf = new BigDecimal(original.getMultipleOf().toString()); // Not pretty but it + // works:/ BigDecimal one = new BigDecimal("1"); originalRequiresInteger = originalRequiresInteger || multipleOf.compareTo(one) == 0; } diffBooleanTransition(ctx.sub("type"), originalRequiresInteger, requiresInteger, false, - NUMBER_TYPE_INTEGER_REQUIRED_FALSE_TO_TRUE, - NUMBER_TYPE_INTEGER_REQUIRED_TRUE_TO_FALSE, - NUMBER_TYPE_INTEGER_REQUIRED_UNCHANGED); + NUMBER_TYPE_INTEGER_REQUIRED_FALSE_TO_TRUE, NUMBER_TYPE_INTEGER_REQUIRED_TRUE_TO_FALSE, + NUMBER_TYPE_INTEGER_REQUIRED_UNCHANGED); super.visitRequiredInteger(requiresInteger); } @Override public void visitMinimum(Number minimum) { - boolean isOriginalMinimumExclusive = original.getExclusiveMinimumLimit() != null || original.isExclusiveMinimum(); - Number originalMinimum = original.getExclusiveMinimumLimit() != null ? original.getExclusiveMinimumLimit() : original.getMinimum(); - - boolean isUpdatedMinimumExclusive = schema.getExclusiveMinimumLimit() != null || schema.isExclusiveMinimum(); - Number updatedMinimum = schema.getExclusiveMinimumLimit() != null ? 
schema.getExclusiveMinimumLimit() : schema.getMinimum(); - - if (diffNumber(ctx.sub("minimum"), originalMinimum, updatedMinimum, - NUMBER_TYPE_MINIMUM_ADDED, - NUMBER_TYPE_MINIMUM_REMOVED, - NUMBER_TYPE_MINIMUM_INCREASED, - NUMBER_TYPE_MINIMUM_DECREASED)) { - - diffBooleanTransition(ctx.sub("exclusiveMinimum"), isOriginalMinimumExclusive, isUpdatedMinimumExclusive, false, - NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_FALSE_TO_TRUE, - NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_TRUE_TO_FALSE, - NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_UNCHANGED); + boolean isOriginalMinimumExclusive = original.getExclusiveMinimumLimit() != null + || original.isExclusiveMinimum(); + Number originalMinimum = original.getExclusiveMinimumLimit() != null + ? original.getExclusiveMinimumLimit() : original.getMinimum(); + + boolean isUpdatedMinimumExclusive = schema.getExclusiveMinimumLimit() != null + || schema.isExclusiveMinimum(); + Number updatedMinimum = schema.getExclusiveMinimumLimit() != null ? schema.getExclusiveMinimumLimit() + : schema.getMinimum(); + + if (diffNumber(ctx.sub("minimum"), originalMinimum, updatedMinimum, NUMBER_TYPE_MINIMUM_ADDED, + NUMBER_TYPE_MINIMUM_REMOVED, NUMBER_TYPE_MINIMUM_INCREASED, NUMBER_TYPE_MINIMUM_DECREASED)) { + + diffBooleanTransition(ctx.sub("exclusiveMinimum"), isOriginalMinimumExclusive, + isUpdatedMinimumExclusive, false, NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_FALSE_TO_TRUE, + NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_TRUE_TO_FALSE, + NUMBER_TYPE_IS_MINIMUM_EXCLUSIVE_UNCHANGED); } super.visitMinimum(minimum); @@ -109,22 +109,23 @@ public void visitExclusiveMinimumLimit(Number exclusiveMinimumLimit) { @Override public void visitMaximum(Number maximum) { - boolean isOriginalMaximumExclusive = original.getExclusiveMaximumLimit() != null || original.isExclusiveMaximum(); - Number originalMaximum = original.getExclusiveMaximumLimit() != null ? 
original.getExclusiveMaximumLimit() : original.getMaximum(); - - boolean isUpdatedMaximumExclusive = schema.getExclusiveMaximumLimit() != null || schema.isExclusiveMaximum(); - Number updatedMaximum = schema.getExclusiveMaximumLimit() != null ? schema.getExclusiveMaximumLimit() : schema.getMaximum(); - - if (diffNumber(ctx.sub("maximum"), originalMaximum, updatedMaximum, - NUMBER_TYPE_MAXIMUM_ADDED, - NUMBER_TYPE_MAXIMUM_REMOVED, - NUMBER_TYPE_MAXIMUM_INCREASED, - NUMBER_TYPE_MAXIMUM_DECREASED)) { - - diffBooleanTransition(ctx.sub("exclusiveMaximum"), isOriginalMaximumExclusive, isUpdatedMaximumExclusive, false, - NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_FALSE_TO_TRUE, - NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_TRUE_TO_FALSE, - NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_UNCHANGED); + boolean isOriginalMaximumExclusive = original.getExclusiveMaximumLimit() != null + || original.isExclusiveMaximum(); + Number originalMaximum = original.getExclusiveMaximumLimit() != null + ? original.getExclusiveMaximumLimit() : original.getMaximum(); + + boolean isUpdatedMaximumExclusive = schema.getExclusiveMaximumLimit() != null + || schema.isExclusiveMaximum(); + Number updatedMaximum = schema.getExclusiveMaximumLimit() != null ? 
schema.getExclusiveMaximumLimit() + : schema.getMaximum(); + + if (diffNumber(ctx.sub("maximum"), originalMaximum, updatedMaximum, NUMBER_TYPE_MAXIMUM_ADDED, + NUMBER_TYPE_MAXIMUM_REMOVED, NUMBER_TYPE_MAXIMUM_INCREASED, NUMBER_TYPE_MAXIMUM_DECREASED)) { + + diffBooleanTransition(ctx.sub("exclusiveMaximum"), isOriginalMaximumExclusive, + isUpdatedMaximumExclusive, false, NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_FALSE_TO_TRUE, + NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_TRUE_TO_FALSE, + NUMBER_TYPE_IS_MAXIMUM_EXCLUSIVE_UNCHANGED); } super.visitMaximum(maximum); @@ -145,12 +146,11 @@ public void visitExclusiveMaximumLimit(Number exclusiveMaximumLimit) { @Override public void visitMultipleOf(Number multipleOf) { DiffContext subCtx = ctx.sub("multipleOf"); - if (diffAddedRemoved(subCtx, original.getMultipleOf(), multipleOf, - NUMBER_TYPE_MULTIPLE_OF_ADDED, - NUMBER_TYPE_MULTIPLE_OF_REMOVED)) { + if (diffAddedRemoved(subCtx, original.getMultipleOf(), multipleOf, NUMBER_TYPE_MULTIPLE_OF_ADDED, + NUMBER_TYPE_MULTIPLE_OF_REMOVED)) { diffNumberOriginalMultipleOfUpdated(subCtx, original.getMultipleOf(), multipleOf, - NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_DIVISIBLE, - NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_NOT_DIVISIBLE); + NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_DIVISIBLE, + NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_NOT_DIVISIBLE); } super.visitMultipleOf(multipleOf); } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ObjectSchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ObjectSchemaDiffVisitor.java index 5e41feb18e..3542ccc271 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ObjectSchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ObjectSchemaDiffVisitor.java @@ -74,7 +74,6 @@ public class ObjectSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private final DiffContext ctx; 
private final ObjectSchema original; private ObjectSchemaWrapper schema; @@ -99,14 +98,10 @@ public void visitRequiredPropertyName(String requiredPropName) { @Override public void visitRequiredProperties(List requiredProperties) { - diffSetChanged(ctx.sub("required"), - new HashSet<>(original.getRequiredProperties()), - new HashSet<>(requiredProperties), - OBJECT_TYPE_REQUIRED_PROPERTIES_ADDED, - OBJECT_TYPE_REQUIRED_PROPERTIES_REMOVED, - OBJECT_TYPE_REQUIRED_PROPERTIES_CHANGED, - OBJECT_TYPE_REQUIRED_PROPERTIES_MEMBER_ADDED, - OBJECT_TYPE_REQUIRED_PROPERTIES_MEMBER_REMOVED); + diffSetChanged(ctx.sub("required"), new HashSet<>(original.getRequiredProperties()), + new HashSet<>(requiredProperties), OBJECT_TYPE_REQUIRED_PROPERTIES_ADDED, + OBJECT_TYPE_REQUIRED_PROPERTIES_REMOVED, OBJECT_TYPE_REQUIRED_PROPERTIES_CHANGED, + OBJECT_TYPE_REQUIRED_PROPERTIES_MEMBER_ADDED, OBJECT_TYPE_REQUIRED_PROPERTIES_MEMBER_REMOVED); super.visitRequiredProperties(requiredProperties); } @@ -114,8 +109,7 @@ public void visitRequiredProperties(List requiredProperties) { public void visitPropertyNameSchema(SchemaWrapper propertyNameSchema) { DiffContext subCtx = ctx.sub("properties"); if (diffSubschemaAddedRemoved(subCtx, original.getPropertyNameSchema(), propertyNameSchema, - OBJECT_TYPE_PROPERTY_SCHEMA_ADDED, - OBJECT_TYPE_PROPERTY_SCHEMA_REMOVED)) { + OBJECT_TYPE_PROPERTY_SCHEMA_ADDED, OBJECT_TYPE_PROPERTY_SCHEMA_REMOVED)) { propertyNameSchema.accept(new SchemaDiffVisitor(subCtx, original.getPropertyNameSchema())); } super.visitPropertyNameSchema(propertyNameSchema); @@ -124,33 +118,27 @@ public void visitPropertyNameSchema(SchemaWrapper propertyNameSchema) { @Override public void visitMinProperties(Integer minProperties) { diffInteger(ctx.sub("minProperties"), original.getMinProperties(), minProperties, - OBJECT_TYPE_MIN_PROPERTIES_ADDED, - OBJECT_TYPE_MIN_PROPERTIES_REMOVED, - OBJECT_TYPE_MIN_PROPERTIES_INCREASED, - OBJECT_TYPE_MIN_PROPERTIES_DECREASED); + 
OBJECT_TYPE_MIN_PROPERTIES_ADDED, OBJECT_TYPE_MIN_PROPERTIES_REMOVED, + OBJECT_TYPE_MIN_PROPERTIES_INCREASED, OBJECT_TYPE_MIN_PROPERTIES_DECREASED); super.visitMinProperties(minProperties); } @Override public void visitMaxProperties(Integer maxProperties) { diffInteger(ctx.sub("maxProperties"), original.getMaxProperties(), maxProperties, - OBJECT_TYPE_MAX_PROPERTIES_ADDED, - OBJECT_TYPE_MAX_PROPERTIES_REMOVED, - OBJECT_TYPE_MAX_PROPERTIES_INCREASED, - OBJECT_TYPE_MAX_PROPERTIES_DECREASED); + OBJECT_TYPE_MAX_PROPERTIES_ADDED, OBJECT_TYPE_MAX_PROPERTIES_REMOVED, + OBJECT_TYPE_MAX_PROPERTIES_INCREASED, OBJECT_TYPE_MAX_PROPERTIES_DECREASED); super.visitMaxProperties(maxProperties); } @Override public void visitAllPropertyDependencies(Map> propertyDependencies) { - diffSetChanged(ctx.sub("dependencies"), - new HashSet<>(original.getPropertyDependencies().keySet()), - new HashSet<>(propertyDependencies.keySet()), - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_ADDED, - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_REMOVED, - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_CHANGED, - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_MEMBER_ADDED, - OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_MEMBER_REMOVED); + diffSetChanged(ctx.sub("dependencies"), new HashSet<>(original.getPropertyDependencies().keySet()), + new HashSet<>(propertyDependencies.keySet()), OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_ADDED, + OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_REMOVED, + OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_CHANGED, + OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_MEMBER_ADDED, + OBJECT_TYPE_PROPERTY_DEPENDENCIES_KEYS_MEMBER_REMOVED); super.visitAllPropertyDependencies(propertyDependencies); } @@ -158,30 +146,29 @@ public void visitAllPropertyDependencies(Map> propertyDepend public void visitPropertyDependencies(String ifPresent, Set allMustBePresent) { if (original.getPropertyDependencies().containsKey(ifPresent)) { diffSetChanged(ctx.sub("dependencies/" + ifPresent), - original.getPropertyDependencies().get(ifPresent), - 
allMustBePresent, - UNDEFINED_UNUSED, - UNDEFINED_UNUSED, - OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_CHANGED, - OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_ADDED, - OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_REMOVED); + original.getPropertyDependencies().get(ifPresent), allMustBePresent, UNDEFINED_UNUSED, + UNDEFINED_UNUSED, OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_CHANGED, + OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_ADDED, + OBJECT_TYPE_PROPERTY_DEPENDENCIES_VALUE_MEMBER_REMOVED); } super.visitPropertyDependencies(ifPresent, allMustBePresent); } @Override public void visitAdditionalProperties(boolean permitsAdditionalProperties) { - if (diffBooleanTransition(ctx.sub("additionalProperties"), original.permitsAdditionalProperties(), permitsAdditionalProperties, true, - OBJECT_TYPE_ADDITIONAL_PROPERTIES_FALSE_TO_TRUE, + if (diffBooleanTransition(ctx.sub("additionalProperties"), original.permitsAdditionalProperties(), + permitsAdditionalProperties, true, OBJECT_TYPE_ADDITIONAL_PROPERTIES_FALSE_TO_TRUE, OBJECT_TYPE_ADDITIONAL_PROPERTIES_TRUE_TO_FALSE, OBJECT_TYPE_ADDITIONAL_PROPERTIES_BOOLEAN_UNCHANGED)) { if (permitsAdditionalProperties) { - Schema updatedAdditionalProperties = schema.getSchemaOfAdditionalProperties() == null ? null : - schema.getSchemaOfAdditionalProperties().getWrapped(); - diffSchemaOrTrue(ctx.sub("schemaOfAdditionalItems"), original.getSchemaOfAdditionalProperties(), - updatedAdditionalProperties, OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_UNCHANGED, - OBJECT_TYPE_ADDITIONAL_PROPERTIES_EXTENDED, OBJECT_TYPE_ADDITIONAL_PROPERTIES_NARROWED, + Schema updatedAdditionalProperties = schema.getSchemaOfAdditionalProperties() == null ? 
null + : schema.getSchemaOfAdditionalProperties().getWrapped(); + diffSchemaOrTrue(ctx.sub("schemaOfAdditionalItems"), + original.getSchemaOfAdditionalProperties(), updatedAdditionalProperties, + OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_UNCHANGED, + OBJECT_TYPE_ADDITIONAL_PROPERTIES_EXTENDED, + OBJECT_TYPE_ADDITIONAL_PROPERTIES_NARROWED, OBJECT_TYPE_ADDITIONAL_PROPERTIES_SCHEMA_CHANGED); } } @@ -198,13 +185,12 @@ public void visitSchemaOfAdditionalProperties(SchemaWrapper schemaOfAdditionalPr @Override public void visitPatternProperties(Map patternProperties) { diffSetChanged(ctx.sub("patternProperties"), - original.getPatternProperties().keySet().stream().map(Pattern::toString).collect(Collectors.toSet()), - patternProperties.keySet().stream().map(Pattern::toString).collect(Collectors.toSet()), - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_ADDED, - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_REMOVED, - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_CHANGED, - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_MEMBER_ADDED, - OBJECT_TYPE_PATTERN_PROPERTY_KEYS_MEMBER_REMOVED); + original.getPatternProperties().keySet().stream().map(Pattern::toString) + .collect(Collectors.toSet()), + patternProperties.keySet().stream().map(Pattern::toString).collect(Collectors.toSet()), + OBJECT_TYPE_PATTERN_PROPERTY_KEYS_ADDED, OBJECT_TYPE_PATTERN_PROPERTY_KEYS_REMOVED, + OBJECT_TYPE_PATTERN_PROPERTY_KEYS_CHANGED, OBJECT_TYPE_PATTERN_PROPERTY_KEYS_MEMBER_ADDED, + OBJECT_TYPE_PATTERN_PROPERTY_KEYS_MEMBER_REMOVED); super.visitPatternProperties(patternProperties); } @@ -212,25 +198,22 @@ public void visitPatternProperties(Map patternProperties @Override public void visitPatternPropertySchema(Pattern propertyNamePattern, SchemaWrapper schema) { final Map stringifiedOriginal = original.getPatternProperties().entrySet().stream() - .collect(toMap(e -> e.getKey().toString(), Entry::getValue)); // TODO maybe add a wrapper class for Pattern + .collect(toMap(e -> e.getKey().toString(), Entry::getValue)); // TODO maybe add a wrapper + // 
class for Pattern if (stringifiedOriginal.containsKey(propertyNamePattern.toString())) { schema.accept(new SchemaDiffVisitor(ctx.sub("patternProperties/" + propertyNamePattern), - stringifiedOriginal.get(propertyNamePattern.toString()))); + stringifiedOriginal.get(propertyNamePattern.toString()))); } super.visitPatternPropertySchema(propertyNamePattern, schema); } @Override public void visitSchemaDependencies(Map schemaDependencies) { - diffSetChanged(ctx.sub("dependencies"), - new HashSet<>(original.getSchemaDependencies().keySet()), - new HashSet<>(schemaDependencies.keySet()), - OBJECT_TYPE_SCHEMA_DEPENDENCIES_ADDED, - OBJECT_TYPE_SCHEMA_DEPENDENCIES_REMOVED, - OBJECT_TYPE_SCHEMA_DEPENDENCIES_CHANGED, - OBJECT_TYPE_SCHEMA_DEPENDENCIES_MEMBER_ADDED, - OBJECT_TYPE_SCHEMA_DEPENDENCIES_MEMBER_REMOVED); + diffSetChanged(ctx.sub("dependencies"), new HashSet<>(original.getSchemaDependencies().keySet()), + new HashSet<>(schemaDependencies.keySet()), OBJECT_TYPE_SCHEMA_DEPENDENCIES_ADDED, + OBJECT_TYPE_SCHEMA_DEPENDENCIES_REMOVED, OBJECT_TYPE_SCHEMA_DEPENDENCIES_CHANGED, + OBJECT_TYPE_SCHEMA_DEPENDENCIES_MEMBER_ADDED, OBJECT_TYPE_SCHEMA_DEPENDENCIES_MEMBER_REMOVED); super.visitSchemaDependencies(schemaDependencies); } @@ -238,7 +221,7 @@ public void visitSchemaDependencies(Map schemaDependencie public void visitSchemaDependency(String propName, SchemaWrapper schema) { if (original.getSchemaDependencies().containsKey(propName)) { schema.accept(new SchemaDiffVisitor(ctx.sub("dependencies/" + propName), - original.getSchemaDependencies().get(propName))); // TODO null/invalid schema + original.getSchemaDependencies().get(propName))); // TODO null/invalid schema } super.visitSchemaDependency(propName, schema); } @@ -246,14 +229,16 @@ public void visitSchemaDependency(String propName, SchemaWrapper schema) { @Override public void visitPropertySchemas(Map propertySchemas) { @SuppressWarnings("serial") - Set allPropertySchemaNames = new HashSet() {{ - 
addAll(original.getPropertySchemas().keySet()); - addAll(schema.getPropertySchemas().keySet()); - }}; + Set allPropertySchemaNames = new HashSet() { + { + addAll(original.getPropertySchemas().keySet()); + addAll(schema.getPropertySchemas().keySet()); + } + }; List addedPropertySchemas = new ArrayList<>(); List removedPropertySchemas = new ArrayList<>(); - for (String propertySchemaName: allPropertySchemaNames) { + for (String propertySchemaName : allPropertySchemaNames) { boolean existInOriginal = original.getPropertySchemas().containsKey(propertySchemaName); boolean existInUpdated = propertySchemas.containsKey(propertySchemaName); if (!existInOriginal && existInUpdated) { @@ -285,17 +270,15 @@ public void visitPropertySchemas(Map propertySchemas) { public void visitPropertySchema(String propertyName, SchemaWrapper schema) { if (original.getPropertySchemas().containsKey(propertyName)) { Schema originalPropertySchema = original.getPropertySchemas().get(propertyName); - if (originalPropertySchema instanceof StringSchema - && schema instanceof CombinedSchemaWrapper) { - originalPropertySchema = CombinedSchema - .builder() - .criterion(CombinedSchema.ANY_CRITERION) - .subschema(originalPropertySchema) - .build(); + if (originalPropertySchema instanceof StringSchema && schema instanceof CombinedSchemaWrapper) { + originalPropertySchema = CombinedSchema.builder().criterion(CombinedSchema.ANY_CRITERION) + .subschema(originalPropertySchema).build(); } - schema.accept(new SchemaDiffVisitor(ctx.sub("properties/" + propertyName), - originalPropertySchema)); // TODO null/invalid schema + schema.accept( + new SchemaDiffVisitor(ctx.sub("properties/" + propertyName), originalPropertySchema)); // TODO + // null/invalid + // schema } super.visitPropertySchema(propertyName, schema); } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/PrimitiveSchemaDiffVisitor.java 
b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/PrimitiveSchemaDiffVisitor.java index 342e78e5f3..2c65e89059 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/PrimitiveSchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/PrimitiveSchemaDiffVisitor.java @@ -28,18 +28,15 @@ * *

* "True" and "Empty" schemas are equivalent, each are not equivalent with "False" schema. - * */ public class PrimitiveSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private final DiffContext ctx; private final Schema original; /** - * This visitor accepts any schema, so the checks - * that would be otherwise done by the caller, - * are made by this visitor. + * This visitor accepts any schema, so the checks that would be otherwise done by the caller, are made by + * this visitor. */ public PrimitiveSchemaDiffVisitor(DiffContext ctx, Schema original) { this.ctx = ctx; @@ -49,7 +46,7 @@ public PrimitiveSchemaDiffVisitor(DiffContext ctx, Schema original) { private void emptyTrueSchema(SchemaWrapper wrapper) { // This is spelled explicitly for clarity, and in case the library changes. if (!(EmptySchema.INSTANCE.equals(original) // || - // TrueSchema.INSTANCE.equals(original) + // TrueSchema.INSTANCE.equals(original) )) { ctx.addDifference(SUBSCHEMA_TYPE_CHANGED_TO_EMPTY_OR_TRUE, original, wrapper); // Change to empty schema is backwards compatible diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ReferenceSchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ReferenceSchemaDiffVisitor.java index 409f52fb10..6c8670ff55 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ReferenceSchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/ReferenceSchemaDiffVisitor.java @@ -12,7 +12,6 @@ public class ReferenceSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private DiffContext ctx; private final Schema referredOriginal; @@ -27,7 +26,8 @@ public ReferenceSchemaDiffVisitor(DiffContext ctx, Schema original) { @Override public void visitReferenceSchema(ReferenceSchemaWrapper referenceSchema) { - // TODO Can't use the schema itself, hashCode & 
equals would cause StackOverflowError, report a bug to te library + // TODO Can't use the schema itself, hashCode & equals would cause StackOverflowError, report a bug to + // te library if (!ctx.visited.contains(referenceSchema.getLocation())) { ctx.visited.add(referenceSchema.getLocation()); ctx = ctx.sub("[ref " + referenceSchema.getLocation() + "]"); @@ -39,9 +39,8 @@ public void visitReferenceSchema(ReferenceSchemaWrapper referenceSchema) { @Override public void visitReferredSchema(SchemaWrapper schema) { - if (diffSubschemaAddedRemoved(ctx, referredOriginal, schema, - REFERENCE_TYPE_TARGET_SCHEMA_ADDED, - REFERENCE_TYPE_TARGET_SCHEMA_REMOVED)) { + if (diffSubschemaAddedRemoved(ctx, referredOriginal, schema, REFERENCE_TYPE_TARGET_SCHEMA_ADDED, + REFERENCE_TYPE_TARGET_SCHEMA_REMOVED)) { schema.accept(new SchemaDiffVisitor(ctx, referredOriginal)); } super.visitReferredSchema(schema); diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/SchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/SchemaDiffVisitor.java index e0dd794f7e..0be54b07f8 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/SchemaDiffVisitor.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/SchemaDiffVisitor.java @@ -50,11 +50,11 @@ public SchemaDiffVisitor(DiffContext ctx, Schema original) { } /** - * In case of e.g. enum of strings (with type property defined as "string"), - * the schema is not an EnumSchema or a StringSchema, but a CombinedSchema of both. + * In case of e.g. enum of strings (with type property defined as "string"), the schema is not an + * EnumSchema or a StringSchema, but a CombinedSchema of both. *

- * If original is Combined and updated is not, the backwards compatibility is - * satisfied iff the Combined schema contains a schema that is compatible with updated (and their type matches). + * If original is Combined and updated is not, the backwards compatibility is satisfied iff the Combined + * schema contains a schema that is compatible with updated (and their type matches). *

* This should only work for allOf criterion however. */ @@ -63,9 +63,9 @@ private Schema getCompatibleSubschemaOrOriginal(Schema original, SchemaWrapper u requireNonNull(updated); if (original instanceof CombinedSchema) { Set typeCompatible = ((CombinedSchema) original).getSubschemas().stream() - .filter(s -> s.getClass().isInstance(updated.getWrapped())) - .collect(Collectors.toSet()); - if (ALL_CRITERION.equals(((CombinedSchema) original).getCriterion()) && typeCompatible.size() == 1) + .filter(s -> s.getClass().isInstance(updated.getWrapped())).collect(Collectors.toSet()); + if (ALL_CRITERION.equals(((CombinedSchema) original).getCriterion()) + && typeCompatible.size() == 1) return typeCompatible.stream().findAny().get(); } return original; @@ -163,9 +163,7 @@ public void visitConstSchema(ConstSchemaWrapper schema) { if (orig instanceof EnumSchema) { Set possibleValues = ((EnumSchema) orig).getPossibleValues(); if (possibleValues.size() == 1) { - orig = ConstSchema.builder() - .permittedValue(possibleValues.stream().findAny().get()) - .build(); + orig = ConstSchema.builder().permittedValue(possibleValues.stream().findAny().get()).build(); } } @@ -186,9 +184,7 @@ public void visitEnumSchema(EnumSchemaWrapper schema) { // Const and single-enum equivalency if (orig instanceof ConstSchema) { Object permittedValue = ((ConstSchema) orig).getPermittedValue(); - orig = EnumSchema.builder() - .possibleValue(permittedValue) - .build(); + orig = EnumSchema.builder().possibleValue(permittedValue).build(); } if (!(orig instanceof EnumSchema)) { diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/StringSchemaDiffVisitor.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/StringSchemaDiffVisitor.java index 40c4881f29..48d71c297f 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/StringSchemaDiffVisitor.java +++ 
b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/diff/StringSchemaDiffVisitor.java @@ -34,7 +34,6 @@ public class StringSchemaDiffVisitor extends JsonSchemaWrapperVisitor { - private final DiffContext ctx; private final StringSchema original; @@ -47,23 +46,22 @@ public StringSchemaDiffVisitor(DiffContext ctx, StringSchema original) { public void visitStringSchema(StringSchemaWrapper stringSchema) { ctx.log("Visiting " + stringSchema + " at " + stringSchema.getWrapped().getLocation()); - // Process "contentEncoding" and "contentMediaType" which are at the moment stored as unprocessed properties + // Process "contentEncoding" and "contentMediaType" which are at the moment stored as unprocessed + // properties Map originalUnprocessed = original.getUnprocessedProperties(); Map updatedUnprocessed = stringSchema.getUnprocessedProperties(); // "contentEncoding" DiffContext subCtx = ctx.sub("contentEncoding"); diffObject(subCtx, getExceptionally(subCtx, () -> originalUnprocessed.get("contentEncoding")), - getExceptionally(subCtx, () -> updatedUnprocessed.get("contentEncoding")), - STRING_TYPE_CONTENT_ENCODING_ADDED, - STRING_TYPE_CONTENT_ENCODING_REMOVED, - STRING_TYPE_CONTENT_ENCODING_CHANGED); + getExceptionally(subCtx, () -> updatedUnprocessed.get("contentEncoding")), + STRING_TYPE_CONTENT_ENCODING_ADDED, STRING_TYPE_CONTENT_ENCODING_REMOVED, + STRING_TYPE_CONTENT_ENCODING_CHANGED); // "contentMediaType" subCtx = ctx.sub("contentMediaType"); diffObject(subCtx, getExceptionally(subCtx, () -> originalUnprocessed.get("contentMediaType")), - getExceptionally(subCtx, () -> updatedUnprocessed.get("contentMediaType")), - STRING_TYPE_CONTENT_MEDIA_TYPE_ADDED, - STRING_TYPE_CONTENT_MEDIA_TYPE_REMOVED, - STRING_TYPE_CONTENT_MEDIA_TYPE_CHANGED); + getExceptionally(subCtx, () -> updatedUnprocessed.get("contentMediaType")), + STRING_TYPE_CONTENT_MEDIA_TYPE_ADDED, STRING_TYPE_CONTENT_MEDIA_TYPE_REMOVED, + STRING_TYPE_CONTENT_MEDIA_TYPE_CHANGED); 
super.visitStringSchema(stringSchema); } @@ -71,22 +69,18 @@ public void visitStringSchema(StringSchemaWrapper stringSchema) { @Override public void visitMinLength(Integer minLength) { ctx.log("Visiting minLength: " + minLength); - diffInteger(ctx.sub("minLength"), original.getMinLength(), minLength, - STRING_TYPE_MIN_LENGTH_ADDED, - STRING_TYPE_MIN_LENGTH_REMOVED, - STRING_TYPE_MIN_LENGTH_INCREASED, - STRING_TYPE_MIN_LENGTH_DECREASED); + diffInteger(ctx.sub("minLength"), original.getMinLength(), minLength, STRING_TYPE_MIN_LENGTH_ADDED, + STRING_TYPE_MIN_LENGTH_REMOVED, STRING_TYPE_MIN_LENGTH_INCREASED, + STRING_TYPE_MIN_LENGTH_DECREASED); super.visitMinLength(minLength); } @Override public void visitMaxLength(Integer maxLength) { ctx.log("Visiting maxLength " + maxLength); - diffInteger(ctx.sub("maxLength"), original.getMaxLength(), maxLength, - STRING_TYPE_MAX_LENGTH_ADDED, - STRING_TYPE_MAX_LENGTH_REMOVED, - STRING_TYPE_MAX_LENGTH_INCREASED, - STRING_TYPE_MAX_LENGTH_DECREASED); + diffInteger(ctx.sub("maxLength"), original.getMaxLength(), maxLength, STRING_TYPE_MAX_LENGTH_ADDED, + STRING_TYPE_MAX_LENGTH_REMOVED, STRING_TYPE_MAX_LENGTH_INCREASED, + STRING_TYPE_MAX_LENGTH_DECREASED); super.visitMaxLength(maxLength); } @@ -96,9 +90,7 @@ public void visitPattern(Pattern pattern) { // careful with the pattern wrappers DiffContext subCtx = ctx.sub("pattern"); diffObject(subCtx, getExceptionally(subCtx, () -> original.getPattern().pattern()), pattern.pattern(), - STRING_TYPE_PATTERN_ADDED, - STRING_TYPE_PATTERN_REMOVED, - STRING_TYPE_PATTERN_CHANGED); + STRING_TYPE_PATTERN_ADDED, STRING_TYPE_PATTERN_REMOVED, STRING_TYPE_PATTERN_CHANGED); super.visitPattern(pattern); } @@ -107,10 +99,8 @@ public void visitFormat(String formatName) { ctx.log("Visiting formatValidator " + formatName); DiffContext subCtx = ctx.sub("format"); diffObjectDefault(subCtx, getExceptionally(subCtx, () -> original.getFormatValidator().formatName()), - formatName, "unnamed-format", - 
STRING_TYPE_FORMAT_ADDED, - STRING_TYPE_FORMAT_REMOVED, - STRING_TYPE_FORMAT_CHANGED); + formatName, "unnamed-format", STRING_TYPE_FORMAT_ADDED, STRING_TYPE_FORMAT_REMOVED, + STRING_TYPE_FORMAT_CHANGED); super.visitFormat(formatName); } } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/EqualitySchemaWrapper.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/EqualitySchemaWrapper.java index cef02cafb7..bb97712360 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/EqualitySchemaWrapper.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/EqualitySchemaWrapper.java @@ -12,12 +12,9 @@ import static io.apicurio.registry.rules.compatibility.jsonschema.JsonUtil.MAPPER; /** - * Equals and hashCode implementation from the Everit library - * may not handle some schemas (with references) well, - * resulting in {@link StackOverflowError} or other errors. - * When using collections, always wrap the schema inside this wrapper, - * or any other wrapper that inherits from this one. - * + * Equals and hashCode implementation from the Everit library may not handle some schemas (with references) + * well, resulting in {@link StackOverflowError} or other errors. When using collections, always wrap the + * schema inside this wrapper, or any other wrapper that inherits from this one. */ // TODO Should implement SchemaWrapper? 
public class EqualitySchemaWrapper implements SchemaWrapper { @@ -32,8 +29,10 @@ public EqualitySchemaWrapper(Schema wrapped) { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; EqualitySchemaWrapper that = (EqualitySchemaWrapper) o; @@ -45,7 +44,8 @@ public boolean equals(Object o) { return thisWrappedNode.equals(thatWrappedNode); } catch (IOException ex) { - throw new RuntimeException("Could not perform equality comparison on this " + this + " and that " + that, ex); + throw new RuntimeException( + "Could not perform equality comparison on this " + this + " and that " + that, ex); } } diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/ObjectSchemaWrapper.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/ObjectSchemaWrapper.java index 95d8ca0413..8effa6d58c 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/ObjectSchemaWrapper.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/ObjectSchemaWrapper.java @@ -56,7 +56,6 @@ public Map getRegexpPatternProperties() { return wrap(wrapped.getPatternProperties()); // TODO Possible deprecation issue } - public Map getSchemaDependencies() { return wrap(wrapped.getSchemaDependencies()); } @@ -65,7 +64,6 @@ public Map getPropertySchemas() { return wrap(wrapped.getPropertySchemas()); } - @Override public void accept(JsonSchemaWrapperVisitor visitor) { visitor.visitObjectSchema(this); diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/WrapUtil.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/WrapUtil.java index f8a578fe70..869d25ecb7 100644 --- 
a/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/WrapUtil.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/compatibility/jsonschema/wrapper/WrapUtil.java @@ -30,7 +30,6 @@ public class WrapUtil { - public static SchemaWrapper wrap(Schema schema) { if (schema == null) return null; @@ -66,7 +65,8 @@ public static SchemaWrapper wrap(Schema schema) { } else if (schema instanceof NumberSchema) { return new NumberSchemaWrapper((NumberSchema) schema); } else { - throw new IllegalStateException("No wrapper for an underlying schema type '" + schema.getClass() + "': " + schema); + throw new IllegalStateException( + "No wrapper for an underlying schema type '" + schema.getClass() + "': " + schema); } } @@ -79,11 +79,8 @@ public static List wrap(List itemSchemas) { public static Map wrap(Map map) { requireNonNull(map); return map.entrySet().stream() - //.map(entry -> new SimpleEntry<>(entry.getKey(), wrap(entry.getValue()))) - .collect(toMap( - Entry::getKey, - e -> wrap(e.getValue()) - )); + // .map(entry -> new SimpleEntry<>(entry.getKey(), wrap(entry.getValue()))) + .collect(toMap(Entry::getKey, e -> wrap(e.getValue()))); } public static Collection wrap(Collection subschemas) { diff --git a/schema-util/json/src/main/java/io/apicurio/registry/rules/validity/JsonSchemaContentValidator.java b/schema-util/json/src/main/java/io/apicurio/registry/rules/validity/JsonSchemaContentValidator.java index e7c43866b3..6330393042 100644 --- a/schema-util/json/src/main/java/io/apicurio/registry/rules/validity/JsonSchemaContentValidator.java +++ b/schema-util/json/src/main/java/io/apicurio/registry/rules/validity/JsonSchemaContentValidator.java @@ -1,6 +1,5 @@ package io.apicurio.registry.rules.validity; - import com.fasterxml.jackson.databind.ObjectMapper; import io.apicurio.registry.content.TypedContent; import io.apicurio.registry.rest.v3.beans.ArtifactReference; @@ -28,15 +27,17 @@ public JsonSchemaContentValidator() { } 
/** - * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) + * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) */ @Override - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException { + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException { if (level == ValidityLevel.SYNTAX_ONLY) { try { objectMapper.readTree(content.getContent().bytes()); } catch (Exception e) { - throw new RuleViolationException("Syntax violation for JSON Schema artifact.", RuleType.VALIDITY, level.name(), e); + throw new RuleViolationException("Syntax violation for JSON Schema artifact.", + RuleType.VALIDITY, level.name(), e); } } else if (level == ValidityLevel.FULL) { try { @@ -48,21 +49,22 @@ public void validate(ValidityLevel level, TypedContent content, Map references) throws RuleViolationException { + public void validateReferences(TypedContent content, List references) + throws RuleViolationException { // TODO Implement this for JSON Schema! 
} } diff --git a/schema-util/json/src/test/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaCompatibilityCheckerTest.java b/schema-util/json/src/test/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaCompatibilityCheckerTest.java index 30ad940858..d28672f34f 100644 --- a/schema-util/json/src/test/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaCompatibilityCheckerTest.java +++ b/schema-util/json/src/test/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaCompatibilityCheckerTest.java @@ -11,64 +11,51 @@ public class JsonSchemaCompatibilityCheckerTest { - private TypedContent toTypedContent(String content) { - return TypedContent.create(ContentHandle.create(content), ContentTypes.APPLICATION_JSON); - } + private TypedContent toTypedContent(String content) { + return TypedContent.create(ContentHandle.create(content), ContentTypes.APPLICATION_JSON); + } - private static final String BEFORE = "{\r\n" - + " \"$id\": \"https://example.com/blank.schema.json\",\r\n" - + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\r\n" - + " \"title\": \"Test JSON Schema\",\r\n" - + " \"description\": \"\",\r\n" - + " \"type\": \"object\",\r\n" - + " \"properties\": {}\r\n" - + "}"; - private static final String AFTER_VALID = "{\r\n" - + " \"$id\": \"https://example.com/blank.schema.json\",\r\n" - + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\r\n" - + " \"title\": \"Test JSON Schema\",\r\n" - + " \"description\": \"A simple description added.\",\r\n" - + " \"type\": \"object\",\r\n" - + " \"properties\": {}\r\n" - + "}"; - private static final String AFTER_INVALID = "{\r\n" - + " \"$id\": \"https://example.com/blank.schema.json\",\r\n" - + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\r\n" - + " \"title\": \"Test JSON Schema\",\r\n" - + " \"description\": \"\",\r\n" - + " \"type\": \"object\",\r\n" - + " \"properties\": {\r\n" - + " \"firstName\": {\r\n" - + " \"type\": 
\"string\",\r\n" - + " \"description\": \"The person's first name.\"\r\n" - + " },\r\n" - + " \"lastName\": {\r\n" - + " \"type\": \"string\",\r\n" - + " \"description\": \"The person's last name.\"\r\n" - + " },\r\n" - + " \"age\": {\r\n" - + " \"description\": \"Age in years which must be equal to or greater than zero.\",\r\n" - + " \"type\": \"integer\",\r\n" - + " \"minimum\": 0\r\n" - + " }\r\n" - + " }\r\n" - + "}"; - + private static final String BEFORE = "{\r\n" + + " \"$id\": \"https://example.com/blank.schema.json\",\r\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\r\n" + + " \"title\": \"Test JSON Schema\",\r\n" + " \"description\": \"\",\r\n" + + " \"type\": \"object\",\r\n" + " \"properties\": {}\r\n" + "}"; + private static final String AFTER_VALID = "{\r\n" + + " \"$id\": \"https://example.com/blank.schema.json\",\r\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\r\n" + + " \"title\": \"Test JSON Schema\",\r\n" + + " \"description\": \"A simple description added.\",\r\n" + " \"type\": \"object\",\r\n" + + " \"properties\": {}\r\n" + "}"; + private static final String AFTER_INVALID = "{\r\n" + + " \"$id\": \"https://example.com/blank.schema.json\",\r\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\r\n" + + " \"title\": \"Test JSON Schema\",\r\n" + " \"description\": \"\",\r\n" + + " \"type\": \"object\",\r\n" + " \"properties\": {\r\n" + " \"firstName\": {\r\n" + + " \"type\": \"string\",\r\n" + + " \"description\": \"The person's first name.\"\r\n" + " },\r\n" + + " \"lastName\": {\r\n" + " \"type\": \"string\",\r\n" + + " \"description\": \"The person's last name.\"\r\n" + " },\r\n" + + " \"age\": {\r\n" + + " \"description\": \"Age in years which must be equal to or greater than zero.\",\r\n" + + " \"type\": \"integer\",\r\n" + " \"minimum\": 0\r\n" + " }\r\n" + + " }\r\n" + "}"; @Test - public void testJsonSchemaCompatibilityChecker() { - JsonSchemaCompatibilityChecker checker = new 
JsonSchemaCompatibilityChecker(); - TypedContent existing = toTypedContent(BEFORE); - TypedContent proposed = toTypedContent(AFTER_VALID); - checker.testCompatibility(CompatibilityLevel.BACKWARD, Collections.singletonList(existing), proposed, Collections.emptyMap()); - } + public void testJsonSchemaCompatibilityChecker() { + JsonSchemaCompatibilityChecker checker = new JsonSchemaCompatibilityChecker(); + TypedContent existing = toTypedContent(BEFORE); + TypedContent proposed = toTypedContent(AFTER_VALID); + checker.testCompatibility(CompatibilityLevel.BACKWARD, Collections.singletonList(existing), proposed, + Collections.emptyMap()); + } @Test - public void testJsonSchemaCompatibilityChecker_Fail() { - JsonSchemaCompatibilityChecker checker = new JsonSchemaCompatibilityChecker(); - TypedContent existing = toTypedContent(BEFORE); - TypedContent proposed = toTypedContent(AFTER_INVALID); - checker.testCompatibility(CompatibilityLevel.BACKWARD, Collections.singletonList(existing), proposed, Collections.emptyMap()); - } - + public void testJsonSchemaCompatibilityChecker_Fail() { + JsonSchemaCompatibilityChecker checker = new JsonSchemaCompatibilityChecker(); + TypedContent existing = toTypedContent(BEFORE); + TypedContent proposed = toTypedContent(AFTER_INVALID); + checker.testCompatibility(CompatibilityLevel.BACKWARD, Collections.singletonList(existing), proposed, + Collections.emptyMap()); + } + } diff --git a/schema-util/json/src/test/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaDiffUtilTest.java b/schema-util/json/src/test/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaDiffUtilTest.java index d89ae87878..6c8249adfc 100644 --- a/schema-util/json/src/test/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaDiffUtilTest.java +++ b/schema-util/json/src/test/java/io/apicurio/registry/rules/compatibility/jsonschema/JsonSchemaDiffUtilTest.java @@ -13,31 +13,19 @@ public class JsonSchemaDiffUtilTest { public static 
Stream multipleOfCases() { - return Stream.of( - Arguments.of(10, 5, false), - Arguments.of(10.0, 5, false), - Arguments.of(10.0, 5.0, false), - Arguments.of(10.0, 10, false), - Arguments.of(10.0, 10.0, false), - Arguments.of(10.1, 10, true), - Arguments.of(13, 5, true), - Arguments.of(13.0, 5, true), - Arguments.of(13, 5.0, true) - ); + return Stream.of(Arguments.of(10, 5, false), Arguments.of(10.0, 5, false), + Arguments.of(10.0, 5.0, false), Arguments.of(10.0, 10, false), + Arguments.of(10.0, 10.0, false), Arguments.of(10.1, 10, true), Arguments.of(13, 5, true), + Arguments.of(13.0, 5, true), Arguments.of(13, 5.0, true)); } - @ParameterizedTest @MethodSource("multipleOfCases") public void multipleOfDivisibility(Number original, Number updated, boolean isIncompatible) { DiffContext context = DiffContext.createRootContext(); - DiffUtil.diffNumberOriginalMultipleOfUpdated( - context, - original, - updated, + DiffUtil.diffNumberOriginalMultipleOfUpdated(context, original, updated, DiffType.NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_DIVISIBLE, - DiffType.NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_NOT_DIVISIBLE - ); + DiffType.NUMBER_TYPE_MULTIPLE_OF_UPDATED_IS_NOT_DIVISIBLE); assertEquals(context.foundIncompatibleDifference(), isIncompatible); } } diff --git a/schema-util/kconnect/pom.xml b/schema-util/kconnect/pom.xml index e4b5bdfde5..d7cc8e87c4 100644 --- a/schema-util/kconnect/pom.xml +++ b/schema-util/kconnect/pom.xml @@ -1,57 +1,55 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-schema-util-kconnect - jar - apicurio-registry-schema-util-kconnect + apicurio-registry-schema-util-kconnect + jar + apicurio-registry-schema-util-kconnect - + - - io.apicurio - apicurio-registry-schema-util-common - - - - io.apicurio - apicurio-registry-schema-util-json - + + io.apicurio + apicurio-registry-schema-util-common + - - org.apache.kafka - connect-json 
- - - + + io.apicurio + apicurio-registry-schema-util-json + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + org.apache.kafka + connect-json + + + + + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/kconnect/src/main/java/io/apicurio/registry/content/canon/KafkaConnectContentCanonicalizer.java b/schema-util/kconnect/src/main/java/io/apicurio/registry/content/canon/KafkaConnectContentCanonicalizer.java index abbe53b858..009f464d7f 100644 --- a/schema-util/kconnect/src/main/java/io/apicurio/registry/content/canon/KafkaConnectContentCanonicalizer.java +++ b/schema-util/kconnect/src/main/java/io/apicurio/registry/content/canon/KafkaConnectContentCanonicalizer.java @@ -2,7 +2,6 @@ /** * A Kafka Connect schema content canonicalizer. - * */ public class KafkaConnectContentCanonicalizer extends JsonContentCanonicalizer { diff --git a/schema-util/kconnect/src/main/java/io/apicurio/registry/rules/validity/KafkaConnectContentValidator.java b/schema-util/kconnect/src/main/java/io/apicurio/registry/rules/validity/KafkaConnectContentValidator.java index c7f188ffcf..f5af31913e 100644 --- a/schema-util/kconnect/src/main/java/io/apicurio/registry/rules/validity/KafkaConnectContentValidator.java +++ b/schema-util/kconnect/src/main/java/io/apicurio/registry/rules/validity/KafkaConnectContentValidator.java @@ -39,13 +39,15 @@ public KafkaConnectContentValidator() { * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) */ @Override - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException { + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException { if (level == ValidityLevel.SYNTAX_ONLY || level == ValidityLevel.FULL) { try { JsonNode jsonNode = 
mapper.readTree(content.getContent().content()); jsonConverter.asConnectSchema(jsonNode); } catch (Exception e) { - throw new RuleViolationException("Syntax violation for Kafka Connect Schema artifact.", RuleType.VALIDITY, level.name(), e); + throw new RuleViolationException("Syntax violation for Kafka Connect Schema artifact.", + RuleType.VALIDITY, level.name(), e); } } } @@ -54,7 +56,8 @@ public void validate(ValidityLevel level, TypedContent content, Map references) throws RuleViolationException { + public void validateReferences(TypedContent content, List references) + throws RuleViolationException { // Note: not yet implemented! } diff --git a/schema-util/openapi/pom.xml b/schema-util/openapi/pom.xml index 84630762a0..152aec0af7 100644 --- a/schema-util/openapi/pom.xml +++ b/schema-util/openapi/pom.xml @@ -1,57 +1,55 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-schema-util-openapi - jar - apicurio-registry-schema-util-openapi + apicurio-registry-schema-util-openapi + jar + apicurio-registry-schema-util-openapi - + - - io.apicurio - apicurio-registry-schema-util-common - - - - io.apicurio - apicurio-data-models - + + io.apicurio + apicurio-registry-schema-util-common + - - org.slf4j - slf4j-api - + + io.apicurio + apicurio-data-models + - + + org.slf4j + slf4j-api + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/openapi/src/main/java/io/apicurio/registry/content/canon/OpenApiContentCanonicalizer.java b/schema-util/openapi/src/main/java/io/apicurio/registry/content/canon/OpenApiContentCanonicalizer.java index d365945492..5dfcbf3666 100644 --- 
a/schema-util/openapi/src/main/java/io/apicurio/registry/content/canon/OpenApiContentCanonicalizer.java +++ b/schema-util/openapi/src/main/java/io/apicurio/registry/content/canon/OpenApiContentCanonicalizer.java @@ -11,13 +11,13 @@ import java.util.Map; /** - * An OpenAPI content canonicalizer. This will remove any extra formatting such as whitespace - * and also sort all fields/properties for all objects (because ordering of properties does not - * matter). + * An OpenAPI content canonicalizer. This will remove any extra formatting such as whitespace and also sort + * all fields/properties for all objects (because ordering of properties does not matter). */ public class OpenApiContentCanonicalizer implements ContentCanonicalizer { - private final ObjectMapper mapper = new ObjectMapper().enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); + private final ObjectMapper mapper = new ObjectMapper() + .enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); /** * @see ContentCanonicalizer#canonicalize(TypedContent, Map) diff --git a/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/ApicurioDataModelsContentDereferencer.java b/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/ApicurioDataModelsContentDereferencer.java index a07a3f0cad..1b5844ab37 100644 --- a/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/ApicurioDataModelsContentDereferencer.java +++ b/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/ApicurioDataModelsContentDereferencer.java @@ -24,14 +24,16 @@ public TypedContent dereference(TypedContent content, Map IReferenceResolver resolver = new RegistryReferenceResolver(resolvedReferences); Document dereferencedDoc = Library.dereferenceDocument(document, resolver, false); String dereferencedContentStr = Library.writeDocumentToJSONString(dereferencedDoc); - return TypedContent.create(ContentHandle.create(dereferencedContentStr), 
ContentTypes.APPLICATION_JSON); + return TypedContent.create(ContentHandle.create(dereferencedContentStr), + ContentTypes.APPLICATION_JSON); } catch (IOException e) { throw new RuntimeException(e); } } - + /** - * @see io.apicurio.registry.content.dereference.ContentDereferencer#rewriteReferences(io.apicurio.registry.content.TypedContent, java.util.Map) + * @see io.apicurio.registry.content.dereference.ContentDereferencer#rewriteReferences(io.apicurio.registry.content.TypedContent, + * java.util.Map) */ @Override public TypedContent rewriteReferences(TypedContent content, Map resolvedReferenceUrls) { @@ -40,7 +42,8 @@ public TypedContent rewriteReferences(TypedContent content, Map Document doc = Library.readDocument((ObjectNode) node); ReferenceRewriter visitor = new ReferenceRewriter(resolvedReferenceUrls); Library.visitTree(doc, visitor, TraverserDirection.down); - return TypedContent.create(ContentHandle.create(Library.writeDocumentToJSONString(doc)), ContentTypes.APPLICATION_JSON); + return TypedContent.create(ContentHandle.create(Library.writeDocumentToJSONString(doc)), + ContentTypes.APPLICATION_JSON); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/ReferenceRewriter.java b/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/ReferenceRewriter.java index 368eb28e79..566d0a71df 100644 --- a/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/ReferenceRewriter.java +++ b/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/ReferenceRewriter.java @@ -13,7 +13,7 @@ * Rewrites all references in a data model using a map of replacements provided. 
*/ public class ReferenceRewriter extends AllNodeVisitor { - + private final Map referenceUrls; /** @@ -35,21 +35,21 @@ protected void visitNode(Node node) { } } } - + /** * @see io.apicurio.datamodels.models.visitors.AllNodeVisitor#visitMessage(io.apicurio.datamodels.models.asyncapi.AsyncApiMessage) */ @Override public void visitMessage(AsyncApiMessage node) { super.visitMessage(node); - + // Note: for now we have special handling of the payload because it's not yet fully modeled in the // apicurio-data-models library. JsonNode payload = node.getPayload(); if (payload != null && payload.hasNonNull("$ref")) { String $ref = payload.get("$ref").asText(); if (referenceUrls.containsKey($ref)) { - ((ObjectNode) payload).put("$ref", referenceUrls.get($ref)); + ((ObjectNode) payload).put("$ref", referenceUrls.get($ref)); } } } diff --git a/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/RegistryReferenceResolver.java b/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/RegistryReferenceResolver.java index b6c6a80787..321a37db73 100644 --- a/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/RegistryReferenceResolver.java +++ b/schema-util/openapi/src/main/java/io/apicurio/registry/content/dereference/RegistryReferenceResolver.java @@ -14,11 +14,12 @@ import java.util.Map; public class RegistryReferenceResolver extends LocalReferenceResolver { - + private final Map resolvedReferences; /** * Constructor. 
+ * * @param resolvedReferences */ public RegistryReferenceResolver(Map resolvedReferences) { @@ -26,7 +27,8 @@ public RegistryReferenceResolver(Map resolvedReferences) { } /** - * @see io.apicurio.datamodels.refs.IReferenceResolver#resolveRef(java.lang.String, io.apicurio.datamodels.models.Node) + * @see io.apicurio.datamodels.refs.IReferenceResolver#resolveRef(java.lang.String, + * io.apicurio.datamodels.models.Node) */ @Override public Node resolveRef(String reference, Node from) { @@ -37,7 +39,8 @@ public Node resolveRef(String reference, Node from) { Document resolvedRefDoc = Library.readDocument((ObjectNode) node); JsonPointerExternalReference ref = new JsonPointerExternalReference(reference); return super.resolveRef(ref.getComponent(), resolvedRefDoc); - // TODO if we find a Node, make sure to modify it by updating all of its $ref values to point to appropriate locations + // TODO if we find a Node, make sure to modify it by updating all of its $ref values to point + // to appropriate locations } // TODO handle recursive $ref values (refs from refs) diff --git a/schema-util/openapi/src/main/java/io/apicurio/registry/content/extract/ApicurioDataModelsContentExtractor.java b/schema-util/openapi/src/main/java/io/apicurio/registry/content/extract/ApicurioDataModelsContentExtractor.java index 0c302bd0f2..3dedadc898 100644 --- a/schema-util/openapi/src/main/java/io/apicurio/registry/content/extract/ApicurioDataModelsContentExtractor.java +++ b/schema-util/openapi/src/main/java/io/apicurio/registry/content/extract/ApicurioDataModelsContentExtractor.java @@ -1,14 +1,13 @@ package io.apicurio.registry.content.extract; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import io.apicurio.datamodels.Library; import io.apicurio.datamodels.TraverserDirection; import io.apicurio.datamodels.models.Document; import io.apicurio.datamodels.models.Info; import io.apicurio.datamodels.models.visitors.CombinedVisitorAdapter; import 
io.apicurio.registry.content.ContentHandle; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Performs meta-data extraction for OpenAPI content. diff --git a/schema-util/openapi/src/main/java/io/apicurio/registry/content/refs/AbstractDataModelsReferenceFinder.java b/schema-util/openapi/src/main/java/io/apicurio/registry/content/refs/AbstractDataModelsReferenceFinder.java index c4e58125fc..dafc1613fd 100644 --- a/schema-util/openapi/src/main/java/io/apicurio/registry/content/refs/AbstractDataModelsReferenceFinder.java +++ b/schema-util/openapi/src/main/java/io/apicurio/registry/content/refs/AbstractDataModelsReferenceFinder.java @@ -18,10 +18,9 @@ import java.util.stream.Collectors; /** - * Implementation of a reference finder that uses Apicurio Data Models and so supports any specification - * contained therein. Parses the document, finds all $refs, converts them to external references, and - * returns them. - * + * Implementation of a reference finder that uses Apicurio Data Models and so supports any specification + * contained therein. Parses the document, finds all $refs, converts them to external references, and returns + * them. */ public abstract class AbstractDataModelsReferenceFinder implements ReferenceFinder { @@ -39,20 +38,18 @@ public Set findExternalReferences(TypedContent content) { Library.visitTree(doc, visitor, TraverserDirection.down); // Convert to ExternalReference and filter. - return visitor.allReferences.stream() - .map(ref -> new JsonPointerExternalReference(ref)) - .filter(ref -> ref.getResource() != null) - .collect(Collectors.toSet()); + return visitor.allReferences.stream().map(ref -> new JsonPointerExternalReference(ref)) + .filter(ref -> ref.getResource() != null).collect(Collectors.toSet()); } catch (IOException e) { throw new RuntimeException(e); } } - + /** * Visitor that will visit every node looking for "$ref" properties. 
*/ private static class RefFinderVisitor extends AllNodeVisitor { - + public Set allReferences = new HashSet<>(); /** @@ -67,13 +64,14 @@ protected void visitNode(Node node) { } } } - + /** * @see io.apicurio.datamodels.models.visitors.AllNodeVisitor#visitMessage(io.apicurio.datamodels.models.asyncapi.AsyncApiMessage) */ @Override public void visitMessage(AsyncApiMessage node) { - // Note: special handling of message payloads because data-models doesn't fully model the payload yet. + // Note: special handling of message payloads because data-models doesn't fully model the payload + // yet. JsonNode payload = node.getPayload(); if (payload != null && payload.has("$ref") && !payload.get("$ref").isNull()) { String ref = payload.get("$ref").asText(); @@ -81,7 +79,7 @@ public void visitMessage(AsyncApiMessage node) { } super.visitMessage(node); } - + } } diff --git a/schema-util/openapi/src/main/java/io/apicurio/registry/content/refs/OpenApiReferenceFinder.java b/schema-util/openapi/src/main/java/io/apicurio/registry/content/refs/OpenApiReferenceFinder.java index f9ace67c67..051e16d475 100644 --- a/schema-util/openapi/src/main/java/io/apicurio/registry/content/refs/OpenApiReferenceFinder.java +++ b/schema-util/openapi/src/main/java/io/apicurio/registry/content/refs/OpenApiReferenceFinder.java @@ -1,7 +1,7 @@ package io.apicurio.registry.content.refs; /** - * OpenAPI implementation of a reference finder. Parses the OpenAPI document, finds all $refs, converts them + * OpenAPI implementation of a reference finder. Parses the OpenAPI document, finds all $refs, converts them * to external references, and returns them. 
*/ public class OpenApiReferenceFinder extends AbstractDataModelsReferenceFinder { diff --git a/schema-util/openapi/src/main/java/io/apicurio/registry/rules/validity/ApicurioDataModelContentValidator.java b/schema-util/openapi/src/main/java/io/apicurio/registry/rules/validity/ApicurioDataModelContentValidator.java index 3ce358b698..067e1d9fed 100644 --- a/schema-util/openapi/src/main/java/io/apicurio/registry/rules/validity/ApicurioDataModelContentValidator.java +++ b/schema-util/openapi/src/main/java/io/apicurio/registry/rules/validity/ApicurioDataModelContentValidator.java @@ -33,26 +33,29 @@ public abstract class ApicurioDataModelContentValidator implements ContentValida * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) */ @Override - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException { + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException { Document document = null; if (level == ValidityLevel.SYNTAX_ONLY || level == ValidityLevel.FULL) { try { JsonNode node = ContentTypeUtil.parseJsonOrYaml(content); document = Library.readDocument((ObjectNode) node); } catch (Exception e) { - throw new RuleViolationException("Syntax violation for " + getDataModelType() + " artifact.", RuleType.VALIDITY, level.name(), e); + throw new RuleViolationException("Syntax violation for " + getDataModelType() + " artifact.", + RuleType.VALIDITY, level.name(), e); } } if (level == ValidityLevel.FULL) { List problems = Library.validate(document, null); if (!problems.isEmpty()) { - Set causes = problems.stream().map(problem -> new RuleViolation(problem.message, problem.nodePath.toString())).collect(Collectors.toSet()); + Set causes = problems.stream() + .map(problem -> new RuleViolation(problem.message, problem.nodePath.toString())) + .collect(Collectors.toSet()); throw new RuleViolationException( - "The 
" + getDataModelType() + " artifact is not semantically valid. " + problems.size() + " problems found.", - RuleType.VALIDITY, - level.name(), - causes); + "The " + getDataModelType() + " artifact is not semantically valid. " + + problems.size() + " problems found.", + RuleType.VALIDITY, level.name(), causes); } } } @@ -61,14 +64,17 @@ public void validate(ValidityLevel level, TypedContent content, Map references) throws RuleViolationException { + public void validateReferences(TypedContent content, List references) + throws RuleViolationException { Set mappedRefs = references.stream().map(ref -> ref.getName()).collect(Collectors.toSet()); Set all$refs = getAll$refs(content); - Set violations = all$refs.stream().filter(ref -> !mappedRefs.contains(ref)).map(missingRef -> { - return new RuleViolation("Unmapped reference detected.", missingRef); - }).collect(Collectors.toSet()); + Set violations = all$refs.stream().filter(ref -> !mappedRefs.contains(ref)) + .map(missingRef -> { + return new RuleViolation("Unmapped reference detected.", missingRef); + }).collect(Collectors.toSet()); if (!violations.isEmpty()) { - throw new RuleViolationException("Unmapped reference(s) detected.", RuleType.INTEGRITY, IntegrityLevel.ALL_REFS_MAPPED.name(), violations); + throw new RuleViolationException("Unmapped reference(s) detected.", RuleType.INTEGRITY, + IntegrityLevel.ALL_REFS_MAPPED.name(), violations); } } @@ -85,12 +91,12 @@ public void validateReferences(TypedContent content, List ref } /** - * Returns the type of data model being validated. Subclasses must implement. + * Returns the type of data model being validated. Subclasses must implement. 
*/ protected abstract String getDataModelType(); private static class RefFinder extends AllNodeVisitor { - + Set references = new HashSet<>(); /** @@ -105,7 +111,7 @@ protected void visitNode(Node node) { } } } - + } } diff --git a/schema-util/protobuf/pom.xml b/schema-util/protobuf/pom.xml index 24ad44b4c9..de3e16af54 100644 --- a/schema-util/protobuf/pom.xml +++ b/schema-util/protobuf/pom.xml @@ -1,55 +1,53 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-schema-util-protobuf - jar - apicurio-registry-schema-util-protobuf + apicurio-registry-schema-util-protobuf + jar + apicurio-registry-schema-util-protobuf - + - - io.apicurio - apicurio-registry-schema-util-common - + + io.apicurio + apicurio-registry-schema-util-common + - - io.apicurio - apicurio-registry-protobuf-schema-utilities - - - com.google.protobuf - protobuf-java-util - - + + io.apicurio + apicurio-registry-protobuf-schema-utilities + + + com.google.protobuf + protobuf-java-util + + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/protobuf/src/main/java/io/apicurio/registry/content/canon/ProtobufContentCanonicalizer.java b/schema-util/protobuf/src/main/java/io/apicurio/registry/content/canon/ProtobufContentCanonicalizer.java index f81f36fd6f..e71146f8a0 100644 --- a/schema-util/protobuf/src/main/java/io/apicurio/registry/content/canon/ProtobufContentCanonicalizer.java +++ b/schema-util/protobuf/src/main/java/io/apicurio/registry/content/canon/ProtobufContentCanonicalizer.java @@ -11,21 +11,23 @@ /** * A Protobuf implementation of a content Canonicalizer. 
- * */ public class ProtobufContentCanonicalizer implements ContentCanonicalizer { /** - * @see io.apicurio.registry.content.canon.ContentCanonicalizer#canonicalize(TypedContent, Map) + * @see io.apicurio.registry.content.canon.ContentCanonicalizer#canonicalize(TypedContent, Map) */ @Override public TypedContent canonicalize(TypedContent content, Map resolvedReferences) { try { - ProtoFileElement fileElem = ProtoParser.Companion.parse(FileDescriptorUtils.DEFAULT_LOCATION, content.getContent().content()); + ProtoFileElement fileElem = ProtoParser.Companion.parse(FileDescriptorUtils.DEFAULT_LOCATION, + content.getContent().content()); - //TODO maybe use FileDescriptorUtils to convert to a FileDescriptor and then convert back to ProtoFileElement + // TODO maybe use FileDescriptorUtils to convert to a FileDescriptor and then convert back to + // ProtoFileElement - return TypedContent.create(ContentHandle.create(fileElem.toSchema()), ContentTypes.APPLICATION_PROTOBUF); + return TypedContent.create(ContentHandle.create(fileElem.toSchema()), + ContentTypes.APPLICATION_PROTOBUF); } catch (Throwable e) { return content; } diff --git a/schema-util/protobuf/src/main/java/io/apicurio/registry/content/dereference/ProtobufDereferencer.java b/schema-util/protobuf/src/main/java/io/apicurio/registry/content/dereference/ProtobufDereferencer.java index 6f26afe5be..a3aee6e1c8 100644 --- a/schema-util/protobuf/src/main/java/io/apicurio/registry/content/dereference/ProtobufDereferencer.java +++ b/schema-util/protobuf/src/main/java/io/apicurio/registry/content/dereference/ProtobufDereferencer.java @@ -19,15 +19,15 @@ public class ProtobufDereferencer implements ContentDereferencer { @Override public TypedContent dereference(TypedContent content, Map resolvedReferences) { - final ProtoFileElement protoFileElement = ProtobufFile.toProtoFileElement(content.getContent().content()); - final Map schemaDefs = Collections.unmodifiableMap(resolvedReferences.entrySet() - .stream() - 
.collect(Collectors.toMap( - Map.Entry::getKey, - e -> e.getValue().getContent().content() - ))); - - DescriptorProtos.FileDescriptorProto fileDescriptorProto = FileDescriptorUtils.toFileDescriptorProto(content.getContent().content(), FileDescriptorUtils.firstMessage(protoFileElement).getName(), Optional.ofNullable(protoFileElement.getPackageName()), schemaDefs); + final ProtoFileElement protoFileElement = ProtobufFile + .toProtoFileElement(content.getContent().content()); + final Map schemaDefs = Collections + .unmodifiableMap(resolvedReferences.entrySet().stream().collect( + Collectors.toMap(Map.Entry::getKey, e -> e.getValue().getContent().content()))); + + DescriptorProtos.FileDescriptorProto fileDescriptorProto = FileDescriptorUtils.toFileDescriptorProto( + content.getContent().content(), FileDescriptorUtils.firstMessage(protoFileElement).getName(), + Optional.ofNullable(protoFileElement.getPackageName()), schemaDefs); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); try { @@ -36,12 +36,14 @@ public TypedContent dereference(TypedContent content, Map throw new RuntimeException(e); } - //Dereference returns the whole file descriptor bytes representing the main protobuf schema with the required dependencies. - return TypedContent.create(ContentHandle.create(outputStream.toByteArray()), ContentTypes.APPLICATION_PROTOBUF); + // Dereference returns the whole file descriptor bytes representing the main protobuf schema with the + // required dependencies. 
+ return TypedContent.create(ContentHandle.create(outputStream.toByteArray()), + ContentTypes.APPLICATION_PROTOBUF); } /** - * @see io.apicurio.registry.content.dereference.ContentDereferencer#rewriteReferences(TypedContent, Map) + * @see io.apicurio.registry.content.dereference.ContentDereferencer#rewriteReferences(TypedContent, Map) */ @Override public TypedContent rewriteReferences(TypedContent content, Map resolvedReferenceUrls) { diff --git a/schema-util/protobuf/src/main/java/io/apicurio/registry/content/refs/ProtobufReferenceFinder.java b/schema-util/protobuf/src/main/java/io/apicurio/registry/content/refs/ProtobufReferenceFinder.java index fb248db9ad..8d67aaeb7a 100644 --- a/schema-util/protobuf/src/main/java/io/apicurio/registry/content/refs/ProtobufReferenceFinder.java +++ b/schema-util/protobuf/src/main/java/io/apicurio/registry/content/refs/ProtobufReferenceFinder.java @@ -15,16 +15,17 @@ * A Google Protocol Buffer implementation of a reference finder. */ public class ProtobufReferenceFinder implements ReferenceFinder { - + private static final Logger log = LoggerFactory.getLogger(ProtobufReferenceFinder.class); /** - * @see io.apicurio.registry.content.refs.ReferenceFinder#findExternalReferences(TypedContent) + * @see io.apicurio.registry.content.refs.ReferenceFinder#findExternalReferences(TypedContent) */ @Override public Set findExternalReferences(TypedContent content) { try { - ProtoFileElement protoFileElement = ProtobufFile.toProtoFileElement(content.getContent().content()); + ProtoFileElement protoFileElement = ProtobufFile + .toProtoFileElement(content.getContent().content()); Set allImports = new HashSet<>(); allImports.addAll(protoFileElement.getImports()); allImports.addAll(protoFileElement.getPublicImports()); diff --git a/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/compatibility/ProtobufCompatibilityChecker.java b/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/compatibility/ProtobufCompatibilityChecker.java 
index 0f8e707b6e..c43e9a9d0c 100644 --- a/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/compatibility/ProtobufCompatibilityChecker.java +++ b/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/compatibility/ProtobufCompatibilityChecker.java @@ -13,7 +13,9 @@ public class ProtobufCompatibilityChecker implements CompatibilityChecker { @Override - public CompatibilityExecutionResult testCompatibility(CompatibilityLevel compatibilityLevel, List existingArtifacts, TypedContent proposedArtifact, Map resolvedReferences) { + public CompatibilityExecutionResult testCompatibility(CompatibilityLevel compatibilityLevel, + List existingArtifacts, TypedContent proposedArtifact, + Map resolvedReferences) { requireNonNull(compatibilityLevel, "compatibilityLevel MUST NOT be null"); requireNonNull(existingArtifacts, "existingArtifacts MUST NOT be null"); requireNonNull(proposedArtifact, "proposedArtifact MUST NOT be null"); @@ -22,7 +24,8 @@ public CompatibilityExecutionResult testCompatibility(CompatibilityLevel compati return CompatibilityExecutionResult.compatible(); } - ProtobufFile fileBefore = new ProtobufFile(existingArtifacts.get(existingArtifacts.size() - 1).getContent().content()); + ProtobufFile fileBefore = new ProtobufFile( + existingArtifacts.get(existingArtifacts.size() - 1).getContent().content()); ProtobufFile fileAfter = new ProtobufFile(proposedArtifact.getContent().content()); switch (compatibilityLevel) { @@ -50,12 +53,14 @@ public CompatibilityExecutionResult testCompatibility(CompatibilityLevel compati } @NotNull - private CompatibilityExecutionResult testFullTransitive(List existingSchemas, ProtobufFile fileAfter) { + private CompatibilityExecutionResult testFullTransitive(List existingSchemas, + ProtobufFile fileAfter) { ProtobufFile fileBefore; for (TypedContent existing : existingSchemas) { fileBefore = new ProtobufFile(existing.getContent().content()); if (!testFull(fileBefore, fileAfter).isCompatible()) { - return 
CompatibilityExecutionResult.incompatible("The new version of the protobuf artifact is not fully compatible."); + return CompatibilityExecutionResult + .incompatible("The new version of the protobuf artifact is not fully compatible."); } } return CompatibilityExecutionResult.compatible(); @@ -63,23 +68,29 @@ private CompatibilityExecutionResult testFullTransitive(List exist @NotNull private CompatibilityExecutionResult testFull(ProtobufFile fileBefore, ProtobufFile fileAfter) { - ProtobufCompatibilityCheckerLibrary backwardChecker = new ProtobufCompatibilityCheckerLibrary(fileBefore, fileAfter); - ProtobufCompatibilityCheckerLibrary forwardChecker = new ProtobufCompatibilityCheckerLibrary(fileAfter, fileBefore); + ProtobufCompatibilityCheckerLibrary backwardChecker = new ProtobufCompatibilityCheckerLibrary( + fileBefore, fileAfter); + ProtobufCompatibilityCheckerLibrary forwardChecker = new ProtobufCompatibilityCheckerLibrary( + fileAfter, fileBefore); if (!backwardChecker.validate() && !forwardChecker.validate()) { - return CompatibilityExecutionResult.incompatible("The new version of the protobuf artifact is not fully compatible."); + return CompatibilityExecutionResult + .incompatible("The new version of the protobuf artifact is not fully compatible."); } else { return CompatibilityExecutionResult.compatible(); } } @NotNull - private CompatibilityExecutionResult testForwardTransitive(List existingSchemas, ProtobufFile fileAfter) { + private CompatibilityExecutionResult testForwardTransitive(List existingSchemas, + ProtobufFile fileAfter) { ProtobufFile fileBefore; for (TypedContent existing : existingSchemas) { fileBefore = new ProtobufFile(existing.getContent().content()); - ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileAfter, fileBefore); + ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileAfter, + fileBefore); if (!checker.validate()) { - return 
CompatibilityExecutionResult.incompatible("The new version of the protobuf artifact is not forward compatible."); + return CompatibilityExecutionResult + .incompatible("The new version of the protobuf artifact is not forward compatible."); } } return CompatibilityExecutionResult.compatible(); @@ -87,22 +98,27 @@ private CompatibilityExecutionResult testForwardTransitive(List ex @NotNull private CompatibilityExecutionResult testForward(ProtobufFile fileBefore, ProtobufFile fileAfter) { - ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileAfter, fileBefore); + ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileAfter, + fileBefore); if (checker.validate()) { return CompatibilityExecutionResult.compatible(); } else { - return CompatibilityExecutionResult.incompatible("The new version of the protobuf artifact is not forward compatible."); + return CompatibilityExecutionResult + .incompatible("The new version of the protobuf artifact is not forward compatible."); } } @NotNull - private CompatibilityExecutionResult testBackwardTransitive(List existingSchemas, ProtobufFile fileAfter) { + private CompatibilityExecutionResult testBackwardTransitive(List existingSchemas, + ProtobufFile fileAfter) { ProtobufFile fileBefore; for (TypedContent existing : existingSchemas) { fileBefore = new ProtobufFile(existing.getContent().content()); - ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileBefore, fileAfter); + ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileBefore, + fileAfter); if (!checker.validate()) { - return CompatibilityExecutionResult.incompatible("The new version of the protobuf artifact is not backward compatible."); + return CompatibilityExecutionResult + .incompatible("The new version of the protobuf artifact is not backward compatible."); } } return CompatibilityExecutionResult.compatible(); @@ -110,11 +126,13 
@@ private CompatibilityExecutionResult testBackwardTransitive(List e @NotNull private CompatibilityExecutionResult testBackward(ProtobufFile fileBefore, ProtobufFile fileAfter) { - ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileBefore, fileAfter); + ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileBefore, + fileAfter); if (checker.validate()) { return CompatibilityExecutionResult.compatible(); } else { - return CompatibilityExecutionResult.incompatible("The new version of the protobuf artifact is not backward compatible."); + return CompatibilityExecutionResult + .incompatible("The new version of the protobuf artifact is not backward compatible."); } } } \ No newline at end of file diff --git a/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/compatibility/protobuf/ProtobufCompatibilityCheckerLibrary.java b/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/compatibility/protobuf/ProtobufCompatibilityCheckerLibrary.java index 8e60002e1d..c4a0df038a 100644 --- a/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/compatibility/protobuf/ProtobufCompatibilityCheckerLibrary.java +++ b/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/compatibility/protobuf/ProtobufCompatibilityCheckerLibrary.java @@ -22,7 +22,7 @@ * @see Protolock */ public class ProtobufCompatibilityCheckerLibrary { - // TODO https://github.com/square/wire/issues/797 RFE: capture EnumElement reserved info + // TODO https://github.com/square/wire/issues/797 RFE: capture EnumElement reserved info private final ProtobufFile fileBefore; private final ProtobufFile fileAfter; @@ -53,7 +53,8 @@ public List findDifferences() { } /** - * Determine if any message's previously reserved fields or IDs are now being used as part of the same message. + * Determine if any message's previously reserved fields or IDs are now being used as part of the same + * message. *

* Note: TODO can't currently validate enum reserved fields, as the parser doesn't capture those. * @@ -72,7 +73,9 @@ public List checkNoUsingReservedFields() { Set intersection = new HashSet<>(entry.getValue()); intersection.retainAll(old); if (!intersection.isEmpty()) { - issues.add(ProtobufDifference.from(String.format("Conflict of reserved %d fields, message %s", intersection.size(), entry.getKey()))); + issues.add(ProtobufDifference + .from(String.format("Conflict of reserved %d fields, message %s", + intersection.size(), entry.getKey()))); } } } @@ -103,10 +106,13 @@ public List checkNoRemovingReservedFields() { int diff = entry.getValue().size() - intersection.size(); if (diff != 0) { - issues.add(ProtobufDifference.from(String.format("%d reserved fields were removed, message %s", diff, entry.getKey()))); + issues.add(ProtobufDifference.from(String + .format("%d reserved fields were removed, message %s", diff, entry.getKey()))); } } else { - issues.add(ProtobufDifference.from(String.format("%d reserved fields were removed, message %s", entry.getValue().size(), entry.getKey()))); + issues.add( + ProtobufDifference.from(String.format("%d reserved fields were removed, message %s", + entry.getValue().size(), entry.getKey()))); } } @@ -141,21 +147,24 @@ public List checkNoRemovingFieldsWithoutReserve() { // count once for each non-reserved field name Set reserved = afterReservedFields.getOrDefault(entry.getKey(), Collections.emptySet()); - Set nonreserved = afterNonreservedFields.getOrDefault(entry.getKey(), Collections.emptySet()); + Set nonreserved = afterNonreservedFields.getOrDefault(entry.getKey(), + Collections.emptySet()); Set nonReservedRemovedFieldNames = new HashSet<>(removedFieldNames); nonReservedRemovedFieldNames.removeAll(reserved); issuesCount += nonReservedRemovedFieldNames.size(); // count again for each non-reserved field id for (FieldElement fieldElement : entry.getValue().values()) { - if (removedFieldNames.contains(fieldElement.getName()) && - 
!(reserved.contains(fieldElement.getTag()) || nonreserved.contains(fieldElement.getTag()))) { + if (removedFieldNames.contains(fieldElement.getName()) + && !(reserved.contains(fieldElement.getTag()) + || nonreserved.contains(fieldElement.getTag()))) { issuesCount++; } } if (issuesCount > 0) { - issues.add(ProtobufDifference.from(String.format("%d fields removed without reservation, message %s", issuesCount, entry.getKey()))); + issues.add(ProtobufDifference.from(String.format( + "%d fields removed without reservation, message %s", issuesCount, entry.getKey()))); } } @@ -181,7 +190,9 @@ public List checkNoChangingFieldIDs() { for (Map.Entry beforeKV : entry.getValue().entrySet()) { FieldElement afterFE = afterMap.get(beforeKV.getKey()); if (afterFE != null && beforeKV.getValue().getTag() != afterFE.getTag()) { - issues.add(ProtobufDifference.from(String.format("Conflict, field id changed, message %s , before: %s , after %s", entry.getKey(), beforeKV.getValue().getTag(), afterFE.getTag()))); + issues.add(ProtobufDifference.from(String.format( + "Conflict, field id changed, message %s , before: %s , after %s", + entry.getKey(), beforeKV.getValue().getTag(), afterFE.getTag()))); } } } @@ -197,7 +208,9 @@ public List checkNoChangingFieldIDs() { for (Map.Entry beforeKV : entry.getValue().entrySet()) { EnumConstantElement afterECE = afterMap.get(beforeKV.getKey()); if (afterECE != null && beforeKV.getValue().getTag() != afterECE.getTag()) { - issues.add(ProtobufDifference.from(String.format("Conflict, field id changed, message %s , before: %s , after %s", entry.getKey(), beforeKV.getValue().getTag(), afterECE.getTag()))); + issues.add(ProtobufDifference.from(String.format( + "Conflict, field id changed, message %s , before: %s , after %s", + entry.getKey(), beforeKV.getValue().getTag(), afterECE.getTag()))); } } } @@ -231,11 +244,16 @@ public List checkNoChangingFieldTypes() { String afterType = normalizeType(fileAfter, afterFE.getType()); if (afterFE != null && 
!beforeType.equals(afterType)) { - issues.add(ProtobufDifference.from(String.format("Field type changed, message %s , before: %s , after %s", entry.getKey(), beforeKV.getValue().getType(), afterFE.getType()))); + issues.add(ProtobufDifference.from(String.format( + "Field type changed, message %s , before: %s , after %s", entry.getKey(), + beforeKV.getValue().getType(), afterFE.getType()))); } - if (afterFE != null && !Objects.equals(beforeKV.getValue().getLabel(), afterFE.getLabel())) { - issues.add(ProtobufDifference.from(String.format("Field label changed, message %s , before: %s , after %s", entry.getKey(), beforeKV.getValue().getLabel(), afterFE.getLabel()))); + if (afterFE != null + && !Objects.equals(beforeKV.getValue().getLabel(), afterFE.getLabel())) { + issues.add(ProtobufDifference.from(String.format( + "Field label changed, message %s , before: %s , after %s", entry.getKey(), + beforeKV.getValue().getLabel(), afterFE.getLabel()))); } } } @@ -247,10 +265,10 @@ public List checkNoChangingFieldTypes() { private String normalizeType(ProtobufFile file, String type) { if (type != null && type.startsWith(".")) { - //it's fully qualified + // it's fully qualified String nodot = type.substring(1); if (file.getPackageName() != null && nodot.startsWith(file.getPackageName())) { - //it's fully qualified but it's a message in the same .proto file + // it's fully qualified but it's a message in the same .proto file return nodot.substring(file.getPackageName().length() + 1); } return nodot; @@ -281,7 +299,9 @@ public List checkNoChangingFieldNames() { String nameAfter = afterMap.get(beforeKV.getKey()); if (!beforeKV.getValue().equals(nameAfter)) { - issues.add(ProtobufDifference.from(String.format("Field name changed, message %s , before: %s , after %s", entry.getKey(), beforeKV.getValue(), nameAfter))); + issues.add(ProtobufDifference + .from(String.format("Field name changed, message %s , before: %s , after %s", + entry.getKey(), beforeKV.getValue(), nameAfter))); } 
} } @@ -311,7 +331,8 @@ public List checkNoRemovingServiceRPCs() { } if (diff.size() > 0) { - issues.add(ProtobufDifference.from(String.format("%d rpc services removed, message %s", diff.size(), entry.getKey()))); + issues.add(ProtobufDifference.from( + String.format("%d rpc services removed, message %s", diff.size(), entry.getKey()))); } } @@ -338,7 +359,9 @@ public List checkNoChangingRPCSignature() { for (Map.Entry beforeKV : entry.getValue().entrySet()) { String afterSig = afterMap.get(beforeKV.getKey()); if (!beforeKV.getValue().equals(afterSig)) { - issues.add(ProtobufDifference.from(String.format("rpc service signature changed, message %s , before %s , after %s", entry.getKey(), beforeKV.getValue(), afterSig))); + issues.add(ProtobufDifference.from(String.format( + "rpc service signature changed, message %s , before %s , after %s", + entry.getKey(), beforeKV.getValue(), afterSig))); } } @@ -366,8 +389,11 @@ public List checkRequiredFields() { if (beforeMap != null) { for (Map.Entry afterKV : entry.getValue().entrySet()) { FieldElement afterSig = beforeMap.get(afterKV.getKey()); - if (afterSig == null && afterKV.getValue().getLabel() != null && afterKV.getValue().getLabel().equals(Field.Label.REQUIRED)) { - issues.add(ProtobufDifference.from(String.format("required field added in new version, message %s, after %s", entry.getKey(), afterKV.getValue()))); + if (afterSig == null && afterKV.getValue().getLabel() != null + && afterKV.getValue().getLabel().equals(Field.Label.REQUIRED)) { + issues.add(ProtobufDifference.from( + String.format("required field added in new version, message %s, after %s", + entry.getKey(), afterKV.getValue()))); } } } diff --git a/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/validity/ProtobufContentValidator.java b/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/validity/ProtobufContentValidator.java index ebfe4a805e..07f8966cf7 100644 --- 
a/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/validity/ProtobufContentValidator.java +++ b/schema-util/protobuf/src/main/java/io/apicurio/registry/rules/validity/ProtobufContentValidator.java @@ -24,7 +24,6 @@ /** * A content validator implementation for the Protobuf content type. - * */ public class ProtobufContentValidator implements ContentValidator { @@ -38,34 +37,44 @@ public ProtobufContentValidator() { * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) */ @Override - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException { + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException { if (level == ValidityLevel.SYNTAX_ONLY || level == ValidityLevel.FULL) { try { if (resolvedReferences == null || resolvedReferences.isEmpty()) { ProtobufFile.toProtoFileElement(content.getContent().content()); } else { - final ProtoFileElement protoFileElement = ProtobufFile.toProtoFileElement(content.getContent().content()); - final Map dependencies = Collections.unmodifiableMap(resolvedReferences.entrySet() - .stream() - .collect(Collectors.toMap( - Map.Entry::getKey, - e -> ProtobufFile.toProtoFileElement(e.getValue().getContent().content()) - ))); + final ProtoFileElement protoFileElement = ProtobufFile + .toProtoFileElement(content.getContent().content()); + final Map dependencies = Collections + .unmodifiableMap(resolvedReferences.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> ProtobufFile + .toProtoFileElement(e.getValue().getContent().content())))); MessageElement firstMessage = FileDescriptorUtils.firstMessage(protoFileElement); if (firstMessage != null) { try { - final Descriptors.Descriptor fileDescriptor = FileDescriptorUtils.toDescriptor(firstMessage.getName(), protoFileElement, dependencies); - 
TypedContent.create(ContentHandle.create(fileDescriptor.toString()), ContentTypes.APPLICATION_PROTOBUF); + final Descriptors.Descriptor fileDescriptor = FileDescriptorUtils + .toDescriptor(firstMessage.getName(), protoFileElement, dependencies); + TypedContent.create(ContentHandle.create(fileDescriptor.toString()), + ContentTypes.APPLICATION_PROTOBUF); } catch (IllegalStateException ise) { - //If we fail to init the dynamic schema, try to get the descriptor from the proto element - TypedContent.create(ContentHandle.create(getFileDescriptorFromElement(protoFileElement).toString()), ContentTypes.APPLICATION_PROTOBUF); + // If we fail to init the dynamic schema, try to get the descriptor from the proto + // element + TypedContent.create( + ContentHandle.create( + getFileDescriptorFromElement(protoFileElement).toString()), + ContentTypes.APPLICATION_PROTOBUF); } } else { - TypedContent.create(ContentHandle.create(getFileDescriptorFromElement(protoFileElement).toString()), ContentTypes.APPLICATION_PROTOBUF); + TypedContent.create( + ContentHandle + .create(getFileDescriptorFromElement(protoFileElement).toString()), + ContentTypes.APPLICATION_PROTOBUF); } } } catch (Exception e) { - throw new RuleViolationException("Syntax violation for Protobuf artifact.", RuleType.VALIDITY, level.name(), e); + throw new RuleViolationException("Syntax violation for Protobuf artifact.", RuleType.VALIDITY, + level.name(), e); } } } @@ -74,29 +83,35 @@ public void validate(ValidityLevel level, TypedContent content, Map references) throws RuleViolationException { + public void validateReferences(TypedContent content, List references) + throws RuleViolationException { try { - Set mappedRefs = references.stream().map(ref -> ref.getName()).collect(Collectors.toSet()); + Set mappedRefs = references.stream().map(ref -> ref.getName()) + .collect(Collectors.toSet()); - ProtoFileElement protoFileElement = ProtobufFile.toProtoFileElement(content.getContent().content()); + ProtoFileElement 
protoFileElement = ProtobufFile + .toProtoFileElement(content.getContent().content()); Set allImports = new HashSet<>(); allImports.addAll(protoFileElement.getImports()); allImports.addAll(protoFileElement.getPublicImports()); - - Set violations = allImports.stream().filter(_import -> !mappedRefs.contains(_import)).map(missingRef -> { - return new RuleViolation("Unmapped reference detected.", missingRef); - }).collect(Collectors.toSet()); + + Set violations = allImports.stream() + .filter(_import -> !mappedRefs.contains(_import)).map(missingRef -> { + return new RuleViolation("Unmapped reference detected.", missingRef); + }).collect(Collectors.toSet()); if (!violations.isEmpty()) { - throw new RuleViolationException("Unmapped reference(s) detected.", RuleType.INTEGRITY, IntegrityLevel.ALL_REFS_MAPPED.name(), violations); + throw new RuleViolationException("Unmapped reference(s) detected.", RuleType.INTEGRITY, + IntegrityLevel.ALL_REFS_MAPPED.name(), violations); } } catch (RuleViolationException rve) { throw rve; } catch (Exception e) { - // Do nothing - we don't care if it can't validate. Another rule will handle that. + // Do nothing - we don't care if it can't validate. Another rule will handle that. 
} } - private ProtobufSchema getFileDescriptorFromElement(ProtoFileElement fileElem) throws Descriptors.DescriptorValidationException { + private ProtobufSchema getFileDescriptorFromElement(ProtoFileElement fileElem) + throws Descriptors.DescriptorValidationException { Descriptors.FileDescriptor fileDescriptor = FileDescriptorUtils.protoFileToFileDescriptor(fileElem); return new ProtobufSchema(fileDescriptor, fileElem); } diff --git a/schema-util/util-provider/pom.xml b/schema-util/util-provider/pom.xml index 9fbf79a8c6..a928bb7d86 100644 --- a/schema-util/util-provider/pom.xml +++ b/schema-util/util-provider/pom.xml @@ -1,92 +1,90 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-schema-util-provider - jar - apicurio-registry-schema-util-provider + apicurio-registry-schema-util-provider + jar + apicurio-registry-schema-util-provider - + - - io.apicurio - apicurio-registry-schema-util-common - - - io.apicurio - apicurio-registry-schema-util-json - - - io.apicurio - apicurio-registry-schema-util-protobuf - - - io.apicurio - apicurio-registry-schema-util-asyncapi - - - io.apicurio - apicurio-registry-schema-util-avro - - - io.apicurio - apicurio-registry-schema-util-graphql - - - io.apicurio - apicurio-registry-schema-util-kconnect - - - io.apicurio - apicurio-registry-schema-util-openapi - - - io.apicurio - apicurio-registry-schema-util-wsdl - - - io.apicurio - apicurio-registry-schema-util-xml - - - io.apicurio - apicurio-registry-schema-util-xsd - + + io.apicurio + apicurio-registry-schema-util-common + + + io.apicurio + apicurio-registry-schema-util-json + + + io.apicurio + apicurio-registry-schema-util-protobuf + + + io.apicurio + apicurio-registry-schema-util-asyncapi + + + io.apicurio + apicurio-registry-schema-util-avro + + + io.apicurio + apicurio-registry-schema-util-graphql + + + io.apicurio + 
apicurio-registry-schema-util-kconnect + + + io.apicurio + apicurio-registry-schema-util-openapi + + + io.apicurio + apicurio-registry-schema-util-wsdl + + + io.apicurio + apicurio-registry-schema-util-xml + + + io.apicurio + apicurio-registry-schema-util-xsd + - - org.junit.jupiter - junit-jupiter - test - - + + org.junit.jupiter + junit-jupiter + test + + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/ArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/ArtifactTypeUtilProvider.java index ba0efe84ba..3071f274d8 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/ArtifactTypeUtilProvider.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/ArtifactTypeUtilProvider.java @@ -11,20 +11,15 @@ import java.util.Map; /** - * Interface providing different utils per artifact type - * * compatibility checker - * * content canonicalizer - * * content validator - * * rules - * * etc ... - * + * Interface providing different utils per artifact type * compatibility checker * content canonicalizer * + * content validator * rules * etc ... */ public interface ArtifactTypeUtilProvider { String getArtifactType(); /** - * Returns true if the given content is accepted as handled by the provider. Useful - * to know if e.g. some bit of content is an AVRO or OPENAPI. + * Returns true if the given content is accepted as handled by the provider. Useful to know if e.g. some + * bit of content is an AVRO or OPENAPI. 
*/ boolean acceptsContent(TypedContent content, Map resolvedReferences); @@ -37,6 +32,6 @@ public interface ArtifactTypeUtilProvider { ContentExtractor getContentExtractor(); ContentDereferencer getContentDereferencer(); - + ReferenceFinder getReferenceFinder(); } diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/AsyncApiArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/AsyncApiArtifactTypeUtilProvider.java index aa5e8f7e3b..cd15f1451e 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/AsyncApiArtifactTypeUtilProvider.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/AsyncApiArtifactTypeUtilProvider.java @@ -26,7 +26,8 @@ public boolean acceptsContent(TypedContent content, Map re try { String contentType = content.getContentType(); JsonNode tree = null; - // If the content is YAML, then convert it to JSON first (the data-models library only accepts JSON). + // If the content is YAML, then convert it to JSON first (the data-models library only accepts + // JSON). 
if (contentType.toLowerCase().contains("yml") || contentType.toLowerCase().contains("yaml")) { tree = ContentTypeUtil.parseYaml(content.getContent()); } else { diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/AvroArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/AvroArtifactTypeUtilProvider.java index 802014a94c..259d52debe 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/AvroArtifactTypeUtilProvider.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/AvroArtifactTypeUtilProvider.java @@ -27,7 +27,8 @@ public class AvroArtifactTypeUtilProvider extends AbstractArtifactTypeUtilProvid private static final Pattern QUOTED_BRACKETS = Pattern.compile(": *\"\\{}\""); /** - * Given a content removes any quoted brackets. This is useful for some validation corner cases in avro where some libraries detects quoted brackets as valid and others as invalid + * Given a content removes any quoted brackets. 
This is useful for some validation corner cases in avro + * where some libraries detects quoted brackets as valid and others as invalid */ private static String removeQuotedBrackets(String content) { return QUOTED_BRACKETS.matcher(content).replaceAll(":{}"); @@ -37,7 +38,8 @@ private static String removeQuotedBrackets(String content) { public boolean acceptsContent(TypedContent content, Map resolvedReferences) { try { String contentType = content.getContentType(); - if (contentType.toLowerCase().contains("json") && ContentTypeUtil.isParsableJson(content.getContent())) { + if (contentType.toLowerCase().contains("json") + && ContentTypeUtil.isParsableJson(content.getContent())) { // Avro without quote final Schema.Parser parser = new Schema.Parser(); final List schemaRefs = new ArrayList<>(); @@ -52,7 +54,7 @@ public boolean acceptsContent(TypedContent content, Map re return true; } } catch (Exception e) { - //ignored + // ignored } return false; } @@ -86,7 +88,7 @@ protected ContentExtractor createContentExtractor() { public ContentDereferencer getContentDereferencer() { return new AvroDereferencer(); } - + @Override public ReferenceFinder getReferenceFinder() { return new JsonSchemaReferenceFinder(); diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/DefaultArtifactTypeUtilProviderImpl.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/DefaultArtifactTypeUtilProviderImpl.java index ebe9e26ed0..bc82e7f6ae 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/DefaultArtifactTypeUtilProviderImpl.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/DefaultArtifactTypeUtilProviderImpl.java @@ -11,33 +11,22 @@ public class DefaultArtifactTypeUtilProviderImpl implements ArtifactTypeUtilProv protected Map map = new ConcurrentHashMap<>(); protected List providers = new ArrayList( - List.of( - new ProtobufArtifactTypeUtilProvider(), - new 
OpenApiArtifactTypeUtilProvider(), - new AsyncApiArtifactTypeUtilProvider(), - new JsonArtifactTypeUtilProvider(), - new AvroArtifactTypeUtilProvider(), - new GraphQLArtifactTypeUtilProvider(), - new KConnectArtifactTypeUtilProvider(), - new WsdlArtifactTypeUtilProvider(), - new XsdArtifactTypeUtilProvider(), - new XmlArtifactTypeUtilProvider()) - ); + List.of(new ProtobufArtifactTypeUtilProvider(), new OpenApiArtifactTypeUtilProvider(), + new AsyncApiArtifactTypeUtilProvider(), new JsonArtifactTypeUtilProvider(), + new AvroArtifactTypeUtilProvider(), new GraphQLArtifactTypeUtilProvider(), + new KConnectArtifactTypeUtilProvider(), new WsdlArtifactTypeUtilProvider(), + new XsdArtifactTypeUtilProvider(), new XmlArtifactTypeUtilProvider())); @Override public ArtifactTypeUtilProvider getArtifactTypeProvider(String type) { - return map.computeIfAbsent(type, t -> - providers.stream() - .filter(a -> a.getArtifactType().equals(t)) - .findFirst() - .orElseThrow(() -> new IllegalStateException("No such artifact type provider: " + t))); + return map.computeIfAbsent(type, + t -> providers.stream().filter(a -> a.getArtifactType().equals(t)).findFirst().orElseThrow( + () -> new IllegalStateException("No such artifact type provider: " + t))); } @Override public List getAllArtifactTypes() { - return providers.stream() - .map(a -> a.getArtifactType()) - .collect(Collectors.toList()); + return providers.stream().map(a -> a.getArtifactType()).collect(Collectors.toList()); } @Override diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/GraphQLArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/GraphQLArtifactTypeUtilProvider.java index eed629d501..342c912540 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/GraphQLArtifactTypeUtilProvider.java +++ 
b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/GraphQLArtifactTypeUtilProvider.java @@ -25,7 +25,8 @@ public boolean acceptsContent(TypedContent content, Map re try { String contentType = content.getContentType(); if (contentType.toLowerCase().contains("graph")) { - TypeDefinitionRegistry typeRegistry = new SchemaParser().parse(content.getContent().content()); + TypeDefinitionRegistry typeRegistry = new SchemaParser() + .parse(content.getContent().content()); if (typeRegistry != null) { return true; } @@ -65,7 +66,7 @@ protected ContentExtractor createContentExtractor() { public ContentDereferencer getContentDereferencer() { return null; } - + /** * @see io.apicurio.registry.types.provider.ArtifactTypeUtilProvider#getReferenceFinder() */ diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/JsonArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/JsonArtifactTypeUtilProvider.java index d65e28cead..6dd7047de6 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/JsonArtifactTypeUtilProvider.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/JsonArtifactTypeUtilProvider.java @@ -25,9 +25,11 @@ public class JsonArtifactTypeUtilProvider extends AbstractArtifactTypeUtilProvid public boolean acceptsContent(TypedContent content, Map resolvedReferences) { try { String contentType = content.getContentType(); - if (contentType.toLowerCase().contains("json") && ContentTypeUtil.isParsableJson(content.getContent())) { + if (contentType.toLowerCase().contains("json") + && ContentTypeUtil.isParsableJson(content.getContent())) { JsonNode tree = ContentTypeUtil.parseJson(content.getContent()); - if (tree.has("$schema") && tree.get("$schema").asText().contains("json-schema.org") || tree.has("properties")) { + if (tree.has("$schema") && tree.get("$schema").asText().contains("json-schema.org") + 
|| tree.has("properties")) { return true; } } @@ -66,7 +68,7 @@ protected ContentExtractor createContentExtractor() { public ContentDereferencer getContentDereferencer() { return new AsyncApiDereferencer(); } - + @Override public ReferenceFinder getReferenceFinder() { return new JsonSchemaReferenceFinder(); diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/KConnectArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/KConnectArtifactTypeUtilProvider.java index d04e9fe96b..a6348054a9 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/KConnectArtifactTypeUtilProvider.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/KConnectArtifactTypeUtilProvider.java @@ -51,7 +51,7 @@ protected ContentExtractor createContentExtractor() { public ContentDereferencer getContentDereferencer() { return null; } - + /** * @see io.apicurio.registry.types.provider.ArtifactTypeUtilProvider#getReferenceFinder() */ diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/OpenApiArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/OpenApiArtifactTypeUtilProvider.java index d5b401090e..d13e489444 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/OpenApiArtifactTypeUtilProvider.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/OpenApiArtifactTypeUtilProvider.java @@ -26,7 +26,8 @@ public boolean acceptsContent(TypedContent content, Map re try { String contentType = content.getContentType(); JsonNode tree = null; - // If the content is YAML, then convert it to JSON first (the data-models library only accepts JSON). + // If the content is YAML, then convert it to JSON first (the data-models library only accepts + // JSON). 
if (contentType.toLowerCase().contains("yml") || contentType.toLowerCase().contains("yaml")) { tree = ContentTypeUtil.parseYaml(content.getContent()); } else { @@ -70,7 +71,7 @@ protected ContentExtractor createContentExtractor() { public ContentDereferencer getContentDereferencer() { return new AsyncApiDereferencer(); } - + @Override public ReferenceFinder getReferenceFinder() { return new OpenApiReferenceFinder(); diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/ProtobufArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/ProtobufArtifactTypeUtilProvider.java index 6d2dfc61f2..3a07a63fba 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/ProtobufArtifactTypeUtilProvider.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/ProtobufArtifactTypeUtilProvider.java @@ -34,7 +34,8 @@ public boolean acceptsContent(TypedContent content, Map re try { // Attempt to parse binary FileDescriptorProto byte[] bytes = Base64.getDecoder().decode(content.getContent().content()); - FileDescriptorUtils.fileDescriptorToProtoFile(DescriptorProtos.FileDescriptorProto.parseFrom(bytes)); + FileDescriptorUtils + .fileDescriptorToProtoFile(DescriptorProtos.FileDescriptorProto.parseFrom(bytes)); return true; } catch (Exception pe) { // Doesn't seem to be protobuf @@ -72,7 +73,7 @@ protected ContentExtractor createContentExtractor() { public ContentDereferencer getContentDereferencer() { return new ProtobufDereferencer(); } - + @Override public ReferenceFinder getReferenceFinder() { return new ProtobufReferenceFinder(); diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/WsdlArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/WsdlArtifactTypeUtilProvider.java index ef76e32776..55c5e3ff61 100644 --- 
a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/WsdlArtifactTypeUtilProvider.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/WsdlArtifactTypeUtilProvider.java @@ -26,8 +26,10 @@ public class WsdlArtifactTypeUtilProvider extends AbstractArtifactTypeUtilProvid public boolean acceptsContent(TypedContent content, Map resolvedReferences) { try { String contentType = content.getContentType(); - if (contentType.toLowerCase().contains("xml") && ContentTypeUtil.isParsableXml(content.getContent())) { - Document xmlDocument = DocumentBuilderAccessor.getDocumentBuilder().parse(content.getContent().stream()); + if (contentType.toLowerCase().contains("xml") + && ContentTypeUtil.isParsableXml(content.getContent())) { + Document xmlDocument = DocumentBuilderAccessor.getDocumentBuilder() + .parse(content.getContent().stream()); Element root = xmlDocument.getDocumentElement(); String ns = root.getNamespaceURI(); if (ns != null && (ns.equals("http://schemas.xmlsoap.org/wsdl/") @@ -53,7 +55,7 @@ public String getArtifactType() { */ @Override protected CompatibilityChecker createCompatibilityChecker() { - return NoopCompatibilityChecker.INSTANCE; + return NoopCompatibilityChecker.INSTANCE; } /** @@ -84,7 +86,7 @@ protected ContentExtractor createContentExtractor() { public ContentDereferencer getContentDereferencer() { return null; } - + /** * @see io.apicurio.registry.types.provider.ArtifactTypeUtilProvider#getReferenceFinder() */ diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/XmlArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/XmlArtifactTypeUtilProvider.java index edc5db5d79..0a57a9e6c7 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/XmlArtifactTypeUtilProvider.java +++ 
b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/XmlArtifactTypeUtilProvider.java @@ -26,8 +26,10 @@ public class XmlArtifactTypeUtilProvider extends AbstractArtifactTypeUtilProvide public boolean acceptsContent(TypedContent content, Map resolvedReferences) { try { String contentType = content.getContentType(); - if (contentType.toLowerCase().contains("xml") && ContentTypeUtil.isParsableXml(content.getContent())) { - Document xmlDocument = DocumentBuilderAccessor.getDocumentBuilder().parse(content.getContent().stream()); + if (contentType.toLowerCase().contains("xml") + && ContentTypeUtil.isParsableXml(content.getContent())) { + Document xmlDocument = DocumentBuilderAccessor.getDocumentBuilder() + .parse(content.getContent().stream()); Element root = xmlDocument.getDocumentElement(); String ns = root.getNamespaceURI(); if (ns != null && ns.equals("http://www.w3.org/2001/XMLSchema")) { @@ -73,7 +75,7 @@ protected ContentCanonicalizer createContentCanonicalizer() { */ @Override protected ContentValidator createContentValidator() { - return new XmlContentValidator(); + return new XmlContentValidator(); } /** @@ -88,7 +90,7 @@ protected ContentExtractor createContentExtractor() { public ContentDereferencer getContentDereferencer() { return null; } - + /** * @see io.apicurio.registry.types.provider.ArtifactTypeUtilProvider#getReferenceFinder() */ diff --git a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/XsdArtifactTypeUtilProvider.java b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/XsdArtifactTypeUtilProvider.java index d5f08acff6..53f823f69b 100644 --- a/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/XsdArtifactTypeUtilProvider.java +++ b/schema-util/util-provider/src/main/java/io/apicurio/registry/types/provider/XsdArtifactTypeUtilProvider.java @@ -26,8 +26,10 @@ public class XsdArtifactTypeUtilProvider extends AbstractArtifactTypeUtilProvide public 
boolean acceptsContent(TypedContent content, Map resolvedReferences) { try { String contentType = content.getContentType(); - if (contentType.toLowerCase().contains("xml") && ContentTypeUtil.isParsableXml(content.getContent())) { - Document xmlDocument = DocumentBuilderAccessor.getDocumentBuilder().parse(content.getContent().stream()); + if (contentType.toLowerCase().contains("xml") + && ContentTypeUtil.isParsableXml(content.getContent())) { + Document xmlDocument = DocumentBuilderAccessor.getDocumentBuilder() + .parse(content.getContent().stream()); Element root = xmlDocument.getDocumentElement(); String ns = root.getNamespaceURI(); if (ns != null && ns.equals("http://www.w3.org/2001/XMLSchema")) { @@ -83,7 +85,7 @@ protected ContentExtractor createContentExtractor() { public ContentDereferencer getContentDereferencer() { return null; } - + /** * @see io.apicurio.registry.types.provider.ArtifactTypeUtilProvider#getReferenceFinder() */ diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/AsyncApiContentDereferencerTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/AsyncApiContentDereferencerTest.java index 76f8130c7f..58e55ab274 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/AsyncApiContentDereferencerTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/AsyncApiContentDereferencerTest.java @@ -18,14 +18,17 @@ public class AsyncApiContentDereferencerTest extends ArtifactUtilProviderTestBas public void testRewriteReferences() { TypedContent content = resourceToTypedContentHandle("asyncapi-to-rewrite.json"); AsyncApiDereferencer dereferencer = new AsyncApiDereferencer(); - TypedContent modifiedContent = dereferencer.rewriteReferences(content, Map.of( - "./TradeKey.avsc", "https://www.example.org/schemas/TradeKey.avsc", - "./common-types.json#/components/schemas/User", 
"https://www.example.org/schemas/common-types.json#/components/schemas/User")); - + TypedContent modifiedContent = dereferencer.rewriteReferences(content, + Map.of("./TradeKey.avsc", "https://www.example.org/schemas/TradeKey.avsc", + "./common-types.json#/components/schemas/User", + "https://www.example.org/schemas/common-types.json#/components/schemas/User")); + ReferenceFinder finder = new AsyncApiReferenceFinder(); Set externalReferences = finder.findExternalReferences(modifiedContent); - Assertions.assertTrue(externalReferences.contains(new JsonPointerExternalReference("https://www.example.org/schemas/common-types.json#/components/schemas/User"))); - Assertions.assertTrue(externalReferences.contains(new JsonPointerExternalReference("https://www.example.org/schemas/TradeKey.avsc"))); + Assertions.assertTrue(externalReferences.contains(new JsonPointerExternalReference( + "https://www.example.org/schemas/common-types.json#/components/schemas/User"))); + Assertions.assertTrue(externalReferences + .contains(new JsonPointerExternalReference("https://www.example.org/schemas/TradeKey.avsc"))); } } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/JsonSchemaContentDereferencerTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/JsonSchemaContentDereferencerTest.java index 12a962640c..02adf439d9 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/JsonSchemaContentDereferencerTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/JsonSchemaContentDereferencerTest.java @@ -18,14 +18,16 @@ public class JsonSchemaContentDereferencerTest extends ArtifactUtilProviderTestB public void testRewriteReferences() { TypedContent content = resourceToTypedContentHandle("json-schema-to-rewrite.json"); JsonSchemaDereferencer dereferencer = new JsonSchemaDereferencer(); - TypedContent modifiedContent = 
dereferencer.rewriteReferences(content, Map.of( - "./address.json", "https://www.example.org/schemas/address.json", - "./ssn.json", "https://www.example.org/schemas/ssn.json")); - + TypedContent modifiedContent = dereferencer.rewriteReferences(content, + Map.of("./address.json", "https://www.example.org/schemas/address.json", "./ssn.json", + "https://www.example.org/schemas/ssn.json")); + ReferenceFinder finder = new JsonSchemaReferenceFinder(); Set externalReferences = finder.findExternalReferences(modifiedContent); - Assertions.assertTrue(externalReferences.contains(new JsonPointerExternalReference("https://www.example.org/schemas/address.json"))); - Assertions.assertTrue(externalReferences.contains(new JsonPointerExternalReference("https://www.example.org/schemas/ssn.json"))); + Assertions.assertTrue(externalReferences + .contains(new JsonPointerExternalReference("https://www.example.org/schemas/address.json"))); + Assertions.assertTrue(externalReferences + .contains(new JsonPointerExternalReference("https://www.example.org/schemas/ssn.json"))); } } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/OpenApiContentDereferencerTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/OpenApiContentDereferencerTest.java index 96f560dc67..b511b10edd 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/OpenApiContentDereferencerTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/dereference/OpenApiContentDereferencerTest.java @@ -18,14 +18,18 @@ public class OpenApiContentDereferencerTest extends ArtifactUtilProviderTestBase public void testRewriteReferences() { TypedContent content = resourceToTypedContentHandle("openapi-to-rewrite.json"); OpenApiDereferencer dereferencer = new OpenApiDereferencer(); - TypedContent modifiedContent = dereferencer.rewriteReferences(content, Map.of( - 
"./types/bar-types.json#/components/schemas/Bar", "https://www.example.org/schemas/bar-types.json#/components/schemas/Bar", - "./types/foo-types.json#/components/schemas/Foo", "https://www.example.org/schemas/foo-types.json#/components/schemas/Foo")); - + TypedContent modifiedContent = dereferencer.rewriteReferences(content, + Map.of("./types/bar-types.json#/components/schemas/Bar", + "https://www.example.org/schemas/bar-types.json#/components/schemas/Bar", + "./types/foo-types.json#/components/schemas/Foo", + "https://www.example.org/schemas/foo-types.json#/components/schemas/Foo")); + ReferenceFinder finder = new OpenApiReferenceFinder(); Set externalReferences = finder.findExternalReferences(modifiedContent); - Assertions.assertTrue(externalReferences.contains(new JsonPointerExternalReference("https://www.example.org/schemas/bar-types.json#/components/schemas/Bar"))); - Assertions.assertTrue(externalReferences.contains(new JsonPointerExternalReference("https://www.example.org/schemas/foo-types.json#/components/schemas/Foo"))); + Assertions.assertTrue(externalReferences.contains(new JsonPointerExternalReference( + "https://www.example.org/schemas/bar-types.json#/components/schemas/Bar"))); + Assertions.assertTrue(externalReferences.contains(new JsonPointerExternalReference( + "https://www.example.org/schemas/foo-types.json#/components/schemas/Foo"))); } } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/AsyncApiReferenceFinderTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/AsyncApiReferenceFinderTest.java index b374d6c4d6..a985c0e4be 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/AsyncApiReferenceFinderTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/AsyncApiReferenceFinderTest.java @@ -10,7 +10,8 @@ public class AsyncApiReferenceFinderTest extends ArtifactUtilProviderTestBase { /** - * Test method for {@link 
io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(TypedContent)} + * Test method for + * {@link io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(TypedContent)} */ @Test public void testFindExternalReferences() { @@ -19,9 +20,10 @@ public void testFindExternalReferences() { Set foundReferences = finder.findExternalReferences(content); Assertions.assertNotNull(foundReferences); Assertions.assertEquals(2, foundReferences.size()); - Assertions.assertEquals(Set.of( - new JsonPointerExternalReference("./TradeKey.avsc"), - new JsonPointerExternalReference("./common-types.json#/components/schemas/User")), foundReferences); + Assertions.assertEquals( + Set.of(new JsonPointerExternalReference("./TradeKey.avsc"), + new JsonPointerExternalReference("./common-types.json#/components/schemas/User")), + foundReferences); } } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/AvroReferenceFinderTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/AvroReferenceFinderTest.java index 9b51dd7180..a3ee2eac1a 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/AvroReferenceFinderTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/AvroReferenceFinderTest.java @@ -10,7 +10,8 @@ public class AvroReferenceFinderTest extends ArtifactUtilProviderTestBase { /** - * Test method for {@link io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(io.apicurio.registry.content.ContentHandle)}. + * Test method for + * {@link io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(io.apicurio.registry.content.ContentHandle)}. 
*/ @Test public void testFindExternalReferences() { @@ -19,8 +20,7 @@ public void testFindExternalReferences() { Set foundReferences = finder.findExternalReferences(content); Assertions.assertNotNull(foundReferences); Assertions.assertEquals(2, foundReferences.size()); - Assertions.assertEquals(Set.of( - new ExternalReference("com.kubetrade.schema.trade.TradeKey"), + Assertions.assertEquals(Set.of(new ExternalReference("com.kubetrade.schema.trade.TradeKey"), new ExternalReference("com.kubetrade.schema.trade.TradeValue")), foundReferences); } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/JsonSchemaReferenceFinderTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/JsonSchemaReferenceFinderTest.java index 42ca7c9437..6ee15c4d99 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/JsonSchemaReferenceFinderTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/JsonSchemaReferenceFinderTest.java @@ -10,7 +10,8 @@ public class JsonSchemaReferenceFinderTest extends ArtifactUtilProviderTestBase { /** - * Test method for {@link io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(io.apicurio.registry.content.ContentHandle)}. + * Test method for + * {@link io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(io.apicurio.registry.content.ContentHandle)}. 
*/ @Test public void testFindExternalReferences() { @@ -19,7 +20,8 @@ public void testFindExternalReferences() { Set foundReferences = finder.findExternalReferences(content); Assertions.assertNotNull(foundReferences); Assertions.assertEquals(2, foundReferences.size()); - Assertions.assertEquals(Set.of(new JsonPointerExternalReference("./address.json"), new JsonPointerExternalReference("./ssn.json")), foundReferences); + Assertions.assertEquals(Set.of(new JsonPointerExternalReference("./address.json"), + new JsonPointerExternalReference("./ssn.json")), foundReferences); } } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/OpenApiReferenceFinderTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/OpenApiReferenceFinderTest.java index c6a222af11..eae30bff0b 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/OpenApiReferenceFinderTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/OpenApiReferenceFinderTest.java @@ -10,7 +10,8 @@ public class OpenApiReferenceFinderTest extends ArtifactUtilProviderTestBase { /** - * Test method for {@link io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(io.apicurio.registry.content.ContentHandle)}. + * Test method for + * {@link io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(io.apicurio.registry.content.ContentHandle)}. 
*/ @Test public void testFindExternalReferences() { @@ -19,9 +20,10 @@ public void testFindExternalReferences() { Set foundReferences = finder.findExternalReferences(content); Assertions.assertNotNull(foundReferences); Assertions.assertEquals(2, foundReferences.size()); - Assertions.assertEquals(Set.of( - new JsonPointerExternalReference("./types/bar-types.json#/components/schemas/Bar"), - new JsonPointerExternalReference("./types/foo-types.json#/components/schemas/Foo")), foundReferences); + Assertions.assertEquals( + Set.of(new JsonPointerExternalReference("./types/bar-types.json#/components/schemas/Bar"), + new JsonPointerExternalReference("./types/foo-types.json#/components/schemas/Foo")), + foundReferences); } } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/ProtobufReferenceFinderTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/ProtobufReferenceFinderTest.java index 38a462ec9c..e3f3ed4d52 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/ProtobufReferenceFinderTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/content/refs/ProtobufReferenceFinderTest.java @@ -10,7 +10,8 @@ public class ProtobufReferenceFinderTest extends ArtifactUtilProviderTestBase { /** - * Test method for {@link io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(io.apicurio.registry.content.ContentHandle)}. + * Test method for + * {@link io.apicurio.registry.content.refs.AsyncApiReferenceFinder#findExternalReferences(io.apicurio.registry.content.ContentHandle)}. 
*/ @Test public void testFindExternalReferences() { @@ -19,9 +20,8 @@ public void testFindExternalReferences() { Set foundReferences = finder.findExternalReferences(content); Assertions.assertNotNull(foundReferences); Assertions.assertEquals(3, foundReferences.size()); - Assertions.assertEquals(Set.of( - new ExternalReference("google/protobuf/timestamp.proto"), - new ExternalReference("sample/table_info.proto"), + Assertions.assertEquals(Set.of(new ExternalReference("google/protobuf/timestamp.proto"), + new ExternalReference("sample/table_info.proto"), new ExternalReference("sample/table_notification_type.proto")), foundReferences); } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/ArtifactUtilProviderTestBase.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/ArtifactUtilProviderTestBase.java index a4c79dfcb9..94faa4ebb5 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/ArtifactUtilProviderTestBase.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/ArtifactUtilProviderTestBase.java @@ -17,13 +17,13 @@ public class ArtifactUtilProviderTestBase { protected final String resourceToString(String resourceName) { try (InputStream stream = getClass().getResourceAsStream(resourceName)) { Assertions.assertNotNull(stream, "Resource not found: " + resourceName); - return new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)).lines().collect(Collectors.joining("\n")); + return new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8)).lines() + .collect(Collectors.joining("\n")); } catch (IOException e) { throw new RuntimeException(e); } } - protected final ContentHandle resourceToContentHandle(String resourceName) { return ContentHandle.create(resourceToString(resourceName)); } @@ -33,7 +33,8 @@ protected final TypedContent resourceToTypedContentHandle(String resourceName) { if 
(resourceName.toLowerCase().endsWith("yaml") || resourceName.toLowerCase().endsWith("yml")) { ct = ContentTypes.APPLICATION_YAML; } - if (resourceName.toLowerCase().endsWith("xml") || resourceName.toLowerCase().endsWith("wsdl") || resourceName.toLowerCase().endsWith("xsd") ) { + if (resourceName.toLowerCase().endsWith("xml") || resourceName.toLowerCase().endsWith("wsdl") + || resourceName.toLowerCase().endsWith("xsd")) { ct = ContentTypes.APPLICATION_XML; } if (resourceName.toLowerCase().endsWith("proto")) { diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/AvroContentValidatorTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/AvroContentValidatorTest.java index 16df91ce22..27aa8dbc04 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/AvroContentValidatorTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/AvroContentValidatorTest.java @@ -39,16 +39,11 @@ public void testValidateReferences() throws Exception { // Properly map both required references - success. 
{ List references = new ArrayList<>(); - references.add(ArtifactReference.builder() - .groupId("com.example.search") - .artifactId("SearchResultType") - .version("1.0") - .name("com.example.search.SearchResultType").build()); - references.add(ArtifactReference.builder() - .groupId("com.example.actions") - .artifactId("UserAction") - .version("1.1") - .name("com.example.actions.UserAction").build()); + references.add( + ArtifactReference.builder().groupId("com.example.search").artifactId("SearchResultType") + .version("1.0").name("com.example.search.SearchResultType").build()); + references.add(ArtifactReference.builder().groupId("com.example.actions").artifactId("UserAction") + .version("1.1").name("com.example.actions.UserAction").build()); validator.validateReferences(content, references); } @@ -61,27 +56,20 @@ public void testValidateReferences() throws Exception { // Only map one of the two required refs - failure. Assertions.assertThrows(RuleViolationException.class, () -> { List references = new ArrayList<>(); - references.add(ArtifactReference.builder() - .groupId("com.example.search") - .artifactId("SearchResultType") - .version("1.0") - .name("com.example.search.SearchResultType").build()); + references.add( + ArtifactReference.builder().groupId("com.example.search").artifactId("SearchResultType") + .version("1.0").name("com.example.search.SearchResultType").build()); validator.validateReferences(content, references); }); // Only map one of the two required refs - failure. 
Assertions.assertThrows(RuleViolationException.class, () -> { List references = new ArrayList<>(); - references.add(ArtifactReference.builder() - .groupId("com.example.search") - .artifactId("SearchResultType") - .version("1.0") - .name("com.example.search.SearchResultType").build()); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("WrongType") - .version("2.3") - .name("com.example.invalid.WrongType").build()); + references.add( + ArtifactReference.builder().groupId("com.example.search").artifactId("SearchResultType") + .version("1.0").name("com.example.search.SearchResultType").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("WrongType") + .version("2.3").name("com.example.invalid.WrongType").build()); validator.validateReferences(content, references); }); } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/JsonSchemaContentValidatorTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/JsonSchemaContentValidatorTest.java index 130dbfb3b4..a659e9863b 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/JsonSchemaContentValidatorTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/JsonSchemaContentValidatorTest.java @@ -43,8 +43,10 @@ public void testInvalidJsonSchemaFull() throws Exception { validator.validate(ValidityLevel.FULL, content, Collections.emptyMap()); }); Assertions.assertFalse(error.getCauses().isEmpty()); - Assertions.assertEquals("expected type: Number, found: Boolean", error.getCauses().iterator().next().getDescription()); - Assertions.assertEquals("#/items/properties/price/exclusiveMinimum", error.getCauses().iterator().next().getContext()); + Assertions.assertEquals("expected type: Number, found: Boolean", + error.getCauses().iterator().next().getDescription()); + Assertions.assertEquals("#/items/properties/price/exclusiveMinimum", 
+ error.getCauses().iterator().next().getContext()); } @Test @@ -52,6 +54,7 @@ public void testJsonSchemaWithReferences() throws Exception { TypedContent city = resourceToTypedContentHandle("city.json"); TypedContent citizen = resourceToTypedContentHandle("citizen.json"); JsonSchemaContentValidator validator = new JsonSchemaContentValidator(); - validator.validate(ValidityLevel.FULL, citizen, Collections.singletonMap("https://example.com/city.json", city)); + validator.validate(ValidityLevel.FULL, citizen, + Collections.singletonMap("https://example.com/city.json", city)); } } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/OpenApiContentValidatorTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/OpenApiContentValidatorTest.java index a77a7a8ee4..ffd16e8575 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/OpenApiContentValidatorTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/OpenApiContentValidatorTest.java @@ -22,7 +22,6 @@ public void testValidSyntax() throws Exception { validator.validate(ValidityLevel.SYNTAX_ONLY, content, Collections.emptyMap()); } - @Test public void testValidSyntax_OpenApi31() throws Exception { TypedContent content = resourceToTypedContentHandle("openapi-valid-syntax-openapi31.json"); @@ -64,16 +63,10 @@ public void testValidateRefs() throws Exception { // Properly map both required references - success. 
{ List references = new ArrayList<>(); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("ExternalWidget") - .version("1.0") - .name("example.com#/components/schemas/ExternalWidget").build()); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("AnotherWidget") - .version("1.1") - .name("example.com#/components/schemas/AnotherWidget").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("ExternalWidget") + .version("1.0").name("example.com#/components/schemas/ExternalWidget").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("AnotherWidget") + .version("1.1").name("example.com#/components/schemas/AnotherWidget").build()); validator.validateReferences(content, references); } @@ -86,27 +79,18 @@ public void testValidateRefs() throws Exception { // Only map one of the two required refs - failure. Assertions.assertThrows(RuleViolationException.class, () -> { List references = new ArrayList<>(); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("AnotherWidget") - .version("1.1") - .name("example.com#/components/schemas/AnotherWidget").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("AnotherWidget") + .version("1.1").name("example.com#/components/schemas/AnotherWidget").build()); validator.validateReferences(content, references); }); // Only map one of the two required refs - failure. 
Assertions.assertThrows(RuleViolationException.class, () -> { List references = new ArrayList<>(); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("AnotherWidget") - .version("1.1") - .name("example.com#/components/schemas/AnotherWidget").build()); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("WrongWidget") - .version("2.3") - .name("example.com#/components/schemas/WrongWidget").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("AnotherWidget") + .version("1.1").name("example.com#/components/schemas/AnotherWidget").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("WrongWidget") + .version("2.3").name("example.com#/components/schemas/WrongWidget").build()); validator.validateReferences(content, references); }); } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/ProtobufContentValidatorTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/ProtobufContentValidatorTest.java index b84b7df81a..a20f2331b0 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/ProtobufContentValidatorTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/ProtobufContentValidatorTest.java @@ -36,7 +36,8 @@ public void testValidateProtobufWithImports() throws Exception { TypedContent mode = resourceToTypedContentHandle("mode.proto"); TypedContent tableInfo = resourceToTypedContentHandle("table_info.proto"); ProtobufContentValidator validator = new ProtobufContentValidator(); - validator.validate(ValidityLevel.SYNTAX_ONLY, tableInfo, Collections.singletonMap("mode.proto", mode)); + validator.validate(ValidityLevel.SYNTAX_ONLY, tableInfo, + Collections.singletonMap("mode.proto", mode)); } @Test @@ -47,16 +48,10 @@ public void testValidateReferences() throws Exception { // Properly map both required references - 
success. { List references = new ArrayList<>(); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("message2.proto") - .version("1.0") - .name("message2.proto").build()); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("message3.proto") - .version("1.1") - .name("message3.proto").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("message2.proto") + .version("1.0").name("message2.proto").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("message3.proto") + .version("1.1").name("message3.proto").build()); validator.validateReferences(content, references); } @@ -69,27 +64,18 @@ public void testValidateReferences() throws Exception { // Only map one of the two required refs - failure. Assertions.assertThrows(RuleViolationException.class, () -> { List references = new ArrayList<>(); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("message2.proto") - .version("1.0") - .name("message2.proto").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("message2.proto") + .version("1.0").name("message2.proto").build()); validator.validateReferences(content, references); }); // Only map one of the two required refs - failure. 
Assertions.assertThrows(RuleViolationException.class, () -> { List references = new ArrayList<>(); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("message2.proto") - .version("1.0") - .name("message2.proto").build()); - references.add(ArtifactReference.builder() - .groupId("default") - .artifactId("message4.proto") - .version("4.0") - .name("message4.proto").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("message2.proto") + .version("1.0").name("message2.proto").build()); + references.add(ArtifactReference.builder().groupId("default").artifactId("message4.proto") + .version("4.0").name("message4.proto").build()); validator.validateReferences(content, references); }); } diff --git a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/WsdlContentValidatorTest.java b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/WsdlContentValidatorTest.java index 5bbb70a91a..ae88c5cc57 100644 --- a/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/WsdlContentValidatorTest.java +++ b/schema-util/util-provider/src/test/java/io/apicurio/registry/rules/validity/WsdlContentValidatorTest.java @@ -38,7 +38,9 @@ public void testinValidSemantics() throws Exception { TypedContent content = resourceToTypedContentHandle("wsdl-invalid-semantics.wsdl"); WsdlContentValidator validator = new WsdlContentValidator(); Assertions.assertThrows(RuleViolationException.class, () -> { - //WSDLException faultCode=INVALID_WSDL: Encountered illegal extension element '{http://schemas.xmlsoap.org/wsdl/}element' in the context of a 'javax.wsdl.Types'. Extension elements must be in a namespace other than WSDL's + // WSDLException faultCode=INVALID_WSDL: Encountered illegal extension element + // '{http://schemas.xmlsoap.org/wsdl/}element' in the context of a 'javax.wsdl.Types'. 
Extension + // elements must be in a namespace other than WSDL's validator.validate(ValidityLevel.FULL, content, Collections.emptyMap()); }); } diff --git a/schema-util/wsdl/pom.xml b/schema-util/wsdl/pom.xml index c7b9bb346b..f71472d522 100644 --- a/schema-util/wsdl/pom.xml +++ b/schema-util/wsdl/pom.xml @@ -1,62 +1,60 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - - - apicurio-registry-schema-util-wsdl - jar - apicurio-registry-schema-util-wsdl - - - - - io.apicurio - apicurio-registry-schema-util-common - - - - io.apicurio - apicurio-registry-schema-util-xml - - - - wsdl4j - wsdl4j - - - - org.slf4j - slf4j-api - - - - - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + + + apicurio-registry-schema-util-wsdl + jar + apicurio-registry-schema-util-wsdl + + + + + io.apicurio + apicurio-registry-schema-util-common + + + + io.apicurio + apicurio-registry-schema-util-xml + + + + wsdl4j + wsdl4j + + + + org.slf4j + slf4j-api + + + + + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/wsdl/src/main/java/io/apicurio/registry/content/extract/WsdlOrXsdContentExtractor.java b/schema-util/wsdl/src/main/java/io/apicurio/registry/content/extract/WsdlOrXsdContentExtractor.java index 362660b28d..d13463dee3 100644 --- a/schema-util/wsdl/src/main/java/io/apicurio/registry/content/extract/WsdlOrXsdContentExtractor.java +++ b/schema-util/wsdl/src/main/java/io/apicurio/registry/content/extract/WsdlOrXsdContentExtractor.java @@ -1,13 +1,12 @@ package io.apicurio.registry.content.extract; -import java.io.InputStream; - +import io.apicurio.registry.content.ContentHandle; +import io.apicurio.registry.util.DocumentBuilderAccessor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; 
-import io.apicurio.registry.content.ContentHandle; -import io.apicurio.registry.util.DocumentBuilderAccessor; +import java.io.InputStream; /** * Performs meta-data extraction for WSDL or XSD content. diff --git a/schema-util/wsdl/src/main/java/io/apicurio/registry/rules/validity/WsdlContentValidator.java b/schema-util/wsdl/src/main/java/io/apicurio/registry/rules/validity/WsdlContentValidator.java index fae28ed53c..2485cb48d7 100644 --- a/schema-util/wsdl/src/main/java/io/apicurio/registry/rules/validity/WsdlContentValidator.java +++ b/schema-util/wsdl/src/main/java/io/apicurio/registry/rules/validity/WsdlContentValidator.java @@ -22,7 +22,8 @@ public WsdlContentValidator() { * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) */ @Override - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException { + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException { if (level == ValidityLevel.SYNTAX_ONLY || level == ValidityLevel.FULL) { try (InputStream stream = content.getContent().stream()) { Document wsdlDoc = DocumentBuilderAccessor.getDocumentBuilder().parse(stream); @@ -31,7 +32,8 @@ public void validate(ValidityLevel level, TypedContent content, Map - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-schema-util-xml - jar - apicurio-registry-schema-util-xml + apicurio-registry-schema-util-xml + jar + apicurio-registry-schema-util-xml - + - - io.apicurio - apicurio-registry-schema-util-common - + + io.apicurio + apicurio-registry-schema-util-common + - - org.apache.santuario - xmlsec - + + org.apache.santuario + xmlsec + - + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + + true + 
src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/xml/src/main/java/io/apicurio/registry/content/canon/XmlContentCanonicalizer.java b/schema-util/xml/src/main/java/io/apicurio/registry/content/canon/XmlContentCanonicalizer.java index 7f70dbcd91..dcafed781f 100644 --- a/schema-util/xml/src/main/java/io/apicurio/registry/content/canon/XmlContentCanonicalizer.java +++ b/schema-util/xml/src/main/java/io/apicurio/registry/content/canon/XmlContentCanonicalizer.java @@ -15,12 +15,12 @@ /** * A common XML content canonicalizer. - * */ public class XmlContentCanonicalizer implements ContentCanonicalizer { private static ThreadLocal xmlCanonicalizer = new ThreadLocal() { - @Override protected Canonicalizer initialValue() { + @Override + protected Canonicalizer initialValue() { try { return Canonicalizer.getInstance(Canonicalizer.ALGO_ID_C14N_OMIT_COMMENTS); } catch (InvalidCanonicalizerException e) { @@ -34,10 +34,10 @@ public class XmlContentCanonicalizer implements ContentCanonicalizer { } /** - * @see ContentCanonicalizer#canonicalize(TypedContent, Map) + * @see ContentCanonicalizer#canonicalize(TypedContent, Map) */ - @Override public TypedContent canonicalize(TypedContent content, - Map resolvedReferences) { + @Override + public TypedContent canonicalize(TypedContent content, Map resolvedReferences) { try { Canonicalizer canon = xmlCanonicalizer.get(); var out = new ByteArrayOutputStream(content.getContent().getSizeBytes()); diff --git a/schema-util/xml/src/main/java/io/apicurio/registry/rules/validity/XmlContentValidator.java b/schema-util/xml/src/main/java/io/apicurio/registry/rules/validity/XmlContentValidator.java index 5db7abff35..0d22a63f8e 100644 --- a/schema-util/xml/src/main/java/io/apicurio/registry/rules/validity/XmlContentValidator.java +++ b/schema-util/xml/src/main/java/io/apicurio/registry/rules/validity/XmlContentValidator.java @@ -19,24 +19,27 @@ public XmlContentValidator() { 
} /** - * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) + * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) */ @Override - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException { + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException { if (level == ValidityLevel.SYNTAX_ONLY || level == ValidityLevel.FULL) { try (InputStream stream = content.getContent().stream()) { DocumentBuilderAccessor.getDocumentBuilder().parse(stream); } catch (Exception e) { - throw new RuleViolationException("Syntax violation for XML artifact.", RuleType.VALIDITY, level.name(), e); + throw new RuleViolationException("Syntax violation for XML artifact.", RuleType.VALIDITY, + level.name(), e); } } } /** - * @see io.apicurio.registry.rules.validity.ContentValidator#validateReferences(TypedContent, List) + * @see io.apicurio.registry.rules.validity.ContentValidator#validateReferences(TypedContent, List) */ @Override - public void validateReferences(TypedContent content, List references) throws RuleViolationException { + public void validateReferences(TypedContent content, List references) + throws RuleViolationException { // Note: not yet implemented! 
} diff --git a/schema-util/xml/src/main/java/io/apicurio/registry/util/DocumentBuilderAccessor.java b/schema-util/xml/src/main/java/io/apicurio/registry/util/DocumentBuilderAccessor.java index de766cb5dc..1febc093fc 100644 --- a/schema-util/xml/src/main/java/io/apicurio/registry/util/DocumentBuilderAccessor.java +++ b/schema-util/xml/src/main/java/io/apicurio/registry/util/DocumentBuilderAccessor.java @@ -26,7 +26,7 @@ protected DocumentBuilder initialValue() { return builder; } }; - + public static DocumentBuilder getDocumentBuilder() { return threadLocaldocBuilder.get(); } diff --git a/schema-util/xsd/pom.xml b/schema-util/xsd/pom.xml index 958c677d66..38723780d1 100644 --- a/schema-util/xsd/pom.xml +++ b/schema-util/xsd/pom.xml @@ -1,52 +1,50 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-schema-util-xsd - jar - apicurio-registry-schema-util-xsd + apicurio-registry-schema-util-xsd + jar + apicurio-registry-schema-util-xsd - + - - io.apicurio - apicurio-registry-schema-util-common - + + io.apicurio + apicurio-registry-schema-util-common + - - io.apicurio - apicurio-registry-schema-util-xml - + + io.apicurio + apicurio-registry-schema-util-xml + - + - - - - src/test/resources - true - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + diff --git a/schema-util/xsd/src/main/java/io/apicurio/registry/rules/validity/XsdContentValidator.java b/schema-util/xsd/src/main/java/io/apicurio/registry/rules/validity/XsdContentValidator.java index 4fcdf2e6d3..c5bcfb333d 100644 --- a/schema-util/xsd/src/main/java/io/apicurio/registry/rules/validity/XsdContentValidator.java +++ b/schema-util/xsd/src/main/java/io/apicurio/registry/rules/validity/XsdContentValidator.java @@ -5,11 +5,12 @@ import 
io.apicurio.registry.types.RuleType; import io.apicurio.registry.util.SchemaFactoryAccessor; -import javax.xml.transform.Source; -import javax.xml.transform.stream.StreamSource; import java.io.InputStream; import java.util.Map; +import javax.xml.transform.Source; +import javax.xml.transform.stream.StreamSource; + public class XsdContentValidator extends XmlContentValidator { /** @@ -17,11 +18,13 @@ public class XsdContentValidator extends XmlContentValidator { */ public XsdContentValidator() { } + /** - * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) + * @see io.apicurio.registry.rules.validity.ContentValidator#validate(ValidityLevel, TypedContent, Map) */ @Override - public void validate(ValidityLevel level, TypedContent content, Map resolvedReferences) throws RuleViolationException { + public void validate(ValidityLevel level, TypedContent content, + Map resolvedReferences) throws RuleViolationException { super.validate(level, content, resolvedReferences); if (level == ValidityLevel.FULL) { @@ -30,7 +33,8 @@ public void validate(ValidityLevel level, TypedContent content, Map threadLocalSchemaFactory = new ThreadLocal() { @Override protected SchemaFactory initialValue() { - SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); - try { - factory.setProperty(XMLConstants.ACCESS_EXTERNAL_SCHEMA, ""); - factory.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, ""); - } catch (SAXNotRecognizedException | SAXNotSupportedException e) { - // Don't care. - } - return factory; + SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); + try { + factory.setProperty(XMLConstants.ACCESS_EXTERNAL_SCHEMA, ""); + factory.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, ""); + } catch (SAXNotRecognizedException | SAXNotSupportedException e) { + // Don't care. 
+ } + return factory; } }; - + public static final SchemaFactory getSchemaFactory() { return threadLocalSchemaFactory.get(); } diff --git a/serdes/avro-serde/pom.xml b/serdes/avro-serde/pom.xml index 4abe3e4659..c64723e385 100644 --- a/serdes/avro-serde/pom.xml +++ b/serdes/avro-serde/pom.xml @@ -1,30 +1,27 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-serdes-avro-serde - jar - apicurio-registry-serdes-avro-serde + apicurio-registry-serdes-avro-serde + jar + apicurio-registry-serdes-avro-serde - - - io.apicurio - apicurio-registry-serde-common - + + + io.apicurio + apicurio-registry-serde-common + + + + org.apache.avro + avro + + - - org.apache.avro - avro - - - diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroDatumProvider.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroDatumProvider.java index 7f93ec3b9b..aae8aa380d 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroDatumProvider.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroDatumProvider.java @@ -6,7 +6,8 @@ public interface AvroDatumProvider { - default void configure(AvroKafkaSerdeConfig config) {} + default void configure(AvroKafkaSerdeConfig config) { + } DatumWriter createDatumWriter(T data, Schema schema); diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroEncoding.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroEncoding.java index 0dfe509b8d..562ea88b0e 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroEncoding.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroEncoding.java @@ -2,7 +2,6 @@ public enum AvroEncoding { - BINARY, - JSON; + BINARY, JSON; } diff --git 
a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaDeserializer.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaDeserializer.java index f372943f77..2eaee94041 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaDeserializer.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaDeserializer.java @@ -1,5 +1,15 @@ package io.apicurio.registry.serde.avro; +import io.apicurio.registry.resolver.ParsedSchema; +import io.apicurio.registry.resolver.SchemaParser; +import io.apicurio.registry.resolver.utils.Utils; +import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.serde.AbstractKafkaDeserializer; +import org.apache.avro.Schema; +import org.apache.avro.io.DatumReader; +import org.apache.avro.io.DecoderFactory; +import org.apache.kafka.common.header.Headers; + import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.UncheckedIOException; @@ -8,17 +18,6 @@ import java.util.Objects; import java.util.function.Consumer; -import org.apache.avro.Schema; -import org.apache.avro.io.DatumReader; -import org.apache.avro.io.DecoderFactory; -import org.apache.kafka.common.header.Headers; - -import io.apicurio.registry.resolver.ParsedSchema; -import io.apicurio.registry.resolver.SchemaParser; -import io.apicurio.registry.resolver.utils.Utils; -import io.apicurio.registry.rest.client.RegistryClient; -import io.apicurio.registry.serde.AbstractKafkaDeserializer; - public class AvroKafkaDeserializer extends AbstractKafkaDeserializer { private final DecoderFactory decoderFactory = DecoderFactory.get(); @@ -53,7 +52,7 @@ public void configure(Map configs, boolean isKey) { avroHeaders = new AvroSerdeHeaders(isKey); - //important to instantiate the SchemaParser before calling super.configure + // important to instantiate the SchemaParser before calling super.configure parser = new AvroSchemaParser<>(avroDatumProvider); 
super.configure(config, isKey); @@ -73,9 +72,10 @@ protected U readData(ParsedSchema schema, ByteBuffer buffer, int start, } @Override - protected U readData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, int length) { + protected U readData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, + int length) { AvroEncoding encoding = null; - if (headers != null){ + if (headers != null) { String encodingHeader = avroHeaders.getEncoding(headers); if (encodingHeader != null) { encoding = AvroEncoding.valueOf(encodingHeader); @@ -87,11 +87,12 @@ protected U readData(Headers headers, ParsedSchema schema, ByteBuffer bu } try { DatumReader reader = avroDatumProvider.createDatumReader(schema.getParsedSchema()); - if( encoding == AvroEncoding.JSON) { + if (encoding == AvroEncoding.JSON) { // copy the data into a new byte[] byte[] msgData = new byte[length]; System.arraycopy(buffer.array(), start, msgData, 0, length); - return reader.read(null, decoderFactory.jsonDecoder(schema.getParsedSchema(), new ByteArrayInputStream(msgData))); + return reader.read(null, decoderFactory.jsonDecoder(schema.getParsedSchema(), + new ByteArrayInputStream(msgData))); } else { return reader.read(null, decoderFactory.binaryDecoder(buffer.array(), start, length, null)); } diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaSerdeConfig.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaSerdeConfig.java index fe25727031..05e78b398d 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaSerdeConfig.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaSerdeConfig.java @@ -1,18 +1,18 @@ package io.apicurio.registry.serde.avro; -import java.util.Map; - +import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import 
org.apache.kafka.common.config.ConfigDef.Type; -import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; +import java.util.Map; public class AvroKafkaSerdeConfig extends BaseKafkaSerDeConfig { /** * Used by the Avro serde classes to choose an io.apicurio.registry.serde.avro.AvroEncoding, - * for example JSON or BINARY. Serializer and Deserializer configuration must match. + * for example JSON or BINARY. Serializer and Deserializer configuration must + * match. */ public static final String AVRO_ENCODING = "apicurio.registry.avro.encoding"; public static final String AVRO_ENCODING_JSON = "JSON"; @@ -24,17 +24,20 @@ public class AvroKafkaSerdeConfig extends BaseKafkaSerDeConfig { public static final String USE_SPECIFIC_AVRO_READER = "apicurio.registry.use-specific-avro-reader"; public static final boolean USE_SPECIFIC_AVRO_READER_DEFAULT = false; - private static ConfigDef configDef() { ConfigDef configDef = new ConfigDef() - .define(AVRO_ENCODING, Type.STRING, AvroEncoding.BINARY.name(), Importance.MEDIUM, "TODO docs") - .define(AVRO_DATUM_PROVIDER, Type.CLASS, AVRO_DATUM_PROVIDER_DEFAULT, Importance.MEDIUM, "TODO docs") - .define(USE_SPECIFIC_AVRO_READER, Type.BOOLEAN, USE_SPECIFIC_AVRO_READER_DEFAULT, Importance.MEDIUM, "TODO docs"); + .define(AVRO_ENCODING, Type.STRING, AvroEncoding.BINARY.name(), Importance.MEDIUM, + "TODO docs") + .define(AVRO_DATUM_PROVIDER, Type.CLASS, AVRO_DATUM_PROVIDER_DEFAULT, Importance.MEDIUM, + "TODO docs") + .define(USE_SPECIFIC_AVRO_READER, Type.BOOLEAN, USE_SPECIFIC_AVRO_READER_DEFAULT, + Importance.MEDIUM, "TODO docs"); return configDef; } /** * Constructor. 
+ * * @param configDef * @param originals */ diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaSerializer.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaSerializer.java index 606e6b05ad..ab6bbc2040 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaSerializer.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroKafkaSerializer.java @@ -1,17 +1,5 @@ package io.apicurio.registry.serde.avro; -import java.io.IOException; -import java.io.OutputStream; -import java.util.Map; -import java.util.Objects; -import java.util.function.Consumer; - -import org.apache.avro.Schema; -import org.apache.avro.io.DatumWriter; -import org.apache.avro.io.Encoder; -import org.apache.avro.io.EncoderFactory; -import org.apache.kafka.common.header.Headers; - import io.apicurio.registry.resolver.ParsedSchema; import io.apicurio.registry.resolver.SchemaParser; import io.apicurio.registry.resolver.SchemaResolver; @@ -19,6 +7,17 @@ import io.apicurio.registry.resolver.utils.Utils; import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.serde.AbstractKafkaSerializer; +import org.apache.avro.Schema; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.Encoder; +import org.apache.avro.io.EncoderFactory; +import org.apache.kafka.common.header.Headers; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.Map; +import java.util.Objects; +import java.util.function.Consumer; public class AvroKafkaSerializer extends AbstractKafkaSerializer { @@ -64,7 +63,7 @@ public void configure(Map configs, boolean isKey) { avroHeaders = new AvroSerdeHeaders(isKey); - //important to instantiate the SchemaParser before calling super.configure + // important to instantiate the SchemaParser before calling super.configure parser = new AvroSchemaParser<>(avroDatumProvider); super.configure(config, isKey); @@ -79,7 +78,8 @@ public 
SchemaParser schemaParser() { } /** - * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(io.apicurio.registry.serde.ParsedSchema, java.lang.Object, java.io.OutputStream) + * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(io.apicurio.registry.serde.ParsedSchema, + * java.lang.Object, java.io.OutputStream) */ @SuppressWarnings("unchecked") @Override @@ -88,7 +88,7 @@ protected void serializeData(ParsedSchema schema, U data, OutputStream o // I guess this can happen if generics are lost with reflection ... if (data instanceof NonRecordContainer) { - //noinspection unchecked + // noinspection unchecked data = (U) NonRecordContainer.class.cast(data).getValue(); } @@ -98,10 +98,12 @@ protected void serializeData(ParsedSchema schema, U data, OutputStream o } /** - * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(org.apache.kafka.common.header.Headers, io.apicurio.registry.serde.ParsedSchema, java.lang.Object, java.io.OutputStream) + * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(org.apache.kafka.common.header.Headers, + * io.apicurio.registry.serde.ParsedSchema, java.lang.Object, java.io.OutputStream) */ @Override - protected void serializeData(Headers headers, ParsedSchema schema, U data, OutputStream out) throws IOException { + protected void serializeData(Headers headers, ParsedSchema schema, U data, OutputStream out) + throws IOException { if (headers != null) { avroHeaders.addEncodingHeader(headers, encoding.name()); } @@ -109,7 +111,7 @@ protected void serializeData(Headers headers, ParsedSchema schema, U dat } private Encoder createEncoder(Schema schema, OutputStream os) throws IOException { - if(encoding == AvroEncoding.JSON) { + if (encoding == AvroEncoding.JSON) { return encoderFactory.jsonEncoder(schema, os); } else { return encoderFactory.directBinaryEncoder(os, null); diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSchemaParser.java 
b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSchemaParser.java index 9da6085e77..0059d2ce53 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSchemaParser.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSchemaParser.java @@ -35,7 +35,8 @@ public String artifactType() { */ @Override public Schema parseSchema(byte[] rawSchema, Map> resolvedReferences) { - return AvroSchemaUtils.parse(IoUtil.toString(rawSchema), new ArrayList<>(resolvedReferences.values())); + return AvroSchemaUtils.parse(IoUtil.toString(rawSchema), + new ArrayList<>(resolvedReferences.values())); } /** @@ -45,11 +46,10 @@ public Schema parseSchema(byte[] rawSchema, Map> re public ParsedSchema getSchemaFromData(Record data) { Schema schema = avroDatumProvider.toSchema(data.payload()); final List> resolvedReferences = handleReferences(schema); - return new ParsedSchemaImpl() - .setParsedSchema(schema) - .setReferenceName(schema.getFullName()) + return new ParsedSchemaImpl().setParsedSchema(schema).setReferenceName(schema.getFullName()) .setSchemaReferences(resolvedReferences) - .setRawSchema(IoUtil.toBytes(schema.toString(resolvedReferences.stream().map(ParsedSchema::getParsedSchema).collect(Collectors.toSet()), false))); + .setRawSchema(IoUtil.toBytes(schema.toString(resolvedReferences.stream() + .map(ParsedSchema::getParsedSchema).collect(Collectors.toSet()), false))); } /** @@ -60,10 +60,8 @@ public ParsedSchema getSchemaFromData(Record data, boolean dereferenc if (dereference) { Schema schema = avroDatumProvider.toSchema(data.payload()); - return new ParsedSchemaImpl() - .setParsedSchema(schema) - .setReferenceName(schema.getFullName()) - .setRawSchema(IoUtil.toBytes(schema.toString())); + return new ParsedSchemaImpl().setParsedSchema(schema) + .setReferenceName(schema.getFullName()).setRawSchema(IoUtil.toBytes(schema.toString())); } else { return getSchemaFromData(data); } @@ -131,7 +129,6 @@ private void 
addComplexTypeSubSchema(List> schemaReferences } } - private List> handleRecord(Schema schema) { final List> schemaReferences = new ArrayList<>(); for (Schema.Field field : schema.getFields()) { @@ -139,13 +136,13 @@ private List> handleRecord(Schema schema) { final List> parsedSchemas = handleReferences(field.schema()); - byte[] rawSchema = IoUtil.toBytes(field.schema().toString(parsedSchemas.stream().map(ParsedSchema::getParsedSchema).collect(Collectors.toSet()), false)); + byte[] rawSchema = IoUtil.toBytes(field.schema().toString( + parsedSchemas.stream().map(ParsedSchema::getParsedSchema).collect(Collectors.toSet()), + false)); ParsedSchema referencedSchema = new ParsedSchemaImpl() - .setParsedSchema(field.schema()) - .setReferenceName(field.schema().getFullName()) - .setSchemaReferences(parsedSchemas) - .setRawSchema(rawSchema); + .setParsedSchema(field.schema()).setReferenceName(field.schema().getFullName()) + .setSchemaReferences(parsedSchemas).setRawSchema(rawSchema); schemaReferences.add(referencedSchema); } else if (field.schema().getType().equals(Schema.Type.UNION)) { @@ -166,26 +163,23 @@ private List> handleRecord(Schema schema) { } private ParsedSchema parseSchema(Schema schema, List> schemaReferences) { - byte[] rawSchema = IoUtil.toBytes(schema.toString(schemaReferences.stream().map(ParsedSchema::getParsedSchema).collect(Collectors.toSet()), false)); + byte[] rawSchema = IoUtil.toBytes(schema.toString( + schemaReferences.stream().map(ParsedSchema::getParsedSchema).collect(Collectors.toSet()), + false)); - return new ParsedSchemaImpl() - .setParsedSchema(schema) - .setReferenceName(schema.getFullName()) - .setSchemaReferences(schemaReferences) - .setRawSchema(rawSchema); + return new ParsedSchemaImpl().setParsedSchema(schema).setReferenceName(schema.getFullName()) + .setSchemaReferences(schemaReferences).setRawSchema(rawSchema); } private ParsedSchema handleEnum(Schema schema) { byte[] rawSchema = IoUtil.toBytes(schema.toString()); - return new 
ParsedSchemaImpl() - .setParsedSchema(schema) - .setReferenceName(schema.getFullName()) - .setSchemaReferences(Collections.emptyList()) - .setRawSchema(rawSchema); + return new ParsedSchemaImpl().setParsedSchema(schema).setReferenceName(schema.getFullName()) + .setSchemaReferences(Collections.emptyList()).setRawSchema(rawSchema); } public boolean isComplexType(Schema.Type type) { - return type == Schema.Type.ARRAY || type == Schema.Type.MAP || type == Schema.Type.RECORD || type == Schema.Type.ENUM || type == Schema.Type.UNION; + return type == Schema.Type.ARRAY || type == Schema.Type.MAP || type == Schema.Type.RECORD + || type == Schema.Type.ENUM || type == Schema.Type.UNION; } } diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSchemaUtils.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSchemaUtils.java index 226b26b339..eef5d172c1 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSchemaUtils.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSchemaUtils.java @@ -41,12 +41,13 @@ public static Schema parse(String schema) { } public static Schema parse(String schema, List> references) { - //First try to parse without references, useful when the content is dereferenced + // First try to parse without references, useful when the content is dereferenced try { final Schema.Parser parser = new Schema.Parser(); return parser.parse(schema); } catch (SchemaParseException e) { - //If we fail to parse the content from the main schema, then parse first the references and then the main schema + // If we fail to parse the content from the main schema, then parse first the references and then + // the main schema final Schema.Parser parser = new Schema.Parser(); handleReferences(parser, references); return parser.parse(schema); @@ -68,7 +69,7 @@ public static boolean isPrimitive(Schema schema) { return primitiveSchemas.containsValue(schema); } - static Schema 
getReflectSchema(ReflectData reflectData,Object object) { + static Schema getReflectSchema(ReflectData reflectData, Object object) { Class clazz = (object instanceof Class) ? (Class) object : object.getClass(); Schema schema = reflectData.getSchema(clazz); if (schema == null) { @@ -101,8 +102,8 @@ static Schema getSchema(Object object, boolean useReflection) { } else if (useReflection) { Schema schema = ReflectData.get().getSchema(object.getClass()); if (schema == null) { - throw new SerializationException("Schema is null for object of class " + object.getClass() - .getCanonicalName()); + throw new SerializationException( + "Schema is null for object of class " + object.getClass().getCanonicalName()); } else { return schema; } diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSerdeHeaders.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSerdeHeaders.java index 009503ff6c..82c2661d5e 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSerdeHeaders.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/AvroSerdeHeaders.java @@ -1,12 +1,11 @@ package io.apicurio.registry.serde.avro; +import io.apicurio.registry.serde.SerdeHeaders; +import io.apicurio.registry.utils.IoUtil; import org.apache.kafka.common.header.Header; import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.header.internals.RecordHeader; -import io.apicurio.registry.serde.SerdeHeaders; -import io.apicurio.registry.utils.IoUtil; - public class AvroSerdeHeaders { private final String encodingHeaderName; diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/DefaultAvroDatumProvider.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/DefaultAvroDatumProvider.java index ce16981ec8..858e2313b0 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/DefaultAvroDatumProvider.java +++ 
b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/DefaultAvroDatumProvider.java @@ -1,8 +1,5 @@ package io.apicurio.registry.serde.avro; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - import org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericDatumWriter; @@ -13,6 +10,9 @@ import org.apache.avro.specific.SpecificDatumWriter; import org.apache.avro.specific.SpecificRecord; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + public class DefaultAvroDatumProvider implements AvroDatumProvider { private Boolean useSpecificAvroReader; private Map schemas = new ConcurrentHashMap<>(); @@ -45,15 +45,12 @@ private Schema getReaderSchema(Schema schema) { return readerClass.getConstructor().newInstance().getSchema(); } catch (Exception e) { throw new IllegalStateException(String.format("Error getting schema [%s]: %s", - schema.getFullName(), - readerClass.getName()), - e); + schema.getFullName(), readerClass.getName()), e); } } else { - throw new IllegalArgumentException("Could not find class " - + schema.getFullName() - + " specified in writer's schema whilst finding reader's " - + "schema for a SpecificRecord."); + throw new IllegalArgumentException("Could not find class " + schema.getFullName() + + " specified in writer's schema whilst finding reader's " + + "schema for a SpecificRecord."); } }); } diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/NonRecordContainer.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/NonRecordContainer.java index 451913764e..d17d50a8b3 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/NonRecordContainer.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/NonRecordContainer.java @@ -40,8 +40,7 @@ public boolean equals(Object o) { return false; } NonRecordContainer that = (NonRecordContainer) o; - return Objects.equals(schema, 
that.schema) - && Objects.equals(value, that.value); + return Objects.equals(schema, that.schema) && Objects.equals(value, that.value); } @Override diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/ReflectAllowNullAvroDatumProvider.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/ReflectAllowNullAvroDatumProvider.java index e9a696ac05..81b8390c0e 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/ReflectAllowNullAvroDatumProvider.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/ReflectAllowNullAvroDatumProvider.java @@ -9,6 +9,6 @@ public ReflectAllowNullAvroDatumProvider() { } public ReflectAllowNullAvroDatumProvider(Class clazz) { - super(ReflectData.AllowNull.get(),clazz); + super(ReflectData.AllowNull.get(), clazz); } } diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/ReflectAvroDatumProvider.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/ReflectAvroDatumProvider.java index cd1a3e3e04..49bf49e396 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/ReflectAvroDatumProvider.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/ReflectAvroDatumProvider.java @@ -21,30 +21,30 @@ public ReflectAvroDatumProvider(ReflectData reflectData) { } public ReflectAvroDatumProvider(Class clazz) { - this(ReflectData.get(),clazz); + this(ReflectData.get(), clazz); } - public ReflectAvroDatumProvider(ReflectData reflectData,Class clazz) { + public ReflectAvroDatumProvider(ReflectData reflectData, Class clazz) { this(reflectData); - this.readerSchema = AvroSchemaUtils.getReflectSchema(reflectData,clazz); + this.readerSchema = AvroSchemaUtils.getReflectSchema(reflectData, clazz); } @Override public DatumWriter createDatumWriter(T data, Schema schema) { - return new ReflectDatumWriter<>(schema,reflectData); + return new ReflectDatumWriter<>(schema, reflectData); } @Override public 
DatumReader createDatumReader(Schema schema) { if (readerSchema == null) { - return new ReflectDatumReader<>(schema,schema,reflectData); + return new ReflectDatumReader<>(schema, schema, reflectData); } else { - return new ReflectDatumReader<>(schema, readerSchema,reflectData); + return new ReflectDatumReader<>(schema, readerSchema, reflectData); } } @Override public Schema toSchema(T data) { - return AvroSchemaUtils.getReflectSchema(reflectData,data); + return AvroSchemaUtils.getReflectSchema(reflectData, data); } } diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/QualifiedRecordIdStrategy.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/QualifiedRecordIdStrategy.java index d32eb95deb..8909f55ba0 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/QualifiedRecordIdStrategy.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/QualifiedRecordIdStrategy.java @@ -1,25 +1,26 @@ package io.apicurio.registry.serde.avro.strategy; -import org.apache.avro.Schema; -import org.apache.kafka.common.errors.SerializationException; - import io.apicurio.registry.resolver.ParsedSchema; import io.apicurio.registry.resolver.data.Record; import io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy; import io.apicurio.registry.serde.strategy.ArtifactReference; +import org.apache.avro.Schema; +import org.apache.kafka.common.errors.SerializationException; public class QualifiedRecordIdStrategy implements ArtifactReferenceResolverStrategy { /** - * @see io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, io.apicurio.registry.resolver.ParsedSchema) + * @see io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, + * io.apicurio.registry.resolver.ParsedSchema) */ @Override - public 
io.apicurio.registry.resolver.strategy.ArtifactReference artifactReference(Record data, ParsedSchema parsedSchema) { - if (parsedSchema != null && parsedSchema.getParsedSchema() != null && (parsedSchema.getParsedSchema().getType() == Schema.Type.RECORD || parsedSchema.getParsedSchema().getType() == Schema.Type.ENUM)) { - return ArtifactReference.builder() - .groupId(null) - .artifactId(parsedSchema.getParsedSchema().getFullName()) - .build(); + public io.apicurio.registry.resolver.strategy.ArtifactReference artifactReference(Record data, + ParsedSchema parsedSchema) { + if (parsedSchema != null && parsedSchema.getParsedSchema() != null + && (parsedSchema.getParsedSchema().getType() == Schema.Type.RECORD + || parsedSchema.getParsedSchema().getType() == Schema.Type.ENUM)) { + return ArtifactReference.builder().groupId(null) + .artifactId(parsedSchema.getParsedSchema().getFullName()).build(); } throw new SerializationException("The message must only be an Avro record schema!"); } diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/RecordIdStrategy.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/RecordIdStrategy.java index 2636fa7ba6..0c2d3f9a96 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/RecordIdStrategy.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/RecordIdStrategy.java @@ -1,29 +1,27 @@ package io.apicurio.registry.serde.avro.strategy; -import org.apache.avro.Schema; -import org.apache.kafka.common.errors.SerializationException; - import io.apicurio.registry.resolver.ParsedSchema; import io.apicurio.registry.resolver.data.Record; -import io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy; import io.apicurio.registry.resolver.strategy.ArtifactReference; +import io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy; +import org.apache.avro.Schema; +import 
org.apache.kafka.common.errors.SerializationException; public class RecordIdStrategy implements ArtifactReferenceResolverStrategy { /** - * @see io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, io.apicurio.registry.resolver.ParsedSchema) + * @see io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, + * io.apicurio.registry.resolver.ParsedSchema) */ @Override public ArtifactReference artifactReference(Record data, ParsedSchema parsedSchema) { Schema schema = parsedSchema.getParsedSchema(); - if (schema != null && (schema.getType() == Schema.Type.RECORD || schema.getType() == Schema.Type.ENUM)) { - return ArtifactReference.builder() - .groupId(schema.getNamespace()) - .artifactId(schema.getName()) + if (schema != null + && (schema.getType() == Schema.Type.RECORD || schema.getType() == Schema.Type.ENUM)) { + return ArtifactReference.builder().groupId(schema.getNamespace()).artifactId(schema.getName()) .build(); } throw new SerializationException("The message must only be an Avro record schema!"); } - } diff --git a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/TopicRecordIdStrategy.java b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/TopicRecordIdStrategy.java index 0c88db12a8..aa0d20374d 100644 --- a/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/TopicRecordIdStrategy.java +++ b/serdes/avro-serde/src/main/java/io/apicurio/registry/serde/avro/strategy/TopicRecordIdStrategy.java @@ -1,28 +1,24 @@ package io.apicurio.registry.serde.avro.strategy; -import org.apache.avro.Schema; - import io.apicurio.registry.resolver.ParsedSchema; import io.apicurio.registry.resolver.data.Record; -import io.apicurio.registry.serde.data.KafkaSerdeRecord; import io.apicurio.registry.resolver.strategy.ArtifactReference; +import 
io.apicurio.registry.serde.data.KafkaSerdeRecord; +import org.apache.avro.Schema; public class TopicRecordIdStrategy extends RecordIdStrategy { /** - * @see io.apicurio.registry.serde.avro.strategy.RecordIdStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, io.apicurio.registry.resolver.ParsedSchema) + * @see io.apicurio.registry.serde.avro.strategy.RecordIdStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, + * io.apicurio.registry.resolver.ParsedSchema) */ @Override public ArtifactReference artifactReference(Record data, ParsedSchema parsedSchema) { ArtifactReference reference = super.artifactReference(data, parsedSchema); KafkaSerdeRecord kdata = (KafkaSerdeRecord) data; - return ArtifactReference.builder() - .groupId(reference.getGroupId()) + return ArtifactReference.builder().groupId(reference.getGroupId()) .artifactId(kdata.metadata().getTopic() + "-" + reference.getArtifactId()) - .version(reference.getVersion()) - .build(); + .version(reference.getVersion()).build(); } - - } diff --git a/serdes/jsonschema-serde/pom.xml b/serdes/jsonschema-serde/pom.xml index 37998ed1f8..42e428aa1e 100644 --- a/serdes/jsonschema-serde/pom.xml +++ b/serdes/jsonschema-serde/pom.xml @@ -1,39 +1,38 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-serdes-jsonschema-serde - jar - apicurio-registry-serdes-jsonschema-serde + apicurio-registry-serdes-jsonschema-serde + jar + apicurio-registry-serdes-jsonschema-serde - - - io.apicurio - apicurio-registry-serde-common - + + + io.apicurio + apicurio-registry-serde-common + - - com.fasterxml.jackson.core - jackson-core - + + com.fasterxml.jackson.core + jackson-core + - - com.networknt - json-schema-validator - + + com.networknt + json-schema-validator + - - org.json - json - + + org.json + json + - + diff --git 
a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaDeserializer.java b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaDeserializer.java index 61e7b051c8..b14aab1fb5 100644 --- a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaDeserializer.java +++ b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaDeserializer.java @@ -22,7 +22,8 @@ import java.util.HashMap; import java.util.Map; -public class JsonSchemaKafkaDeserializer extends AbstractKafkaDeserializer implements Deserializer { +public class JsonSchemaKafkaDeserializer extends AbstractKafkaDeserializer + implements Deserializer { private ObjectMapper mapper; private Boolean validationEnabled; @@ -38,8 +39,7 @@ public JsonSchemaKafkaDeserializer() { super(); } - public JsonSchemaKafkaDeserializer(RegistryClient client, - SchemaResolver schemaResolver) { + public JsonSchemaKafkaDeserializer(RegistryClient client, SchemaResolver schemaResolver) { super(client, schemaResolver); } @@ -75,7 +75,8 @@ public void configure(Map configs, boolean isKey) { if (null == mapper) { mapper = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) - .setSerializationInclusion(JsonInclude.Include.NON_NULL);; + .setSerializationInclusion(JsonInclude.Include.NON_NULL); + ; } } @@ -96,7 +97,8 @@ public SchemaParser schemaParser() { } /** - * @see io.apicurio.registry.serde.AbstractKafkaDeserializer#readData(io.apicurio.registry.serde.ParsedSchema, java.nio.ByteBuffer, int, int) + * @see io.apicurio.registry.serde.AbstractKafkaDeserializer#readData(io.apicurio.registry.serde.ParsedSchema, + * java.nio.ByteBuffer, int, int) */ @Override protected T readData(ParsedSchema schema, ByteBuffer buffer, int start, int length) { @@ -104,14 +106,17 @@ protected T readData(ParsedSchema schema, ByteBuffer buffer, int sta } /** - * @see 
io.apicurio.registry.serde.AbstractKafkaDeserializer#readData(org.apache.kafka.common.header.Headers, io.apicurio.registry.serde.ParsedSchema, java.nio.ByteBuffer, int, int) + * @see io.apicurio.registry.serde.AbstractKafkaDeserializer#readData(org.apache.kafka.common.header.Headers, + * io.apicurio.registry.serde.ParsedSchema, java.nio.ByteBuffer, int, int) */ @Override - protected T readData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, int length) { + protected T readData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, + int length) { return internalReadData(headers, schema, buffer, start, length); } - private T internalReadData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, int length) { + private T internalReadData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, + int length) { byte[] data = new byte[length]; System.arraycopy(buffer.array(), start, data, 0, length); @@ -134,8 +139,9 @@ private T internalReadData(Headers headers, ParsedSchema schema, Byt if (javaTypeNode != null && !javaTypeNode.isNull()) { javaType = javaTypeNode.textValue(); } - //TODO if javaType is null, maybe warn something like this? - //You can try configure the property \"apicurio.registry.serde.json-schema.java-type\" with the full class name to use for deserialization + // TODO if javaType is null, maybe warn something like this? + // You can try configure the property \"apicurio.registry.serde.json-schema.java-type\" with + // the full class name to use for deserialization messageType = javaType == null ? 
null : Utils.loadClass(javaType); } else { String javaType = serdeHeaders.getMessageType(headers); @@ -143,7 +149,7 @@ private T internalReadData(Headers headers, ParsedSchema schema, Byt } if (messageType == null) { - //TODO maybe warn there is no message type and the deserializer will return a JsonNode + // TODO maybe warn there is no message type and the deserializer will return a JsonNode return mapper.readTree(parser); } else { return mapper.readValue(parser, messageType); diff --git a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaDeserializerConfig.java b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaDeserializerConfig.java index 8040c698d6..777e7ddfca 100644 --- a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaDeserializerConfig.java +++ b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaDeserializerConfig.java @@ -1,25 +1,26 @@ package io.apicurio.registry.serde.jsonschema; -import static io.apicurio.registry.serde.SerdeConfig.*; - -import java.util.Map; - +import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import org.apache.kafka.common.config.ConfigDef.Type; -import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; +import java.util.Map; + +import static io.apicurio.registry.serde.SerdeConfig.*; public class JsonSchemaKafkaDeserializerConfig extends BaseKafkaSerDeConfig { - public static final String SPECIFIC_RETURN_CLASS_DOC = - "The specific class to use for deserializing the data into java objects"; + public static final String SPECIFIC_RETURN_CLASS_DOC = "The specific class to use for deserializing the data into java objects"; private static ConfigDef configDef() { ConfigDef configDef = new ConfigDef() - .define(DESERIALIZER_SPECIFIC_KEY_RETURN_CLASS, 
Type.CLASS, null, Importance.MEDIUM, SPECIFIC_RETURN_CLASS_DOC) - .define(DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, Type.CLASS, null, Importance.MEDIUM, SPECIFIC_RETURN_CLASS_DOC) - .define(VALIDATION_ENABLED, Type.BOOLEAN, VALIDATION_ENABLED_DEFAULT, Importance.MEDIUM, "Whether to validate the data against the json schema"); + .define(DESERIALIZER_SPECIFIC_KEY_RETURN_CLASS, Type.CLASS, null, Importance.MEDIUM, + SPECIFIC_RETURN_CLASS_DOC) + .define(DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, Type.CLASS, null, Importance.MEDIUM, + SPECIFIC_RETURN_CLASS_DOC) + .define(VALIDATION_ENABLED, Type.BOOLEAN, VALIDATION_ENABLED_DEFAULT, Importance.MEDIUM, + "Whether to validate the data against the json schema"); return configDef; } @@ -27,6 +28,7 @@ private static ConfigDef configDef() { /** * Constructor. + * * @param originals */ public JsonSchemaKafkaDeserializerConfig(Map originals, boolean isKey) { diff --git a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaSerializer.java b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaSerializer.java index acd2e2dd8e..06ab2b7cff 100644 --- a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaSerializer.java +++ b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaSerializer.java @@ -23,9 +23,9 @@ * An implementation of the Kafka Serializer for JSON Schema use-cases. This serializer assumes that the * user's application needs to serialize a Java Bean to JSON data using Jackson. In addition to standard * serialization of the bean, this implementation can also optionally validate it against a JSON schema. 
- * */ -public class JsonSchemaKafkaSerializer extends AbstractKafkaSerializer implements Serializer { +public class JsonSchemaKafkaSerializer extends AbstractKafkaSerializer + implements Serializer { private ObjectMapper mapper; private final JsonSchemaParser parser = new JsonSchemaParser<>(); @@ -38,8 +38,8 @@ public JsonSchemaKafkaSerializer() { } public JsonSchemaKafkaSerializer(RegistryClient client, - ArtifactReferenceResolverStrategy artifactResolverStrategy, - SchemaResolver schemaResolver) { + ArtifactReferenceResolverStrategy artifactResolverStrategy, + SchemaResolver schemaResolver) { super(client, artifactResolverStrategy, schemaResolver); } @@ -71,7 +71,8 @@ public void configure(Map configs, boolean isKey) { serdeHeaders = new MessageTypeSerdeHeaders(new HashMap<>(configs), isKey); if (null == mapper) { - this.mapper = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + this.mapper = new ObjectMapper() + .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) .setSerializationInclusion(JsonInclude.Include.NON_NULL); } } @@ -99,20 +100,23 @@ public SchemaParser schemaParser() { return parser; } - /** - * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(io.apicurio.registry.serde.ParsedSchema, java.lang.Object, java.io.OutputStream) + * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(io.apicurio.registry.serde.ParsedSchema, + * java.lang.Object, java.io.OutputStream) */ @Override - protected void serializeData(ParsedSchema schema, T data, OutputStream out) throws IOException { + protected void serializeData(ParsedSchema schema, T data, OutputStream out) + throws IOException { serializeData(null, schema, data, out); } /** - * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(org.apache.kafka.common.header.Headers, io.apicurio.registry.serde.ParsedSchema, java.lang.Object, java.io.OutputStream) + * @see 
io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(org.apache.kafka.common.header.Headers, + * io.apicurio.registry.serde.ParsedSchema, java.lang.Object, java.io.OutputStream) */ @Override - protected void serializeData(Headers headers, ParsedSchema schema, T data, OutputStream out) throws IOException { + protected void serializeData(Headers headers, ParsedSchema schema, T data, OutputStream out) + throws IOException { final byte[] dataBytes = mapper.writeValueAsBytes(data); if (isValidationEnabled()) { JsonSchemaValidationUtil.validateDataWithSchema(schema, dataBytes, mapper); diff --git a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaSerializerConfig.java b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaSerializerConfig.java index a0ab2c6cb6..03c4666737 100644 --- a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaSerializerConfig.java +++ b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaKafkaSerializerConfig.java @@ -1,24 +1,24 @@ package io.apicurio.registry.serde.jsonschema; -import static io.apicurio.registry.serde.SerdeConfig.*; - -import java.util.Map; - +import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import org.apache.kafka.common.config.ConfigDef.Type; -import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; +import java.util.Map; + +import static io.apicurio.registry.serde.SerdeConfig.*; public class JsonSchemaKafkaSerializerConfig extends BaseKafkaSerDeConfig { private static ConfigDef configDef() { - return new ConfigDef() - .define(VALIDATION_ENABLED, Type.BOOLEAN, VALIDATION_ENABLED_DEFAULT, Importance.MEDIUM, "Whether to validate the data against the json schema"); + return new ConfigDef().define(VALIDATION_ENABLED, Type.BOOLEAN, 
VALIDATION_ENABLED_DEFAULT, + Importance.MEDIUM, "Whether to validate the data against the json schema"); } /** * Constructor. + * * @param originals */ public JsonSchemaKafkaSerializerConfig(Map originals) { diff --git a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaParser.java b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaParser.java index 1cbf212903..0fdaea97a8 100644 --- a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaParser.java +++ b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaParser.java @@ -27,24 +27,26 @@ public String artifactType() { * @see io.apicurio.registry.serde.SchemaParser#parseSchema(byte[]) */ @Override - public JsonSchema parseSchema(byte[] rawSchema, Map> resolvedReferences) { + public JsonSchema parseSchema(byte[] rawSchema, + Map> resolvedReferences) { Map referenceSchemas = new HashMap<>(); resolveReferences(resolvedReferences, referenceSchemas); - JsonSchemaFactory schemaFactory = JsonSchemaFactory - .getInstance(SpecVersion.VersionFlag.V7, - builder -> builder.schemaLoaders(schemaLoaders -> schemaLoaders.schemas(referenceSchemas))); + JsonSchemaFactory schemaFactory = JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V7, + builder -> builder.schemaLoaders(schemaLoaders -> schemaLoaders.schemas(referenceSchemas))); return schemaFactory.getSchema(IoUtil.toString(rawSchema)); } - private void resolveReferences(Map> resolvedReferences, Map referenceSchemas) { + private void resolveReferences(Map> resolvedReferences, + Map referenceSchemas) { resolvedReferences.forEach((referenceName, schema) -> { if (schema.hasReferences()) { - resolveReferences(schema.getSchemaReferences() - .stream() - .collect(Collectors.toMap(parsedSchema -> parsedSchema.getParsedSchema().getId(), parsedSchema -> parsedSchema)), referenceSchemas); + resolveReferences(schema.getSchemaReferences().stream() + 
.collect(Collectors.toMap(parsedSchema -> parsedSchema.getParsedSchema().getId(), + parsedSchema -> parsedSchema)), + referenceSchemas); } referenceSchemas.put(schema.getParsedSchema().getId(), IoUtil.toString(schema.getRawSchema())); @@ -56,19 +58,20 @@ private void resolveReferences(Map> resolvedRef */ @Override public ParsedSchema getSchemaFromData(Record data) { - //not supported for jsonschema type + // not supported for jsonschema type return null; } @Override public ParsedSchema getSchemaFromData(Record data, boolean dereference) { - //not supported for jsonschema type + // not supported for jsonschema type return null; } @Override public ParsedSchema getSchemaFromLocation(String location) { - String rawSchema = IoUtil.toString(Thread.currentThread().getContextClassLoader().getResourceAsStream(location)); + String rawSchema = IoUtil + .toString(Thread.currentThread().getContextClassLoader().getResourceAsStream(location)); JsonSchemaFactory factory = JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V7); return new ParsedSchemaImpl() .setParsedSchema(factory.getSchema(IoUtil.toStream(rawSchema))) diff --git a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaValidationUtil.java b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaValidationUtil.java index 1028ccbaf0..a25573664a 100644 --- a/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaValidationUtil.java +++ b/serdes/jsonschema-serde/src/main/java/io/apicurio/registry/serde/jsonschema/JsonSchemaValidationUtil.java @@ -7,9 +7,10 @@ import java.io.IOException; import java.util.Set; + /** * @author Carles Arnal -*/ + */ public class JsonSchemaValidationUtil { /** @@ -18,15 +19,18 @@ public class JsonSchemaValidationUtil { * @param mapper the object mapper to be used to read the data. * @throws IOException In case of validation errors, a IO exception is thrown. 
*/ - protected static void validateDataWithSchema(ParsedSchema schema, byte[] data, ObjectMapper mapper) throws IOException { - final Set validationMessages = schema.getParsedSchema().validate(mapper.readTree(data)); + protected static void validateDataWithSchema(ParsedSchema schema, byte[] data, + ObjectMapper mapper) throws IOException { + final Set validationMessages = schema.getParsedSchema() + .validate(mapper.readTree(data)); if (validationMessages != null && !validationMessages.isEmpty()) { - //There are validation failures + // There are validation failures StringBuilder message = new StringBuilder(); - for (ValidationMessage validationMessage: validationMessages) { + for (ValidationMessage validationMessage : validationMessages) { message.append(validationMessage.getMessage()).append(" "); } - throw new IOException(String.format("Error validating data against json schema with message: %s", message)); + throw new IOException( + String.format("Error validating data against json schema with message: %s", message)); } } } diff --git a/serdes/protobuf-serde/pom.xml b/serdes/protobuf-serde/pom.xml index db289d6ce7..8615a962fb 100644 --- a/serdes/protobuf-serde/pom.xml +++ b/serdes/protobuf-serde/pom.xml @@ -1,84 +1,81 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-serdes-protobuf-serde - jar - apicurio-registry-serdes-protobuf-serde + apicurio-registry-serdes-protobuf-serde + jar + apicurio-registry-serdes-protobuf-serde - - - io.apicurio - apicurio-registry-serde-common - + + + io.apicurio + apicurio-registry-serde-common + - - io.apicurio - apicurio-registry-schema-util-protobuf - + + io.apicurio + apicurio-registry-schema-util-protobuf + - - com.google.protobuf - protobuf-java - + + com.google.protobuf + protobuf-java + - - com.squareup.wire - wire-schema - + + com.squareup.wire + wire-schema + - - com.squareup.wire - 
wire-compiler - - + + com.squareup.wire + wire-compiler + + - + - - - kr.motd.maven - os-maven-plugin - 1.7.1 - - - initialize - - detect - - - - + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + detect + + initialize + + + - - org.xolstice.maven.plugins - protobuf-maven-plugin - ${proto-plugin.version} - true - - - gencode - generate-sources - - compile - - - - com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} - - - - - - + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + + compile + + generate-sources + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + - + diff --git a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaDeserializer.java b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaDeserializer.java index 19d44f4437..e7bbd95b9d 100644 --- a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaDeserializer.java +++ b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaDeserializer.java @@ -1,36 +1,35 @@ package io.apicurio.registry.serde.protobuf; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.UncheckedIOException; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -import org.apache.kafka.common.config.ConfigException; -import org.apache.kafka.common.errors.SerializationException; -import org.apache.kafka.common.header.Headers; - import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors.Descriptor; import com.google.protobuf.Descriptors.FileDescriptor; import com.google.protobuf.DynamicMessage; import com.google.protobuf.Message; - import 
io.apicurio.registry.resolver.ParsedSchema; import io.apicurio.registry.resolver.SchemaParser; import io.apicurio.registry.resolver.SchemaResolver; import io.apicurio.registry.resolver.utils.Utils; import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.serde.AbstractKafkaDeserializer; -import io.apicurio.registry.utils.protobuf.schema.ProtobufSchema; import io.apicurio.registry.serde.protobuf.ref.RefOuterClass.Ref; +import io.apicurio.registry.utils.protobuf.schema.ProtobufSchema; +import org.apache.kafka.common.config.ConfigException; +import org.apache.kafka.common.errors.SerializationException; +import org.apache.kafka.common.header.Headers; -public class ProtobufKafkaDeserializer extends AbstractKafkaDeserializer { +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.UncheckedIOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public class ProtobufKafkaDeserializer + extends AbstractKafkaDeserializer { private static final String PROTOBUF_PARSE_METHOD = "parseFrom"; @@ -44,7 +43,6 @@ public class ProtobufKafkaDeserializer extends AbstractKafkaD private ProtobufSerdeHeaders serdeHeaders; - public ProtobufKafkaDeserializer() { super(); } @@ -71,15 +69,19 @@ public void configure(Map configs, boolean isKey) { try { if (specificReturnClass != null) { if (specificReturnClass.equals(DynamicMessage.class)) { - this.specificReturnClassParseMethod = specificReturnClass.getDeclaredMethod(PROTOBUF_PARSE_METHOD, Descriptor.class, InputStream.class); + this.specificReturnClassParseMethod = specificReturnClass + .getDeclaredMethod(PROTOBUF_PARSE_METHOD, Descriptor.class, InputStream.class); } else if (!specificReturnClass.equals(Object.class)) { - this.specificReturnClassParseMethod = 
specificReturnClass.getDeclaredMethod(PROTOBUF_PARSE_METHOD, InputStream.class); + this.specificReturnClassParseMethod = specificReturnClass + .getDeclaredMethod(PROTOBUF_PARSE_METHOD, InputStream.class); } else { - throw new ConfigException("Class " + specificReturnClass.getCanonicalName() + " is not a valid protobuf message class"); + throw new ConfigException("Class " + specificReturnClass.getCanonicalName() + + " is not a valid protobuf message class"); } } } catch (Exception e) { - throw new ConfigException("Class " + specificReturnClass.getCanonicalName() + " is not a valid protobuf message class", e); + throw new ConfigException("Class " + specificReturnClass.getCanonicalName() + + " is not a valid protobuf message class", e); } deriveClass = config.deriveClass(); @@ -96,15 +98,18 @@ public SchemaParser schemaParser() { } /** - * @see io.apicurio.registry.serde.AbstractKafkaDeserializer#readData(org.apache.kafka.common.header.Headers, io.apicurio.registry.serde.ParsedSchema, java.nio.ByteBuffer, int, int) + * @see io.apicurio.registry.serde.AbstractKafkaDeserializer#readData(org.apache.kafka.common.header.Headers, + * io.apicurio.registry.serde.ParsedSchema, java.nio.ByteBuffer, int, int) */ @Override - protected U readData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, int length) { + protected U readData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, + int length) { return internalReadData(headers, schema, buffer, start, length); } /** - * @see io.apicurio.registry.serde.AbstractKafkaDeserializer#readData(io.apicurio.registry.serde.ParsedSchema, java.nio.ByteBuffer, int, int) + * @see io.apicurio.registry.serde.AbstractKafkaDeserializer#readData(io.apicurio.registry.serde.ParsedSchema, + * java.nio.ByteBuffer, int, int) */ @Override protected U readData(ParsedSchema schema, ByteBuffer buffer, int start, int length) { @@ -112,7 +117,8 @@ protected U readData(ParsedSchema schema, ByteBuffer buffer, int } 
@SuppressWarnings("unchecked") - protected U internalReadData(Headers headers, ParsedSchema schema, ByteBuffer buff, int start, int length) { + protected U internalReadData(Headers headers, ParsedSchema schema, ByteBuffer buff, + int start, int length) { try { byte[] bytes = new byte[length]; System.arraycopy(buff.array(), start, bytes, 0, length); @@ -123,16 +129,18 @@ protected U internalReadData(Headers headers, ParsedSchema schem if (headers != null) { String messageTypeName = serdeHeaders.getProtobufTypeName(headers); if (messageTypeName != null) { - descriptor = schema.getParsedSchema().getFileDescriptor().findMessageTypeByName(messageTypeName); + descriptor = schema.getParsedSchema().getFileDescriptor() + .findMessageTypeByName(messageTypeName); } } - if (descriptor == null){ + if (descriptor == null) { try { Ref ref = Ref.parseDelimitedFrom(is); - descriptor = schema.getParsedSchema().getFileDescriptor().findMessageTypeByName(ref.getName()); + descriptor = schema.getParsedSchema().getFileDescriptor() + .findMessageTypeByName(ref.getName()); } catch (IOException e) { is = new ByteArrayInputStream(bytes); - //use the first message type found + // use the first message type found descriptor = schema.getParsedSchema().getFileDescriptor().getMessageTypes().get(0); } } @@ -173,7 +181,8 @@ private U invokeParseMethod(InputStream buffer, String className) { try { return protobufClass.getDeclaredMethod(PROTOBUF_PARSE_METHOD, InputStream.class); } catch (NoSuchMethodException | SecurityException e) { - throw new SerializationException("Class " + className + " is not a valid protobuf message class", e); + throw new SerializationException( + "Class " + className + " is not a valid protobuf message class", e); } }); return (U) parseMethod.invoke(null, buffer); @@ -183,7 +192,7 @@ private U invokeParseMethod(InputStream buffer, String className) { } } - //TODO refactor + // TODO refactor public String deriveClassFromDescriptor(Descriptor des) { Descriptor descriptor = 
des; FileDescriptor fd = descriptor.getFile(); @@ -210,6 +219,6 @@ public String deriveClassFromDescriptor(Descriptor des) { String d1 = (!outer.isEmpty() || inner.length() != 0 ? "." : ""); String d2 = (!outer.isEmpty() && inner.length() != 0 ? "$" : ""); return p + d1 + outer + d2 + inner; - } + } } diff --git a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaDeserializerConfig.java b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaDeserializerConfig.java index 8f860f3fe1..26b774cf58 100644 --- a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaDeserializerConfig.java +++ b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaDeserializerConfig.java @@ -1,29 +1,29 @@ package io.apicurio.registry.serde.protobuf; -import static io.apicurio.registry.serde.SerdeConfig.*; - -import java.util.Map; - +import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import org.apache.kafka.common.config.ConfigDef.Type; -import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; +import java.util.Map; + +import static io.apicurio.registry.serde.SerdeConfig.*; public class ProtobufKafkaDeserializerConfig extends BaseKafkaSerDeConfig { - public static final String SPECIFIC_RETURN_CLASS_DOC = - "A class generated by Protocol buffers that the message value should be deserialized to"; + public static final String SPECIFIC_RETURN_CLASS_DOC = "A class generated by Protocol buffers that the message value should be deserialized to"; public static final String DERIVE_CLASS_FROM_SCHEMA = "apicurio.protobuf.derive.class"; - public static final String DERIVE_CLASS_FROM_SCHEMA_DOC = - "Whether to derive the class based on `java_outer_classname` and `java_multiple_files` from the Protobuf schema."; + public static final String 
DERIVE_CLASS_FROM_SCHEMA_DOC = "Whether to derive the class based on `java_outer_classname` and `java_multiple_files` from the Protobuf schema."; private static ConfigDef configDef() { ConfigDef configDef = new ConfigDef() - .define(DESERIALIZER_SPECIFIC_KEY_RETURN_CLASS, Type.CLASS, null, Importance.MEDIUM, SPECIFIC_RETURN_CLASS_DOC) - .define(DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, Type.CLASS, null, Importance.MEDIUM, SPECIFIC_RETURN_CLASS_DOC) - .define(DERIVE_CLASS_FROM_SCHEMA, Type.BOOLEAN, false, Importance.MEDIUM, DERIVE_CLASS_FROM_SCHEMA_DOC); + .define(DESERIALIZER_SPECIFIC_KEY_RETURN_CLASS, Type.CLASS, null, Importance.MEDIUM, + SPECIFIC_RETURN_CLASS_DOC) + .define(DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS, Type.CLASS, null, Importance.MEDIUM, + SPECIFIC_RETURN_CLASS_DOC) + .define(DERIVE_CLASS_FROM_SCHEMA, Type.BOOLEAN, false, Importance.MEDIUM, + DERIVE_CLASS_FROM_SCHEMA_DOC); return configDef; } @@ -31,6 +31,7 @@ private static ConfigDef configDef() { /** * Constructor. + * * @param originals */ public ProtobufKafkaDeserializerConfig(Map originals, boolean isKey) { diff --git a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaSerializer.java b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaSerializer.java index 515bf2ab88..61a5bbab9a 100644 --- a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaSerializer.java +++ b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaSerializer.java @@ -1,27 +1,25 @@ package io.apicurio.registry.serde.protobuf; -import java.io.IOException; -import java.io.OutputStream; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.kafka.common.errors.SerializationException; -import org.apache.kafka.common.header.Headers; - import com.google.protobuf.Message; - import io.apicurio.registry.protobuf.ProtobufDifference; import 
io.apicurio.registry.resolver.ParsedSchema; import io.apicurio.registry.resolver.SchemaParser; import io.apicurio.registry.resolver.SchemaResolver; import io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy; -import io.apicurio.registry.utils.protobuf.schema.ProtobufFile; import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.rules.compatibility.protobuf.ProtobufCompatibilityCheckerLibrary; import io.apicurio.registry.serde.AbstractKafkaSerializer; import io.apicurio.registry.serde.protobuf.ref.RefOuterClass.Ref; +import io.apicurio.registry.utils.protobuf.schema.ProtobufFile; import io.apicurio.registry.utils.protobuf.schema.ProtobufSchema; +import org.apache.kafka.common.errors.SerializationException; +import org.apache.kafka.common.header.Headers; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.HashMap; +import java.util.List; +import java.util.Map; public class ProtobufKafkaSerializer extends AbstractKafkaSerializer { @@ -67,27 +65,34 @@ public SchemaParser schemaParser() { } /** - * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(io.apicurio.registry.serde.ParsedSchema, java.lang.Object, java.io.OutputStream) + * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(io.apicurio.registry.serde.ParsedSchema, + * java.lang.Object, java.io.OutputStream) */ @Override - protected void serializeData(ParsedSchema schema, U data, OutputStream out) throws IOException { + protected void serializeData(ParsedSchema schema, U data, OutputStream out) + throws IOException { serializeData(null, schema, data, out); } /** - * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(org.apache.kafka.common.header.Headers, io.apicurio.registry.serde.ParsedSchema, java.lang.Object, java.io.OutputStream) + * @see io.apicurio.registry.serde.AbstractKafkaSerializer#serializeData(org.apache.kafka.common.header.Headers, + * 
io.apicurio.registry.serde.ParsedSchema, java.lang.Object, java.io.OutputStream) */ @Override - protected void serializeData(Headers headers, ParsedSchema schema, U data, OutputStream out) throws IOException { + protected void serializeData(Headers headers, ParsedSchema schema, U data, + OutputStream out) throws IOException { if (validationEnabled) { - if (schema.getParsedSchema() != null && schema.getParsedSchema().getFileDescriptor().findMessageTypeByName(data.getDescriptorForType().getName()) == null) { - throw new SerializationException("Missing message type " + data.getDescriptorForType().getName() + " in the protobuf schema"); + if (schema.getParsedSchema() != null && schema.getParsedSchema().getFileDescriptor() + .findMessageTypeByName(data.getDescriptorForType().getName()) == null) { + throw new SerializationException("Missing message type " + + data.getDescriptorForType().getName() + " in the protobuf schema"); } List diffs = validate(schema, data); if (!diffs.isEmpty()) { - throw new SerializationException("The data to send is not compatible with the schema. " + diffs); + throw new SerializationException( + "The data to send is not compatible with the schema. 
" + diffs); } } @@ -96,9 +101,7 @@ protected void serializeData(Headers headers, ParsedSchema schem serdeHeaders.addMessageTypeHeader(headers, data.getClass().getName()); serdeHeaders.addProtobufTypeNameHeader(headers, data.getDescriptorForType().getName()); } else { - Ref ref = Ref.newBuilder() - .setName(data.getDescriptorForType().getName()) - .build(); + Ref ref = Ref.newBuilder().setName(data.getDescriptorForType().getName()).build(); ref.writeDelimitedTo(out); } @@ -107,8 +110,10 @@ protected void serializeData(Headers headers, ParsedSchema schem private List validate(ParsedSchema schemaFromRegistry, U data) { ProtobufFile fileBefore = schemaFromRegistry.getParsedSchema().getProtobufFile(); - ProtobufFile fileAfter = new ProtobufFile(parser.toProtoFileElement(data.getDescriptorForType().getFile())); - ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileBefore, fileAfter); + ProtobufFile fileAfter = new ProtobufFile( + parser.toProtoFileElement(data.getDescriptorForType().getFile())); + ProtobufCompatibilityCheckerLibrary checker = new ProtobufCompatibilityCheckerLibrary(fileBefore, + fileAfter); return checker.findDifferences(); } diff --git a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaSerializerConfig.java b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaSerializerConfig.java index b26420fc3f..dd023342e2 100644 --- a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaSerializerConfig.java +++ b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufKafkaSerializerConfig.java @@ -1,26 +1,27 @@ package io.apicurio.registry.serde.protobuf; -import static io.apicurio.registry.serde.SerdeConfig.VALIDATION_ENABLED; -import static io.apicurio.registry.serde.SerdeConfig.VALIDATION_ENABLED_DEFAULT; - -import java.util.Map; - +import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; 
import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import org.apache.kafka.common.config.ConfigDef.Type; -import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; +import java.util.Map; + +import static io.apicurio.registry.serde.SerdeConfig.VALIDATION_ENABLED; +import static io.apicurio.registry.serde.SerdeConfig.VALIDATION_ENABLED_DEFAULT; public class ProtobufKafkaSerializerConfig extends BaseKafkaSerDeConfig { private static ConfigDef configDef() { - ConfigDef configDef = new ConfigDef() - .define(VALIDATION_ENABLED, Type.BOOLEAN, VALIDATION_ENABLED_DEFAULT, Importance.MEDIUM, "Whether to validate the data being sent adheres to the schema being used"); + ConfigDef configDef = new ConfigDef().define(VALIDATION_ENABLED, Type.BOOLEAN, + VALIDATION_ENABLED_DEFAULT, Importance.MEDIUM, + "Whether to validate the data being sent adheres to the schema being used"); return configDef; } /** * Constructor. + * * @param originals */ public ProtobufKafkaSerializerConfig(Map originals) { diff --git a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSchemaParser.java b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSchemaParser.java index 75ceaaa23d..579437fed6 100644 --- a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSchemaParser.java +++ b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSchemaParser.java @@ -38,10 +38,12 @@ public String artifactType() { * @see io.apicurio.registry.serde.SchemaParser#parseSchema(byte[]) */ @Override - public ProtobufSchema parseSchema(byte[] rawSchema, Map> resolvedReferences) { + public ProtobufSchema parseSchema(byte[] rawSchema, + Map> resolvedReferences) { try { - //textual .proto file - ProtoFileElement fileElem = ProtoParser.Companion.parse(FileDescriptorUtils.DEFAULT_LOCATION, IoUtil.toString(rawSchema)); + // textual .proto file + 
ProtoFileElement fileElem = ProtoParser.Companion.parse(FileDescriptorUtils.DEFAULT_LOCATION, + IoUtil.toString(rawSchema)); Map dependencies = new HashMap<>(); resolvedReferences.forEach((key, value) -> { dependencies.put(key, value.getParsedSchema().getProtoFileElement()); @@ -52,10 +54,11 @@ public ProtobufSchema parseSchema(byte[] rawSchema, Map> schemaReferences, Map dependencies) { + private void addReferencesToDependencies(List> schemaReferences, + Map dependencies) { schemaReferences.forEach(parsedSchema -> { - dependencies.put(parsedSchema.referenceName(), parsedSchema.getParsedSchema().getProtoFileElement()); + dependencies.put(parsedSchema.referenceName(), + parsedSchema.getParsedSchema().getProtoFileElement()); if (parsedSchema.hasReferences()) { addReferencesToDependencies(parsedSchema.getSchemaReferences(), dependencies); } @@ -106,11 +114,9 @@ public ParsedSchema getSchemaFromData(Record data) { byte[] rawSchema = IoUtil.toBytes(protoFileElement.toSchema()); - return new ParsedSchemaImpl() - .setParsedSchema(protobufSchema) + return new ParsedSchemaImpl().setParsedSchema(protobufSchema) .setReferenceName(protobufSchema.getFileDescriptor().getName()) - .setSchemaReferences(handleDependencies(schemaFileDescriptor)) - .setRawSchema(rawSchema); + .setSchemaReferences(handleDependencies(schemaFileDescriptor)).setRawSchema(rawSchema); } @Override @@ -123,15 +129,15 @@ private List> handleDependencies(FileDescriptor fil fileDescriptor.getDependencies().forEach(referenceFileDescriptor -> { ProtoFileElement referenceProtoFileElement = toProtoFileElement(referenceFileDescriptor); - ProtobufSchema referenceProtobufSchema = new ProtobufSchema(referenceFileDescriptor, referenceProtoFileElement); + ProtobufSchema referenceProtobufSchema = new ProtobufSchema(referenceFileDescriptor, + referenceProtoFileElement); byte[] rawSchema = IoUtil.toBytes(referenceProtoFileElement.toSchema()); ParsedSchema referencedSchema = new ParsedSchemaImpl() 
.setParsedSchema(referenceProtobufSchema) .setReferenceName(referenceProtobufSchema.getFileDescriptor().getName()) - .setSchemaReferences(handleDependencies(referenceFileDescriptor)) - .setRawSchema(rawSchema); + .setSchemaReferences(handleDependencies(referenceFileDescriptor)).setRawSchema(rawSchema); schemaReferences.add(referencedSchema); }); @@ -139,7 +145,8 @@ private List> handleDependencies(FileDescriptor fil } /** - * This method converts the Descriptor to a ProtoFileElement that allows to get a textual representation .proto file + * This method converts the Descriptor to a ProtoFileElement that allows to get a textual representation + * .proto file * * @param fileDescriptor * @return textual protobuf representation diff --git a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSerde.java b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSerde.java index ec3a9e8d8d..9a39d332f8 100644 --- a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSerde.java +++ b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSerde.java @@ -1,7 +1,6 @@ package io.apicurio.registry.serde.protobuf; import com.google.protobuf.Message; - import io.apicurio.registry.serde.AbstractSerde; /**** @@ -13,4 +12,3 @@ public ProtobufSerde() { super(new ProtobufKafkaSerializer(), new ProtobufKafkaDeserializer()); } } - diff --git a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSerdeHeaders.java b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSerdeHeaders.java index 78003479d4..a261420d34 100644 --- a/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSerdeHeaders.java +++ b/serdes/protobuf-serde/src/main/java/io/apicurio/registry/serde/protobuf/ProtobufSerdeHeaders.java @@ -1,12 +1,11 @@ package io.apicurio.registry.serde.protobuf; -import java.util.Map; - +import 
io.apicurio.registry.serde.headers.MessageTypeSerdeHeaders; +import io.apicurio.registry.utils.IoUtil; import org.apache.kafka.common.header.Header; import org.apache.kafka.common.header.Headers; -import io.apicurio.registry.serde.headers.MessageTypeSerdeHeaders; -import io.apicurio.registry.utils.IoUtil; +import java.util.Map; public class ProtobufSerdeHeaders extends MessageTypeSerdeHeaders { @@ -14,6 +13,7 @@ public class ProtobufSerdeHeaders extends MessageTypeSerdeHeaders { /** * Constructor. + * * @param configs * @param isKey */ diff --git a/serdes/serde-common/pom.xml b/serdes/serde-common/pom.xml index 73ccb360f9..595e9f1b44 100644 --- a/serdes/serde-common/pom.xml +++ b/serdes/serde-common/pom.xml @@ -1,47 +1,44 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-serde-common - jar - apicurio-registry-serde-common + apicurio-registry-serde-common + jar + apicurio-registry-serde-common - - - io.apicurio - apicurio-registry-schema-resolver - + + + io.apicurio + apicurio-registry-schema-resolver + - - org.slf4j - slf4j-api - + + org.slf4j + slf4j-api + - - org.jboss.slf4j - slf4j-jboss-logging - ${jboss-slf4j.version} - + + org.jboss.slf4j + slf4j-jboss-logging + ${jboss-slf4j.version} + - - org.apache.kafka - kafka-clients - + + org.apache.kafka + kafka-clients + + + + org.junit.jupiter + junit-jupiter + test + + - - org.junit.jupiter - junit-jupiter - test - - - diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaDeserializer.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaDeserializer.java index 21f3425735..e881ccad3f 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaDeserializer.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaDeserializer.java @@ -1,12 +1,5 @@ package 
io.apicurio.registry.serde; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.nio.ByteBuffer; - -import org.apache.kafka.common.header.Headers; -import org.apache.kafka.common.serialization.Deserializer; - import io.apicurio.registry.resolver.ParsedSchema; import io.apicurio.registry.resolver.SchemaLookupResult; import io.apicurio.registry.resolver.SchemaResolver; @@ -17,8 +10,15 @@ import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; import io.apicurio.registry.serde.fallback.DefaultFallbackArtifactProvider; import io.apicurio.registry.serde.fallback.FallbackArtifactProvider; +import org.apache.kafka.common.header.Headers; +import org.apache.kafka.common.serialization.Deserializer; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.ByteBuffer; -public abstract class AbstractKafkaDeserializer extends AbstractKafkaSerDe implements Deserializer { +public abstract class AbstractKafkaDeserializer extends AbstractKafkaSerDe + implements Deserializer { protected FallbackArtifactProvider fallbackArtifactProvider; @@ -39,7 +39,8 @@ public AbstractKafkaDeserializer(RegistryClient client, SchemaResolver sch } /** - * @see io.apicurio.registry.serde.AbstractKafkaSerDe#configure(io.apicurio.registry.serde.config.BaseKafkaSerDeConfig, boolean) + * @see io.apicurio.registry.serde.AbstractKafkaSerDe#configure(io.apicurio.registry.serde.config.BaseKafkaSerDeConfig, + * boolean) */ @Override protected void configure(BaseKafkaSerDeConfig config, boolean isKey) { @@ -48,12 +49,13 @@ protected void configure(BaseKafkaSerDeConfig config, boolean isKey) { BaseKafkaDeserializerConfig deserializerConfig = new BaseKafkaDeserializerConfig(config.originals()); Object fallbackProvider = deserializerConfig.getFallbackArtifactProvider(); - Utils.instantiate(FallbackArtifactProvider.class, fallbackProvider, this::setFallbackArtifactProvider); + Utils.instantiate(FallbackArtifactProvider.class, fallbackProvider, + 
this::setFallbackArtifactProvider); fallbackArtifactProvider.configure(config.originals(), isKey); if (fallbackArtifactProvider instanceof DefaultFallbackArtifactProvider) { if (!((DefaultFallbackArtifactProvider) fallbackArtifactProvider).isConfigured()) { - //it's not configured, just remove it so it's not executed + // it's not configured, just remove it so it's not executed fallbackArtifactProvider = null; } } @@ -69,7 +71,8 @@ public void setFallbackArtifactProvider(FallbackArtifactProvider fallbackArtifac protected abstract U readData(ParsedSchema schema, ByteBuffer buffer, int start, int length); - protected abstract U readData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, int length); + protected abstract U readData(Headers headers, ParsedSchema schema, ByteBuffer buffer, int start, + int length); @Override public U deserialize(String topic, byte[] data) { @@ -103,10 +106,11 @@ public U deserialize(String topic, Headers headers, byte[] data) { } if (data[0] == MAGIC_BYTE) { return deserialize(topic, data); - } else if (headers == null){ + } else if (headers == null) { throw new IllegalStateException("Headers cannot be null"); } else { - //try to read data even if artifactReference has no value, maybe there is a fallbackArtifactProvider configured + // try to read data even if artifactReference has no value, maybe there is a + // fallbackArtifactProvider configured return readData(topic, headers, data, artifactReference); } } @@ -121,7 +125,8 @@ private U readData(String topic, Headers headers, byte[] data, ArtifactReference return readData(headers, schema.getParsedSchema(), buffer, start, length); } - private SchemaLookupResult resolve(String topic, Headers headers, byte[] data, ArtifactReference artifactReference) { + private SchemaLookupResult resolve(String topic, Headers headers, byte[] data, + ArtifactReference artifactReference) { try { return getSchemaResolver().resolveSchemaByArtifactReference(artifactReference); } catch 
(RuntimeException e) { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaSerDe.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaSerDe.java index 0dae2189bf..2606447681 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaSerDe.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaSerDe.java @@ -1,15 +1,14 @@ package io.apicurio.registry.serde; -import org.apache.kafka.common.errors.SerializationException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import io.apicurio.registry.resolver.SchemaParser; import io.apicurio.registry.resolver.SchemaResolver; import io.apicurio.registry.resolver.utils.Utils; import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; import io.apicurio.registry.serde.headers.HeadersHandler; +import org.apache.kafka.common.errors.SerializationException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; import java.util.Map; @@ -17,7 +16,6 @@ /** * Common class for both serializer and deserializer. - * */ public abstract class AbstractKafkaSerDe extends SchemaResolverConfigurer { @@ -56,7 +54,8 @@ protected void configure(BaseKafkaSerDeConfig config, boolean isKey) { if (config.enableConfluentIdHandler()) { if (idHandler != null && !(idHandler instanceof Legacy4ByteIdHandler)) { - log.warn(String.format("Duplicate id-handler configuration: %s vs. %s", idh, "as-confluent")); + log.warn(String.format("Duplicate id-handler configuration: %s vs. 
%s", idh, + "as-confluent")); } setIdHandler(new Legacy4ByteIdHandler()); } diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaSerializer.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaSerializer.java index 6bc35f76a2..c7f9be3608 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaSerializer.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractKafkaSerializer.java @@ -5,19 +5,20 @@ import io.apicurio.registry.resolver.SchemaResolver; import io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy; import io.apicurio.registry.rest.client.RegistryClient; +import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; +import io.apicurio.registry.serde.data.KafkaSerdeMetadata; +import io.apicurio.registry.serde.data.KafkaSerdeRecord; import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.serialization.Serializer; -import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; -import io.apicurio.registry.serde.data.KafkaSerdeRecord; -import io.apicurio.registry.serde.data.KafkaSerdeMetadata; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.UncheckedIOException; import java.util.Map; -public abstract class AbstractKafkaSerializer extends AbstractKafkaSerDe implements Serializer { +public abstract class AbstractKafkaSerializer extends AbstractKafkaSerDe + implements Serializer { public AbstractKafkaSerializer() { super(); @@ -31,7 +32,9 @@ public AbstractKafkaSerializer(SchemaResolver schemaResolver) { super(schemaResolver); } - public AbstractKafkaSerializer(RegistryClient client, ArtifactReferenceResolverStrategy artifactResolverStrategy, SchemaResolver schemaResolver) { + public AbstractKafkaSerializer(RegistryClient client, + ArtifactReferenceResolverStrategy artifactResolverStrategy, + SchemaResolver schemaResolver) { super(client, 
schemaResolver); getSchemaResolver().setArtifactResolverStrategy(artifactResolverStrategy); } @@ -41,9 +44,11 @@ public void configure(Map configs, boolean isKey) { super.configure(new BaseKafkaSerDeConfig(configs), isKey); } - protected abstract void serializeData(ParsedSchema schema, U data, OutputStream out) throws IOException; + protected abstract void serializeData(ParsedSchema schema, U data, OutputStream out) + throws IOException; - protected abstract void serializeData(Headers headers, ParsedSchema schema, U data, OutputStream out) throws IOException; + protected abstract void serializeData(Headers headers, ParsedSchema schema, U data, OutputStream out) + throws IOException; @Override public byte[] serialize(String topic, U data) { @@ -60,7 +65,8 @@ public byte[] serialize(String topic, Headers headers, U data) { KafkaSerdeMetadata resolverMetadata = new KafkaSerdeMetadata(topic, isKey(), headers); - SchemaLookupResult schema = getSchemaResolver().resolveSchema(new KafkaSerdeRecord<>(resolverMetadata, data)); + SchemaLookupResult schema = getSchemaResolver() + .resolveSchema(new KafkaSerdeRecord<>(resolverMetadata, data)); ByteArrayOutputStream out = new ByteArrayOutputStream(); if (headersHandler != null && headers != null) { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractSchemaResolver.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractSchemaResolver.java index 948d526fdd..73d1d883de 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractSchemaResolver.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/AbstractSchemaResolver.java @@ -26,8 +26,8 @@ import static io.apicurio.registry.client.auth.VertXAuthFactory.buildSimpleAuthWebClient; /** - * This class is deprecated, it's recommended to migrate to the new implementation at {@link io.apicurio.registry.resolver.AbstractSchemaResolver} - * Base implementation of {@link SchemaResolver} + * This class 
is deprecated, it's recommended to migrate to the new implementation at + * {@link io.apicurio.registry.resolver.AbstractSchemaResolver} Base implementation of {@link SchemaResolver} */ @Deprecated public abstract class AbstractSchemaResolver implements SchemaResolver { @@ -45,9 +45,10 @@ public abstract class AbstractSchemaResolver implements SchemaResolver configs, io.apicurio.registry.resolver.SchemaParser schemaMapper) { + public void configure(Map configs, + io.apicurio.registry.resolver.SchemaParser schemaMapper) { this.schemaParser = schemaMapper; if (this.vertx == null) { @@ -55,7 +56,7 @@ public void configure(Map configs, io.apicurio.registry.resolver.Sche } Object isKeyFromConfig = configs.get(SerdeConfig.IS_KEY); - //is key have to come always, we set it + // is key have to come always, we set it configure(configs, (Boolean) isKeyFromConfig, new SchemaParser() { /** @@ -71,7 +72,7 @@ public String artifactType() { */ @Override public Object parseSchema(byte[] rawSchema) { - //Empty map passed as references. References are not supported when using this class. + // Empty map passed as references. References are not supported when using this class. 
return schemaMapper.parseSchema(rawSchema, Collections.emptyMap()); } @@ -79,7 +80,8 @@ public Object parseSchema(byte[] rawSchema) { } /** - * @see io.apicurio.registry.serde.SchemaResolver#configure(java.util.Map, boolean, io.apicurio.registry.serde.SchemaParser) + * @see io.apicurio.registry.serde.SchemaResolver#configure(java.util.Map, boolean, + * io.apicurio.registry.serde.SchemaParser) */ @Override public void configure(Map configs, boolean isKey, SchemaParser schemaParser) { @@ -93,7 +95,8 @@ public void configure(Map configs, boolean isKey, SchemaParser sch if (client == null) { String baseUrl = config.getRegistryUrl(); if (baseUrl == null) { - throw new IllegalArgumentException("Missing registry base url, set " + SerdeConfig.REGISTRY_URL); + throw new IllegalArgumentException( + "Missing registry base url, set " + SerdeConfig.REGISTRY_URL); } String authServerURL = config.getAuthServiceUrl(); @@ -101,7 +104,8 @@ public void configure(Map configs, boolean isKey, SchemaParser sch try { if (authServerURL != null || tokenEndpoint != null) { - client = configureClientWithBearerAuthentication(config, baseUrl, authServerURL, tokenEndpoint); + client = configureClientWithBearerAuthentication(config, baseUrl, authServerURL, + tokenEndpoint); } else { String username = config.getAuthUsername(); @@ -126,7 +130,8 @@ public void configure(Map configs, boolean isKey, SchemaParser sch schemaCache.configureRetryCount(config.getRetryCount()); schemaCache.configureGlobalIdKeyExtractor(SchemaLookupResult::getGlobalId); - schemaCache.configureContentKeyExtractor(schema -> Optional.ofNullable(schema.getRawSchema()).map(IoUtil::toString).orElse(null)); + schemaCache.configureContentKeyExtractor( + schema -> Optional.ofNullable(schema.getRawSchema()).map(IoUtil::toString).orElse(null)); schemaCache.configureContentIdKeyExtractor(SchemaLookupResult::getContentId); schemaCache.configureContentHashKeyExtractor(SchemaLookupResult::getContentHash); 
schemaCache.configureArtifactCoordinatesKeyExtractor(SchemaLookupResult::toArtifactCoordinates); @@ -159,7 +164,8 @@ public void setClient(RegistryClient client) { * @param artifactResolverStrategy the artifactResolverStrategy to set */ @Override - public void setArtifactResolverStrategy(ArtifactReferenceResolverStrategy artifactResolverStrategy) { + public void setArtifactResolverStrategy( + ArtifactReferenceResolverStrategy artifactResolverStrategy) { this.artifactResolverStrategy = artifactResolverStrategy; } @@ -179,9 +185,9 @@ public io.apicurio.registry.resolver.SchemaParser getSchemaParser() { } /** - * Resolve an artifact reference given the topic name, message headers, data, and optional parsed schema. This will use - * the artifact resolver strategy and then override the values from that strategy with any explicitly configured - * values (groupId, artifactId, version). + * Resolve an artifact reference given the topic name, message headers, data, and optional parsed schema. + * This will use the artifact resolver strategy and then override the values from that strategy with any + * explicitly configured values (groupId, artifactId, version). * * @param topic * @param headers @@ -194,23 +200,27 @@ protected ArtifactReference resolveArtifactReference(String topic, T data, Parse KafkaSerdeRecord record = new KafkaSerdeRecord(metadata, data); io.apicurio.registry.resolver.ParsedSchema ps = new ParsedSchemaImpl() - .setParsedSchema(parsedSchema.getParsedSchema()) - .setRawSchema(parsedSchema.getRawSchema()); + .setParsedSchema(parsedSchema.getParsedSchema()).setRawSchema(parsedSchema.getRawSchema()); - io.apicurio.registry.resolver.strategy.ArtifactReference artifactReference = artifactResolverStrategy.artifactReference(record, ps); + io.apicurio.registry.resolver.strategy.ArtifactReference artifactReference = artifactResolverStrategy + .artifactReference(record, ps); return ArtifactReference.builder() - .groupId(this.explicitArtifactGroupId == null ? 
artifactReference.getGroupId() : this.explicitArtifactGroupId) - .artifactId(this.explicitArtifactId == null ? artifactReference.getArtifactId() : this.explicitArtifactId) - .version(this.explicitArtifactVersion == null ? artifactReference.getVersion() : this.explicitArtifactVersion) + .groupId(this.explicitArtifactGroupId == null ? artifactReference.getGroupId() + : this.explicitArtifactGroupId) + .artifactId(this.explicitArtifactId == null ? artifactReference.getArtifactId() + : this.explicitArtifactId) + .version(this.explicitArtifactVersion == null ? artifactReference.getVersion() + : this.explicitArtifactVersion) .build(); } protected SchemaLookupResult resolveSchemaByGlobalId(long globalId) { return schemaCache.getByGlobalId(globalId, globalIdKey -> { - //TODO getContentByGlobalId have to return some minumum metadata (groupId, artifactId and version) - //TODO or at least add some method to the api to return the version metadata by globalId -// ArtifactMetaData artifactMetadata = client.getArtifactMetaData("TODO", artifactId); + // TODO getContentByGlobalId have to return some minumum metadata (groupId, artifactId and + // version) + // TODO or at least add some method to the api to return the version metadata by globalId + // ArtifactMetaData artifactMetadata = client.getArtifactMetaData("TODO", artifactId); InputStream rawSchema = client.ids().globalIds().byGlobalId(globalIdKey).get(); byte[] schema = IoUtil.toBytes(rawSchema); @@ -219,14 +229,11 @@ protected SchemaLookupResult resolveSchemaByGlobalId(long globalId) { SchemaLookupResult.SchemaLookupResultBuilder result = SchemaLookupResult.builder(); return result - //FIXME it's impossible to retrieve this info with only the globalId -// .groupId(null) -// .artifactId(null) -// .version(0) - .globalId(globalIdKey) - .rawSchema(schema) - .schema(parsed) - .build(); + // FIXME it's impossible to retrieve this info with only the globalId + // .groupId(null) + // .artifactId(null) + // .version(0) + 
.globalId(globalIdKey).rawSchema(schema).schema(parsed).build(); }); } @@ -245,7 +252,8 @@ public void close() throws IOException { } } - private RegistryClient configureClientWithBearerAuthentication(DefaultSchemaResolverConfig config, String registryUrl, String authServerUrl, String tokenEndpoint) { + private RegistryClient configureClientWithBearerAuthentication(DefaultSchemaResolverConfig config, + String registryUrl, String authServerUrl, String tokenEndpoint) { RequestAdapter auth; if (authServerUrl != null) { auth = configureAuthWithRealm(config, authServerUrl); @@ -263,7 +271,8 @@ private RequestAdapter configureAuthWithRealm(DefaultSchemaResolverConfig config throw new IllegalArgumentException("Missing registry auth realm, set " + SerdeConfig.AUTH_REALM); } - final String tokenEndpoint = authServerUrl + String.format(SerdeConfig.AUTH_SERVICE_URL_TOKEN_ENDPOINT, realm); + final String tokenEndpoint = authServerUrl + + String.format(SerdeConfig.AUTH_SERVICE_URL_TOKEN_ENDPOINT, realm); return configureAuthWithUrl(config, tokenEndpoint); } @@ -272,24 +281,29 @@ private RequestAdapter configureAuthWithUrl(DefaultSchemaResolverConfig config, final String clientId = config.getAuthClientId(); if (clientId == null) { - throw new IllegalArgumentException("Missing registry auth clientId, set " + SerdeConfig.AUTH_CLIENT_ID); + throw new IllegalArgumentException( + "Missing registry auth clientId, set " + SerdeConfig.AUTH_CLIENT_ID); } final String clientSecret = config.getAuthClientSecret(); if (clientSecret == null) { - throw new IllegalArgumentException("Missing registry auth secret, set " + SerdeConfig.AUTH_CLIENT_SECRET); + throw new IllegalArgumentException( + "Missing registry auth secret, set " + SerdeConfig.AUTH_CLIENT_SECRET); } - RequestAdapter adapter = new VertXRequestAdapter(buildOIDCWebClient(this.vertx, tokenEndpoint, clientId, clientSecret)); + RequestAdapter adapter = new VertXRequestAdapter( + buildOIDCWebClient(this.vertx, tokenEndpoint, clientId, 
clientSecret)); return adapter; } - private RegistryClient configureClientWithBasicAuth(DefaultSchemaResolverConfig config, String registryUrl, String username) { + private RegistryClient configureClientWithBasicAuth(DefaultSchemaResolverConfig config, + String registryUrl, String username) { final String password = config.getAuthPassword(); if (password == null) { - throw new IllegalArgumentException("Missing registry auth password, set " + SerdeConfig.AUTH_PASSWORD); + throw new IllegalArgumentException( + "Missing registry auth password, set " + SerdeConfig.AUTH_PASSWORD); } var adapter = new VertXRequestAdapter(buildSimpleAuthWebClient(this.vertx, username, password)); @@ -298,7 +312,8 @@ private RegistryClient configureClientWithBasicAuth(DefaultSchemaResolverConfig return new RegistryClient(adapter); } - protected void loadFromArtifactMetaData(VersionMetaData artifactMetadata, SchemaLookupResult.SchemaLookupResultBuilder resultBuilder) { + protected void loadFromArtifactMetaData(VersionMetaData artifactMetadata, + SchemaLookupResult.SchemaLookupResultBuilder resultBuilder) { resultBuilder.globalId(artifactMetadata.getGlobalId()); resultBuilder.contentId(artifactMetadata.getContentId()); resultBuilder.groupId(artifactMetadata.getGroupId()); diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/DefaultIdHandler.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/DefaultIdHandler.java index 392fc814c8..10e35d6503 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/DefaultIdHandler.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/DefaultIdHandler.java @@ -1,16 +1,15 @@ package io.apicurio.registry.serde; +import io.apicurio.registry.resolver.strategy.ArtifactReference; +import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; +import io.apicurio.registry.serde.config.IdOption; +import org.apache.kafka.common.errors.SerializationException; + import java.io.IOException; import 
java.io.OutputStream; import java.nio.ByteBuffer; import java.util.Map; -import org.apache.kafka.common.errors.SerializationException; - -import io.apicurio.registry.resolver.strategy.ArtifactReference; -import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; -import io.apicurio.registry.serde.config.IdOption; - public class DefaultIdHandler implements IdHandler { static final int idSize = 8; // we use 8 / long @@ -27,7 +26,8 @@ public void writeId(ArtifactReference reference, OutputStream out) throws IOExce long id; if (idOption == IdOption.contentId) { if (reference.getContentId() == null) { - throw new SerializationException("Missing contentId. IdOption is contentId but there is no contentId in the ArtifactReference"); + throw new SerializationException( + "Missing contentId. IdOption is contentId but there is no contentId in the ArtifactReference"); } id = reference.getContentId(); } else { @@ -41,7 +41,8 @@ public void writeId(ArtifactReference reference, ByteBuffer buffer) { long id; if (idOption == IdOption.contentId) { if (reference.getContentId() == null) { - throw new SerializationException("Missing contentId. IdOption is contentId but there is no contentId in the ArtifactReference"); + throw new SerializationException( + "Missing contentId. 
IdOption is contentId but there is no contentId in the ArtifactReference"); } id = reference.getContentId(); } else { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/IdHandler.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/IdHandler.java index 543b99196f..1e94579f5a 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/IdHandler.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/IdHandler.java @@ -1,16 +1,14 @@ package io.apicurio.registry.serde; +import io.apicurio.registry.resolver.strategy.ArtifactReference; + import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.Map; -import io.apicurio.registry.resolver.strategy.ArtifactReference; - - /** * Handle artifact id in the msg bytes. - * */ public interface IdHandler { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/Legacy4ByteIdHandler.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/Legacy4ByteIdHandler.java index aebfda6791..7b9272560d 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/Legacy4ByteIdHandler.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/Legacy4ByteIdHandler.java @@ -1,19 +1,17 @@ package io.apicurio.registry.serde; +import io.apicurio.registry.resolver.strategy.ArtifactReference; +import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; +import io.apicurio.registry.serde.config.IdOption; +import org.apache.kafka.common.errors.SerializationException; + import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.Map; -import org.apache.kafka.common.errors.SerializationException; - -import io.apicurio.registry.resolver.strategy.ArtifactReference; -import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; -import io.apicurio.registry.serde.config.IdOption; - /** * IdHandler that assumes 4 bytes for the magic number (the ID). 
- * */ public class Legacy4ByteIdHandler implements IdHandler { static final int idSize = 4; // e.g. Confluent uses 4 / int @@ -34,7 +32,8 @@ public void writeId(ArtifactReference reference, OutputStream out) throws IOExce long id; if (idOption == IdOption.contentId) { if (reference.getContentId() == null) { - throw new SerializationException("Missing contentId. IdOption is contentId but there is no contentId in the ArtifactReference"); + throw new SerializationException( + "Missing contentId. IdOption is contentId but there is no contentId in the ArtifactReference"); } id = reference.getContentId(); } else { @@ -48,7 +47,8 @@ public void writeId(ArtifactReference reference, ByteBuffer buffer) { long id; if (idOption == IdOption.contentId) { if (reference.getContentId() == null) { - throw new SerializationException("Missing contentId. IdOption is contentId but there is no contentId in the ArtifactReference"); + throw new SerializationException( + "Missing contentId. IdOption is contentId but there is no contentId in the ArtifactReference"); } id = reference.getContentId(); } else { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/ParsedSchema.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/ParsedSchema.java index 43014d5fb4..aeb5ece354 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/ParsedSchema.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/ParsedSchema.java @@ -1,7 +1,8 @@ package io.apicurio.registry.serde; /** - * This interface is deprecated and eventually will be replaced by {@link io.apicurio.registry.resolver.ParsedSchema} + * This interface is deprecated and eventually will be replaced by + * {@link io.apicurio.registry.resolver.ParsedSchema} */ @Deprecated public interface ParsedSchema { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/ParsedSchemaImpl.java 
b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/ParsedSchemaImpl.java index 913aa76677..8cfd9f0239 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/ParsedSchemaImpl.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/ParsedSchemaImpl.java @@ -1,7 +1,8 @@ package io.apicurio.registry.serde; /** - * This class is deprecated and eventually will be replaced by {@link io.apicurio.registry.resolver.ParsedSchemaImpl} + * This class is deprecated and eventually will be replaced by + * {@link io.apicurio.registry.resolver.ParsedSchemaImpl} */ @Deprecated public class ParsedSchemaImpl implements ParsedSchema { @@ -10,7 +11,7 @@ public class ParsedSchemaImpl implements ParsedSchema { private byte[] rawSchema; public ParsedSchemaImpl() { - //empty + // empty } /** diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaLookupResult.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaLookupResult.java index 126ebe5fb1..0de4044183 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaLookupResult.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaLookupResult.java @@ -5,7 +5,8 @@ import io.apicurio.registry.serde.strategy.ArtifactReference; /** - * This class is deprecated and eventually will be replaced by {@link io.apicurio.registry.resolver.SchemaLookupResult} + * This class is deprecated and eventually will be replaced by + * {@link io.apicurio.registry.resolver.SchemaLookupResult} */ @Deprecated public class SchemaLookupResult { @@ -21,7 +22,7 @@ public class SchemaLookupResult { private String version; private SchemaLookupResult() { - //empty initialize manually + // empty initialize manually } /** @@ -81,35 +82,23 @@ public String getVersion() { } public ArtifactReference toArtifactReference() { - return ArtifactReference.builder() - .globalId(this.getGlobalId()) - .contentId(this.getContentId()) - 
.contentHash(this.getContentHash()) - .groupId(this.getGroupId()) - .artifactId(this.getArtifactId()) - .version(this.getVersion()) - .build(); + return ArtifactReference.builder().globalId(this.getGlobalId()).contentId(this.getContentId()) + .contentHash(this.getContentHash()).groupId(this.getGroupId()) + .artifactId(this.getArtifactId()).version(this.getVersion()).build(); } public ArtifactCoordinates toArtifactCoordinates() { - return ArtifactCoordinates.builder() - .groupId(this.getGroupId()) - .artifactId(this.getArtifactId()) - .version(this.getVersion()) - .build(); + return ArtifactCoordinates.builder().groupId(this.getGroupId()).artifactId(this.getArtifactId()) + .version(this.getVersion()).build(); } @SuppressWarnings("rawtypes") public io.apicurio.registry.resolver.SchemaLookupResult toCompat() { - return io.apicurio.registry.resolver.SchemaLookupResult.builder() - .contentId(contentId) - .contentHash(contentHash) - .globalId(globalId) - .groupId(groupId) - .artifactId(artifactId) - .version(version) - .parsedSchema(new ParsedSchemaImpl<>().setParsedSchema(schema).setRawSchema(rawSchema)) - .build(); + return io.apicurio.registry.resolver.SchemaLookupResult.builder().contentId(contentId) + .contentHash(contentHash).globalId(globalId).groupId(groupId).artifactId(artifactId) + .version(version) + .parsedSchema(new ParsedSchemaImpl<>().setParsedSchema(schema).setRawSchema(rawSchema)) + .build(); } public static SchemaLookupResultBuilder builder() { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaParser.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaParser.java index 0083414825..b7b615ea19 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaParser.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaParser.java @@ -1,7 +1,8 @@ package io.apicurio.registry.serde; /** - * This class is deprecated and eventually will be replaced by {@link 
io.apicurio.registry.resolver.SchemaParser} + * This class is deprecated and eventually will be replaced by + * {@link io.apicurio.registry.resolver.SchemaParser} */ @Deprecated public interface SchemaParser { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaResolver.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaResolver.java index d521c6e76e..03b277f5aa 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaResolver.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaResolver.java @@ -1,10 +1,5 @@ package io.apicurio.registry.serde; -import java.io.Closeable; -import java.io.IOException; -import java.util.Collections; -import java.util.Map; -import org.apache.kafka.common.header.Headers; import io.apicurio.registry.resolver.data.Record; import io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy; import io.apicurio.registry.rest.client.RegistryClient; @@ -12,15 +7,21 @@ import io.apicurio.registry.serde.data.KafkaSerdeRecord; import io.apicurio.registry.serde.strategy.ArtifactReference; import io.apicurio.registry.serde.strategy.ArtifactResolverStrategy; +import org.apache.kafka.common.header.Headers; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Collections; +import java.util.Map; /** - * - * This interface is kept for compatibility, It's recommended to migrate custom implementations to adhere the new interface {@link io.apicurio.registry.resolver.SchemaResolver} - * + * This interface is kept for compatibility, It's recommended to migrate custom implementations to adhere the + * new interface {@link io.apicurio.registry.resolver.SchemaResolver} */ @SuppressWarnings({ "rawtypes", "unchecked" }) @Deprecated -public interface SchemaResolver extends io.apicurio.registry.resolver.SchemaResolver, Closeable { +public interface SchemaResolver + extends io.apicurio.registry.resolver.SchemaResolver, Closeable { /** 
* Configure, if supported. @@ -38,11 +39,12 @@ default void configure(Map configs, boolean isKey, SchemaParser artifactResolverStrategy) { - setArtifactResolverStrategy((ArtifactReferenceResolverStrategy)artifactResolverStrategy); + setArtifactResolverStrategy((ArtifactReferenceResolverStrategy) artifactResolverStrategy); } /** * Used by Serializers to lookup the schema for a given kafka record. + * * @param topic * @param headers, can be null * @param data @@ -50,11 +52,14 @@ default void setArtifactResolverStrategy(ArtifactResolverStrategy artifa * @return SchemaLookupResult */ @Deprecated - public SchemaLookupResult resolveSchema(String topic, Headers headers, DATA data, ParsedSchema parsedSchema); + public SchemaLookupResult resolveSchema(String topic, Headers headers, DATA data, + ParsedSchema parsedSchema); /** - * Used by Deserializers to lookup the schema for a given kafka record. - * The schema resolver may use different pieces of information from the {@link ArtifactReference} depending on the configuration of the schema resolver. + * Used by Deserializers to lookup the schema for a given kafka record. The schema resolver may use + * different pieces of information from the {@link ArtifactReference} depending on the configuration of + * the schema resolver. 
+ * * @param reference * @return SchemaLookupResult */ @@ -69,10 +74,12 @@ default void setArtifactResolverStrategy(ArtifactResolverStrategy artifa public void reset(); /** - * @see io.apicurio.registry.resolver.SchemaResolver#configure(java.util.Map, io.apicurio.registry.resolver.SchemaParser) + * @see io.apicurio.registry.resolver.SchemaResolver#configure(java.util.Map, + * io.apicurio.registry.resolver.SchemaParser) */ @Override - default void configure(Map configs, io.apicurio.registry.resolver.SchemaParser schemaMapper) { + default void configure(Map configs, + io.apicurio.registry.resolver.SchemaParser schemaMapper) { configure(configs, true, new SchemaParser() { /** @@ -94,8 +101,6 @@ public Object parseSchema(byte[] rawSchema) { }); } - - /** * @see io.apicurio.registry.resolver.SchemaResolver#resolveSchema(io.apicurio.registry.resolver.data.Record) */ @@ -104,25 +109,22 @@ default io.apicurio.registry.resolver.SchemaLookupResult resolveSchema(R KafkaSerdeRecord kdata = (KafkaSerdeRecord) data; KafkaSerdeMetadata metadata = kdata.metadata(); io.apicurio.registry.resolver.ParsedSchema ps = getSchemaParser().getSchemaFromData(data); - ParsedSchema compatps = ps == null ? null : new ParsedSchemaImpl().setParsedSchema(ps.getParsedSchema()).setRawSchema(ps.getRawSchema()); - return resolveSchema(metadata.getTopic(), metadata.getHeaders(), kdata.payload(), compatps).toCompat(); + ParsedSchema compatps = ps == null ? 
null : new ParsedSchemaImpl() + .setParsedSchema(ps.getParsedSchema()).setRawSchema(ps.getRawSchema()); + return resolveSchema(metadata.getTopic(), metadata.getHeaders(), kdata.payload(), compatps) + .toCompat(); } - - /** * @see io.apicurio.registry.resolver.SchemaResolver#resolveSchemaByArtifactReference(io.apicurio.registry.resolver.strategy.ArtifactReference) */ @Override default io.apicurio.registry.resolver.SchemaLookupResult resolveSchemaByArtifactReference( io.apicurio.registry.resolver.strategy.ArtifactReference reference) { - return resolveSchemaByArtifactReference(ArtifactReference.builder() - .contentId(reference.getContentId()) - .globalId(reference.getGlobalId()) - .groupId(reference.getGroupId()) - .artifactId(reference.getArtifactId()) - .version(reference.getVersion()) - .build()) + return resolveSchemaByArtifactReference( + ArtifactReference.builder().contentId(reference.getContentId()) + .globalId(reference.getGlobalId()).groupId(reference.getGroupId()) + .artifactId(reference.getArtifactId()).version(reference.getVersion()).build()) .toCompat(); } diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaResolverConfigurer.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaResolverConfigurer.java index 8728ec72ca..e34ebbe948 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaResolverConfigurer.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SchemaResolverConfigurer.java @@ -1,8 +1,5 @@ package io.apicurio.registry.serde; -import java.util.Map; -import java.util.Objects; - import io.apicurio.registry.resolver.DefaultSchemaResolver; import io.apicurio.registry.resolver.SchemaParser; import io.apicurio.registry.resolver.SchemaResolver; @@ -10,9 +7,11 @@ import io.apicurio.registry.resolver.utils.Utils; import io.apicurio.registry.rest.client.RegistryClient; +import java.util.Map; +import java.util.Objects; + /** * Base class for any kind of 
serializer/deserializer that depends on {@link SchemaResolver} - * */ public class SchemaResolverConfigurer { @@ -33,10 +32,7 @@ public SchemaResolverConfigurer(SchemaResolver schemaResolver) { this(null, schemaResolver); } - public SchemaResolverConfigurer( - RegistryClient client, - SchemaResolver schemaResolver - ) { + public SchemaResolverConfigurer(RegistryClient client, SchemaResolver schemaResolver) { this(); setSchemaResolver(schemaResolver); getSchemaResolver().setClient(client); @@ -63,7 +59,8 @@ protected void configure(Map configs, boolean isKey, SchemaParse } // enforce default artifactResolverStrategy for kafka apps if (!configs.containsKey(SchemaResolverConfig.ARTIFACT_RESOLVER_STRATEGY)) { - configs.put(SchemaResolverConfig.ARTIFACT_RESOLVER_STRATEGY, SerdeConfig.ARTIFACT_RESOLVER_STRATEGY_DEFAULT); + configs.put(SchemaResolverConfig.ARTIFACT_RESOLVER_STRATEGY, + SerdeConfig.ARTIFACT_RESOLVER_STRATEGY_DEFAULT); } // isKey is passed via config property configs.put(SerdeConfig.IS_KEY, isKey); diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SerdeConfig.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SerdeConfig.java index b6dcd4d46b..7e198e6707 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SerdeConfig.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SerdeConfig.java @@ -1,7 +1,5 @@ package io.apicurio.registry.serde; -import java.util.Properties; - import io.apicurio.registry.resolver.DefaultSchemaResolver; import io.apicurio.registry.resolver.SchemaResolverConfig; import io.apicurio.registry.serde.config.IdOption; @@ -12,26 +10,28 @@ import io.apicurio.registry.serde.strategy.ArtifactResolverStrategy; import io.apicurio.registry.serde.strategy.TopicIdStrategy; +import java.util.Properties; + /** - * Contains all of the Serde configuration properties. 
These are all the property names used when - * configuring serde classes in Kafka apps via a {@link Properties} object. Serde classes can be - * used by creating them directly as well, in which case these property names are not relevant. + * Contains all of the Serde configuration properties. These are all the property names used when configuring + * serde classes in Kafka apps via a {@link Properties} object. Serde classes can be used by creating them + * directly as well, in which case these property names are not relevant. */ public class SerdeConfig { /** * Fully qualified Java classname of a class that implements {@link ArtifactResolverStrategy} and is - * responsible for mapping between the Kafka serde information and an artifactId. For example - * there is a strategy to use the topic name as the schema's artifactId. Only used by the - * Serializer serde class. + * responsible for mapping between the Kafka serde information and an artifactId. For example there is a + * strategy to use the topic name as the schema's artifactId. Only used by the Serializer serde + * class. */ public static final String ARTIFACT_RESOLVER_STRATEGY = SchemaResolverConfig.ARTIFACT_RESOLVER_STRATEGY; public static final String ARTIFACT_RESOLVER_STRATEGY_DEFAULT = TopicIdStrategy.class.getName(); /** * Fully qualified Java classname of a class that implements {@link SchemaResolver}. - * {@link DefaultSchemaResolver} is used by default. - * The SchemaResolver is used both by Serializer and Deserializer classes. + * {@link DefaultSchemaResolver} is used by default. The SchemaResolver is used both by Serializer and + * Deserializer classes. 
*/ public static final String SCHEMA_RESOLVER = "apicurio.registry.schema-resolver"; public static final String SCHEMA_RESOLVER_DEFAULT = DefaultSchemaResolver.class.getName(); @@ -42,64 +42,66 @@ public class SerdeConfig { public static final String IS_KEY = "apicurio.registry.is-key"; /** - * Optional, boolean to indicate whether serializer classes should attempt to create an artifact in the registry. - * Note: JsonSchema serializer does not support this feature yet. + * Optional, boolean to indicate whether serializer classes should attempt to create an artifact in the + * registry. Note: JsonSchema serializer does not support this feature yet. */ public static final String AUTO_REGISTER_ARTIFACT = SchemaResolverConfig.AUTO_REGISTER_ARTIFACT; public static final boolean AUTO_REGISTER_ARTIFACT_DEFAULT = SchemaResolverConfig.AUTO_REGISTER_ARTIFACT_DEFAULT; /** - * Optional, one of {@link IfExists} to indicate the behavior of the client when there is a conflict creating an artifact because the artifact already exists. + * Optional, one of {@link IfExists} to indicate the behavior of the client when there is a conflict + * creating an artifact because the artifact already exists. */ public static final String AUTO_REGISTER_ARTIFACT_IF_EXISTS = SchemaResolverConfig.AUTO_REGISTER_ARTIFACT_IF_EXISTS; public static final String AUTO_REGISTER_ARTIFACT_IF_EXISTS_DEFAULT = SchemaResolverConfig.AUTO_REGISTER_ARTIFACT_IF_EXISTS_DEFAULT; /** - * Optional, boolean to indicate whether serializer classes should attempt to find the latest artifact in the registry for the corresponding groupId/artifactId. - * GroupId and artifactId are configured either via {@link ArtifactResolverStrategy} or via config properties such as {@link SerdeConfig#EXPLICIT_ARTIFACT_ID}. + * Optional, boolean to indicate whether serializer classes should attempt to find the latest artifact in + * the registry for the corresponding groupId/artifactId. 
GroupId and artifactId are configured either via + * {@link ArtifactResolverStrategy} or via config properties such as + * {@link SerdeConfig#EXPLICIT_ARTIFACT_ID}. */ public static final String FIND_LATEST_ARTIFACT = SchemaResolverConfig.FIND_LATEST_ARTIFACT; public static final boolean FIND_LATEST_ARTIFACT_DEFAULT = SchemaResolverConfig.FIND_LATEST_ARTIFACT_DEFAULT; /** - * Only applicable for serializers - * Optional, set explicitly the groupId used for querying/creating an artifact. - * Overrides the groupId returned by the {@link ArtifactResolverStrategy} + * Only applicable for serializers Optional, set explicitly the groupId used for querying/creating an + * artifact. Overrides the groupId returned by the {@link ArtifactResolverStrategy} */ public static final String EXPLICIT_ARTIFACT_GROUP_ID = SchemaResolverConfig.EXPLICIT_ARTIFACT_GROUP_ID; /** - * Only applicable for serializers - * Optional, set explicitly the artifactId used for querying/creating an artifact. - * Overrides the artifactId returned by the {@link ArtifactResolverStrategy} + * Only applicable for serializers Optional, set explicitly the artifactId used for querying/creating an + * artifact. Overrides the artifactId returned by the {@link ArtifactResolverStrategy} */ public static final String EXPLICIT_ARTIFACT_ID = SchemaResolverConfig.EXPLICIT_ARTIFACT_ID; /** - * Only applicable for serializers - * Optional, set explicitly the schema used for serialization. + * Only applicable for serializers Optional, set explicitly the schema used for serialization. */ public static final String SCHEMA_LOCATION = SchemaResolverConfig.SCHEMA_LOCATION; /** - * Only applicable for serializers - * Optional, set explicitly the version used for querying/creating an artifact. - * Overrides the version returned by the {@link ArtifactResolverStrategy} + * Only applicable for serializers Optional, set explicitly the version used for querying/creating an + * artifact. 
Overrides the version returned by the {@link ArtifactResolverStrategy} */ public static final String EXPLICIT_ARTIFACT_VERSION = SchemaResolverConfig.EXPLICIT_ARTIFACT_VERSION; /** - * The URL of the Apicurio Registry. Required when using any Apicurio Registry serde class (serializer or deserializer). + * The URL of the Apicurio Registry. Required when using any Apicurio Registry serde class (serializer or + * deserializer). */ public static final String REGISTRY_URL = SchemaResolverConfig.REGISTRY_URL; /** - * The URL of the Token Endpoint. Required when using any Apicurio Registry serde class (serializer or deserializer) against a secured Apicurio Registry and AUTH_SERVICE_URL is not specified. + * The URL of the Token Endpoint. Required when using any Apicurio Registry serde class (serializer or + * deserializer) against a secured Apicurio Registry and AUTH_SERVICE_URL is not specified. */ public static final String AUTH_TOKEN_ENDPOINT = SchemaResolverConfig.AUTH_TOKEN_ENDPOINT; /** - * The URL of the Auth Service. Required when using any Apicurio Registry serde class (serializer or deserializer) against a secured Apicurio Registry. + * The URL of the Auth Service. Required when using any Apicurio Registry serde class (serializer or + * deserializer) against a secured Apicurio Registry. */ public static final String AUTH_SERVICE_URL = SchemaResolverConfig.AUTH_SERVICE_URL; public static final String AUTH_SERVICE_URL_TOKEN_ENDPOINT = SchemaResolverConfig.AUTH_SERVICE_URL_TOKEN_ENDPOINT; @@ -130,65 +132,67 @@ public class SerdeConfig { public static final String AUTH_PASSWORD = SchemaResolverConfig.AUTH_PASSWORD; /** - * Fully qualified Java classname of a class that implements {@link IdHandler} and is responsible - * for writing the schema's Global ID to the message payload. Only used when {@link SerdeConfig#ENABLE_HEADERS} is - * missing or 'false'. 
+ * Fully qualified Java classname of a class that implements {@link IdHandler} and is responsible for + * writing the schema's Global ID to the message payload. Only used when + * {@link SerdeConfig#ENABLE_HEADERS} is missing or 'false'. */ public static final String ID_HANDLER = "apicurio.registry.id-handler"; public static final String ID_HANDLER_DEFAULT = DefaultIdHandler.class.getName(); /** - * Shortcut for enabling the Legacy (Confluent compatible) implementation of {@link IdHandler}. Should - * not be used with "ID_HANDLER". The value should be 'true' or 'false'. + * Shortcut for enabling the Legacy (Confluent compatible) implementation of {@link IdHandler}. Should not + * be used with "ID_HANDLER". The value should be 'true' or 'false'. */ public static final String ENABLE_CONFLUENT_ID_HANDLER = "apicurio.registry.as-confluent"; /** - * Boolean to indicate whether serde classes should pass Global Id information via message headers - * instead of in the message payload. + * Boolean to indicate whether serde classes should pass Global Id information via message headers instead + * of in the message payload. */ - public static final String ENABLE_HEADERS= "apicurio.registry.headers.enabled"; + public static final String ENABLE_HEADERS = "apicurio.registry.headers.enabled"; public static final boolean ENABLE_HEADERS_DEFAULT = true; /** - * Fully qualified Java classname of a class that implements {@link HeadersHandler} and is responsible - * for writing the schema's Global ID to the message headers. Only used when {@link SerdeConfig#ENABLE_HEADERS} is 'true'. + * Fully qualified Java classname of a class that implements {@link HeadersHandler} and is responsible for + * writing the schema's Global ID to the message headers. Only used when + * {@link SerdeConfig#ENABLE_HEADERS} is 'true'. 
*/ public static final String HEADERS_HANDLER = "apicurio.registry.headers.handler"; public static final String HEADERS_HANDLER_DEFAULT = DefaultHeadersHandler.class.getName(); /** - * Indicates how long to cache artifacts before auto-eviction. If not included, the artifact will be fetched every time. + * Indicates how long to cache artifacts before auto-eviction. If not included, the artifact will be + * fetched every time. */ public static final String CHECK_PERIOD_MS = SchemaResolverConfig.CHECK_PERIOD_MS; public static final long CHECK_PERIOD_MS_DEFAULT = SchemaResolverConfig.CHECK_PERIOD_MS_DEFAULT; /** - * If a schema can not be retrieved from the Registry, serdes may retry a number of times. - * This configuration option controls the number of retries before failing. - * Valid values are non-negative integers. + * If a schema can not be retrieved from the Registry, serdes may retry a number of times. This + * configuration option controls the number of retries before failing. Valid values are non-negative + * integers. */ public static final String RETRY_COUNT = SchemaResolverConfig.RETRY_COUNT; public static final long RETRY_COUNT_DEFAULT = SchemaResolverConfig.RETRY_COUNT_DEFAULT; /** - * If a schema can not be be retrieved from the Registry, serdes may retry a number of times. - * This configuration option controls the delay between the retry attempts, in milliseconds. - * Valid values are non-negative integers. + * If a schema can not be be retrieved from the Registry, serdes may retry a number of times. This + * configuration option controls the delay between the retry attempts, in milliseconds. Valid values are + * non-negative integers. */ public static final String RETRY_BACKOFF_MS = SchemaResolverConfig.RETRY_BACKOFF_MS; public static final long RETRY_BACKOFF_MS_DEFAULT = SchemaResolverConfig.RETRY_BACKOFF_MS_DEFAULT; /** * Configures the serdes to use the specified {@link IdOption} as the identifier for the artifacts. 
- * Instructs the serializer to write the specified id into the kafka records and - * instructs the deserializer to read and use the specified id from the kafka records (to find the schema). + * Instructs the serializer to write the specified id into the kafka records and instructs the + * deserializer to read and use the specified id from the kafka records (to find the schema). */ public static final String USE_ID = "apicurio.registry.use-id"; public static final String USE_ID_DEFAULT = IdOption.globalId.name(); /** - * Boolean used to enable or disable validation. Not applicable to all serde classes. For example, the + * Boolean used to enable or disable validation. Not applicable to all serde classes. For example, the * JSON Schema serde classes use this to enable or disable JSON Schema validation (unlike Avro, the JSON * Schema schema is not required to serialize/deserialize the message payload). */ @@ -196,115 +200,129 @@ public class SerdeConfig { public static final boolean VALIDATION_ENABLED_DEFAULT = true; /** - * Only applicable for deserializers - * Optional, set explicitly the groupId used as fallback for resolving the artifact used for deserialization. + * Only applicable for deserializers Optional, set explicitly the groupId used as fallback for resolving + * the artifact used for deserialization. */ public static final String FALLBACK_ARTIFACT_GROUP_ID = "apicurio.registry.fallback.group-id"; /** - * Only applicable for deserializers - * Optional, set explicitly the artifactId used as fallback for resolving the artifact used for deserialization. + * Only applicable for deserializers Optional, set explicitly the artifactId used as fallback for + * resolving the artifact used for deserialization. */ public static final String FALLBACK_ARTIFACT_ID = "apicurio.registry.fallback.artifact-id"; /** - * Only applicable for deserializers - * Optional, set explicitly the version used as fallback for resolving the artifact used for deserialization. 
+ * Only applicable for deserializers Optional, set explicitly the version used as fallback for resolving + * the artifact used for deserialization. */ public static final String FALLBACK_ARTIFACT_VERSION = "apicurio.registry.fallback.version"; /** - * Only applicable for deserializers - * Optional, allows to set a custom implementation of {@link FallbackArtifactProvider} , for resolving the artifact used for deserialization. + * Only applicable for deserializers Optional, allows to set a custom implementation of + * {@link FallbackArtifactProvider} , for resolving the artifact used for deserialization. */ public static final String FALLBACK_ARTIFACT_PROVIDER = "apicurio.registry.fallback.provider"; - public static final String FALLBACK_ARTIFACT_PROVIDER_DEFAULT = DefaultFallbackArtifactProvider.class.getName(); - + public static final String FALLBACK_ARTIFACT_PROVIDER_DEFAULT = DefaultFallbackArtifactProvider.class + .getName(); /** - * Fully qualified Java classname of a class that will be used as the return type for the deserializer. Aplicable for keys deserialization. - * Forces the deserializer to return objects of this type, if not present the return type will be obtained from the message headers, if updated by the serializer. + * Fully qualified Java classname of a class that will be used as the return type for the deserializer. + * Aplicable for keys deserialization. Forces the deserializer to return objects of this type, if not + * present the return type will be obtained from the message headers, if updated by the serializer. * Supported by JsonSchema and Protobuf deserializers. */ public static final String DESERIALIZER_SPECIFIC_KEY_RETURN_CLASS = "apicurio.registry.deserializer.key.return-class"; /** - * Fully qualified Java classname of a class that will be used as the return type for the deserializer. Aplicable for values deserialization. 
- * Forces the deserializer to return objects of this type, if not present the return type will be obtained from the message headers, if updated by the serializer. + * Fully qualified Java classname of a class that will be used as the return type for the deserializer. + * Aplicable for values deserialization. Forces the deserializer to return objects of this type, if not + * present the return type will be obtained from the message headers, if updated by the serializer. * Supported by JsonSchema and Protobuf deserializers. */ public static final String DESERIALIZER_SPECIFIC_VALUE_RETURN_CLASS = "apicurio.registry.deserializer.value.return-class"; /** - * Used to override the Kafka message header name used to pass the groupId for the message key. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_KEY_GROUP_ID}. + * Used to override the Kafka message header name used to pass the groupId for the message key. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_KEY_GROUP_ID}. */ public static final String HEADER_KEY_GROUP_ID_OVERRIDE_NAME = "apicurio.registry.headers.key.groupId.name"; /** - * Used to override the Kafka message header name used to pass the groupId for the message value. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_VALUE_GROUP_ID}. + * Used to override the Kafka message header name used to pass the groupId for the message value. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_VALUE_GROUP_ID}. */ public static final String HEADER_VALUE_GROUP_ID_OVERRIDE_NAME = "apicurio.registry.headers.value.groupId.name"; /** - * Used to override the Kafka message header name used to pass the artifactId for the message key. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. 
Default value is {@link SerdeHeaders#HEADER_KEY_ARTIFACT_ID}. + * Used to override the Kafka message header name used to pass the artifactId for the message key. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_KEY_ARTIFACT_ID}. */ public static final String HEADER_KEY_ARTIFACT_ID_OVERRIDE_NAME = "apicurio.registry.headers.key.artifactId.name"; /** - * Used to override the Kafka message header name used to pass the artifactId for the message value. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_VALUE_ARTIFACT_ID}. + * Used to override the Kafka message header name used to pass the artifactId for the message value. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_VALUE_ARTIFACT_ID}. */ public static final String HEADER_VALUE_ARTIFACT_ID_OVERRIDE_NAME = "apicurio.registry.headers.value.artifactId.name"; /** - * Used to override the Kafka message header name used to pass the version for the message key. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_KEY_VERSION}. + * Used to override the Kafka message header name used to pass the version for the message key. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_KEY_VERSION}. */ public static final String HEADER_KEY_VERSION_OVERRIDE_NAME = "apicurio.registry.headers.key.version.name"; /** - * Used to override the Kafka message header name used to pass the version for the message value. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_VALUE_VERSION}. + * Used to override the Kafka message header name used to pass the version for the message value. 
Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_VALUE_VERSION}. */ public static final String HEADER_VALUE_VERSION_OVERRIDE_NAME = "apicurio.registry.headers.value.version.name"; /** - * Used to override the Kafka message header name used to pass the globalId for the message key. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_KEY_GLOBAL_ID}. + * Used to override the Kafka message header name used to pass the globalId for the message key. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_KEY_GLOBAL_ID}. */ public static final String HEADER_KEY_GLOBAL_ID_OVERRIDE_NAME = "apicurio.registry.headers.key.globalId.name"; /** - * Used to override the Kafka message header name used to pass the globalId for the message value. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_VALUE_GLOBAL_ID}. + * Used to override the Kafka message header name used to pass the globalId for the message value. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_VALUE_GLOBAL_ID}. */ public static final String HEADER_VALUE_GLOBAL_ID_OVERRIDE_NAME = "apicurio.registry.headers.value.globalId.name"; /** - * Used to override the Kafka message header name used to pass the contentId for the message key. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_KEY_CONTENT_ID}. + * Used to override the Kafka message header name used to pass the contentId for the message key. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_KEY_CONTENT_ID}. 
*/ public static final String HEADER_KEY_CONTENT_ID_OVERRIDE_NAME = "apicurio.registry.headers.key.contentId.name"; /** - * Used to override the Kafka message header name used to pass the contentId for the message value. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_VALUE_CONTENT_ID}. + * Used to override the Kafka message header name used to pass the contentId for the message value. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_VALUE_CONTENT_ID}. */ public static final String HEADER_VALUE_CONTENT_ID_OVERRIDE_NAME = "apicurio.registry.headers.value.contentId.name"; /** - * Used to override the Kafka message header name used to pass the contentHash for the message key. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_KEY_CONTENT_HASH}. + * Used to override the Kafka message header name used to pass the contentHash for the message key. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_KEY_CONTENT_HASH}. */ public static final String HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME = "apicurio.registry.headers.key.contentHash.name"; /** - * Used to override the Kafka message header name used to pass the contentHash for the message value. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is {@link SerdeHeaders#HEADER_VALUE_CONTENT_HASH}. + * Used to override the Kafka message header name used to pass the contentHash for the message value. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Default value is + * {@link SerdeHeaders#HEADER_VALUE_CONTENT_HASH}. 
*/ public static final String HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME = "apicurio.registry.headers.value.contentHash.name"; /** - * Used to override the Kafka message header name used to pass the message type for the message key. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Only used by the JSON Schema serde classes. - * Default value is {@link SerdeHeaders#HEADER_KEY_MESSAGE_TYPE}. + * Used to override the Kafka message header name used to pass the message type for the message key. Only + * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Only used by the JSON Schema serde + * classes. Default value is {@link SerdeHeaders#HEADER_KEY_MESSAGE_TYPE}. */ public static final String HEADER_KEY_MESSAGE_TYPE_OVERRIDE_NAME = "apicurio.registry.headers.key.msgType.name"; /** - * Used to override the Kafka message header name used to pass the message type for the message value. Only - * applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Only used by the JSON Schema serde classes. - * Default value is {@link SerdeHeaders#HEADER_VALUE_MESSAGE_TYPE}. + * Used to override the Kafka message header name used to pass the message type for the message value. + * Only applicable when {@link SerdeConfig#ENABLE_HEADERS} is enabled. Only used by the JSON Schema serde + * classes. Default value is {@link SerdeHeaders#HEADER_VALUE_MESSAGE_TYPE}. 
*/ public static final String HEADER_VALUE_MESSAGE_TYPE_OVERRIDE_NAME = "apicurio.registry.headers.value.msgType.name"; diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SerdeHeaders.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SerdeHeaders.java index dc89961120..70bc9f1c5c 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SerdeHeaders.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/SerdeHeaders.java @@ -1,10 +1,8 @@ package io.apicurio.registry.serde; /** - * Contains all of the header names used when serde classes are configured to pass information - * via headers instead of via the message payload. Note that these header names can be overridden - * via configuration. - * + * Contains all of the header names used when serde classes are configured to pass information via headers + * instead of via the message payload. Note that these header names can be overridden via configuration. */ public class SerdeHeaders { @@ -26,4 +24,3 @@ public class SerdeHeaders { public static final String HEADER_VALUE_MESSAGE_TYPE = "apicurio.value.msgType"; } - diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/BaseKafkaDeserializerConfig.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/BaseKafkaDeserializerConfig.java index 7d33b5acc6..91f8b07178 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/BaseKafkaDeserializerConfig.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/BaseKafkaDeserializerConfig.java @@ -1,21 +1,22 @@ package io.apicurio.registry.serde.config; -import static io.apicurio.registry.serde.SerdeConfig.FALLBACK_ARTIFACT_PROVIDER; -import static io.apicurio.registry.serde.SerdeConfig.FALLBACK_ARTIFACT_PROVIDER_DEFAULT; -import java.util.Map; - import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import 
org.apache.kafka.common.config.ConfigDef.Type; +import java.util.Map; + +import static io.apicurio.registry.serde.SerdeConfig.FALLBACK_ARTIFACT_PROVIDER; +import static io.apicurio.registry.serde.SerdeConfig.FALLBACK_ARTIFACT_PROVIDER_DEFAULT; + public class BaseKafkaDeserializerConfig extends BaseKafkaSerDeConfig { public static ConfigDef configDef() { - ConfigDef configDef = new ConfigDef() - .define(FALLBACK_ARTIFACT_PROVIDER, Type.CLASS, FALLBACK_ARTIFACT_PROVIDER_DEFAULT, Importance.HIGH, "TODO docs"); + ConfigDef configDef = new ConfigDef().define(FALLBACK_ARTIFACT_PROVIDER, Type.CLASS, + FALLBACK_ARTIFACT_PROVIDER_DEFAULT, Importance.HIGH, "TODO docs"); return configDef; - } + } public BaseKafkaDeserializerConfig(Map originals) { super(configDef(), originals); diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/BaseKafkaSerDeConfig.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/BaseKafkaSerDeConfig.java index c43f6843c0..1531b79cbe 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/BaseKafkaSerDeConfig.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/BaseKafkaSerDeConfig.java @@ -1,14 +1,14 @@ package io.apicurio.registry.serde.config; -import static io.apicurio.registry.serde.SerdeConfig.*; - -import java.util.Map; - import org.apache.kafka.common.config.AbstractConfig; import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import org.apache.kafka.common.config.ConfigDef.Type; +import java.util.Map; + +import static io.apicurio.registry.serde.SerdeConfig.*; + public class BaseKafkaSerDeConfig extends AbstractConfig { private static ConfigDef buildConfigDef(ConfigDef base) { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/IdOption.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/IdOption.java index b6d43876ca..4c0ed49b15 100644 --- 
a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/IdOption.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/config/IdOption.java @@ -2,7 +2,6 @@ public enum IdOption { - globalId, - contentId; + globalId, contentId; } diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/data/KafkaSerdeMetadata.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/data/KafkaSerdeMetadata.java index 1aa7e6cbfa..ab350dfcc8 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/data/KafkaSerdeMetadata.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/data/KafkaSerdeMetadata.java @@ -1,9 +1,8 @@ package io.apicurio.registry.serde.data; -import org.apache.kafka.common.header.Headers; - import io.apicurio.registry.resolver.data.Metadata; import io.apicurio.registry.resolver.strategy.ArtifactReference; +import org.apache.kafka.common.header.Headers; /** * Kafka specific implementation for the Record Metadata abstraction used by the SchemaResolver @@ -34,6 +33,7 @@ public ArtifactReference artifactReference() { public String getTopic() { return topic; } + /** * @return the isKey */ diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/fallback/DefaultFallbackArtifactProvider.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/fallback/DefaultFallbackArtifactProvider.java index 8c114184f6..071d5b674f 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/fallback/DefaultFallbackArtifactProvider.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/fallback/DefaultFallbackArtifactProvider.java @@ -1,11 +1,10 @@ package io.apicurio.registry.serde.fallback; -import java.util.Map; - -import org.apache.kafka.common.header.Headers; - import io.apicurio.registry.resolver.strategy.ArtifactReference; import io.apicurio.registry.serde.SerdeConfig; +import org.apache.kafka.common.header.Headers; + +import 
java.util.Map; /** * Default implementation of FallbackArtifactProvider that simply uses config properties @@ -39,17 +38,15 @@ public void configure(Map configs, boolean isKey) { String fallbackVersion = (String) configs.get(versionConfigKey); if (fallbackArtifactId != null) { - fallbackArtifactReference = ArtifactReference.builder() - .groupId(fallbackGroupId) - .artifactId(fallbackArtifactId) - .version(fallbackVersion) - .build(); + fallbackArtifactReference = ArtifactReference.builder().groupId(fallbackGroupId) + .artifactId(fallbackArtifactId).version(fallbackVersion).build(); } } /** - * @see io.apicurio.registry.serde.fallback.FallbackArtifactProvider#get(java.lang.String, org.apache.kafka.common.header.Headers, byte[]) + * @see io.apicurio.registry.serde.fallback.FallbackArtifactProvider#get(java.lang.String, + * org.apache.kafka.common.header.Headers, byte[]) */ @Override public ArtifactReference get(String topic, Headers headers, byte[] data) { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/fallback/FallbackArtifactProvider.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/fallback/FallbackArtifactProvider.java index 60b5429a6d..b65c5daf1a 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/fallback/FallbackArtifactProvider.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/fallback/FallbackArtifactProvider.java @@ -1,14 +1,13 @@ package io.apicurio.registry.serde.fallback; -import java.util.Map; - +import io.apicurio.registry.resolver.strategy.ArtifactReference; import org.apache.kafka.common.header.Headers; -import io.apicurio.registry.resolver.strategy.ArtifactReference; +import java.util.Map; /** - * Interface for providing a fallback ArtifactReference when the SchemaResolver is not able to find an ArtifactReference in the kafka message - * + * Interface for providing a fallback ArtifactReference when the SchemaResolver is not able to find an + * ArtifactReference 
in the kafka message */ public interface FallbackArtifactProvider { @@ -16,8 +15,9 @@ default void configure(Map configs, boolean isKey) { } /** - * Returns an ArtifactReference that will be used as the fallback - * to search in the registry for the artifact that will be used to deserialize the kafka message + * Returns an ArtifactReference that will be used as the fallback to search in the registry for the + * artifact that will be used to deserialize the kafka message + * * @param topic * @param headers , can be null * @param data diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/DefaultHeadersHandler.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/DefaultHeadersHandler.java index fe6c34d3ba..e71e232b6f 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/DefaultHeadersHandler.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/DefaultHeadersHandler.java @@ -1,15 +1,14 @@ package io.apicurio.registry.serde.headers; -import java.nio.ByteBuffer; -import java.util.Map; - +import io.apicurio.registry.resolver.strategy.ArtifactReference; +import io.apicurio.registry.serde.config.IdOption; +import io.apicurio.registry.utils.IoUtil; import org.apache.kafka.common.errors.SerializationException; import org.apache.kafka.common.header.Header; import org.apache.kafka.common.header.Headers; -import io.apicurio.registry.resolver.strategy.ArtifactReference; -import io.apicurio.registry.serde.config.IdOption; -import io.apicurio.registry.utils.IoUtil; +import java.nio.ByteBuffer; +import java.util.Map; public class DefaultHeadersHandler implements HeadersHandler { @@ -46,13 +45,15 @@ public void configure(Map configs, boolean isKey) { } /** - * @see io.apicurio.registry.serde.headers.HeadersHandler#writeHeaders(org.apache.kafka.common.header.Headers, io.apicurio.registry.serde.SchemaLookupResult) + * @see 
io.apicurio.registry.serde.headers.HeadersHandler#writeHeaders(org.apache.kafka.common.header.Headers, + * io.apicurio.registry.serde.SchemaLookupResult) */ @Override public void writeHeaders(Headers headers, ArtifactReference reference) { if (idOption == IdOption.contentId) { if (reference.getContentId() == null) { - throw new SerializationException("Missing contentId. IdOption is contentId but there is no contentId in the ArtifactReference"); + throw new SerializationException( + "Missing contentId. IdOption is contentId but there is no contentId in the ArtifactReference"); } ByteBuffer buff = ByteBuffer.allocate(8); buff.putLong(reference.getContentId()); @@ -81,14 +82,9 @@ public void writeHeaders(Headers headers, ArtifactReference reference) { */ @Override public ArtifactReference readHeaders(Headers headers) { - return ArtifactReference.builder() - .globalId(getGlobalId(headers)) - .contentId(getContentId(headers)) - .contentHash(getContentHash(headers)) - .groupId(getGroupId(headers)) - .artifactId(getArtifactId(headers)) - .version(getVersion(headers)) - .build(); + return ArtifactReference.builder().globalId(getGlobalId(headers)).contentId(getContentId(headers)) + .contentHash(getContentHash(headers)).groupId(getGroupId(headers)) + .artifactId(getArtifactId(headers)).version(getVersion(headers)).build(); } private String getGroupId(Headers headers) { diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/DefaultHeadersHandlerConfig.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/DefaultHeadersHandlerConfig.java index 686ad12e9f..ad35eb6e22 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/DefaultHeadersHandlerConfig.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/DefaultHeadersHandlerConfig.java @@ -1,36 +1,47 @@ package io.apicurio.registry.serde.headers; -import static io.apicurio.registry.serde.SerdeConfig.*; -import static 
io.apicurio.registry.serde.SerdeHeaders.*; - -import java.util.Map; - +import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef.Importance; import org.apache.kafka.common.config.ConfigDef.Type; -import io.apicurio.registry.serde.config.BaseKafkaSerDeConfig; +import java.util.Map; + +import static io.apicurio.registry.serde.SerdeConfig.*; +import static io.apicurio.registry.serde.SerdeHeaders.*; public class DefaultHeadersHandlerConfig extends BaseKafkaSerDeConfig { public static ConfigDef configDef() { ConfigDef configDef = new ConfigDef() - .define(HEADER_KEY_GLOBAL_ID_OVERRIDE_NAME, Type.STRING, HEADER_KEY_GLOBAL_ID, Importance.HIGH, "TODO docs") - .define(HEADER_KEY_CONTENT_ID_OVERRIDE_NAME, Type.STRING, HEADER_KEY_CONTENT_ID, Importance.HIGH, "TODO docs") - .define(HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, Type.STRING, HEADER_KEY_CONTENT_HASH, Importance.HIGH, "TODO docs") - .define(HEADER_KEY_GROUP_ID_OVERRIDE_NAME, Type.STRING, HEADER_KEY_GROUP_ID, Importance.HIGH, "TODO docs") - .define(HEADER_KEY_ARTIFACT_ID_OVERRIDE_NAME, Type.STRING, HEADER_KEY_ARTIFACT_ID, Importance.HIGH, "TODO docs") - .define(HEADER_KEY_VERSION_OVERRIDE_NAME, Type.STRING, HEADER_KEY_VERSION, Importance.HIGH, "TODO docs") - - .define(HEADER_VALUE_GLOBAL_ID_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_GLOBAL_ID, Importance.HIGH, "TODO docs") - .define(HEADER_VALUE_CONTENT_ID_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_CONTENT_ID, Importance.HIGH, "TODO docs") - .define(HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_CONTENT_HASH, Importance.HIGH, "TODO docs") - .define(HEADER_VALUE_GROUP_ID_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_GROUP_ID, Importance.HIGH, "TODO docs") - .define(HEADER_VALUE_ARTIFACT_ID_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_ARTIFACT_ID, Importance.HIGH, "TODO docs") - .define(HEADER_VALUE_VERSION_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_VERSION, 
Importance.HIGH, "TODO docs"); + .define(HEADER_KEY_GLOBAL_ID_OVERRIDE_NAME, Type.STRING, HEADER_KEY_GLOBAL_ID, + Importance.HIGH, "TODO docs") + .define(HEADER_KEY_CONTENT_ID_OVERRIDE_NAME, Type.STRING, HEADER_KEY_CONTENT_ID, + Importance.HIGH, "TODO docs") + .define(HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, Type.STRING, HEADER_KEY_CONTENT_HASH, + Importance.HIGH, "TODO docs") + .define(HEADER_KEY_GROUP_ID_OVERRIDE_NAME, Type.STRING, HEADER_KEY_GROUP_ID, Importance.HIGH, + "TODO docs") + .define(HEADER_KEY_ARTIFACT_ID_OVERRIDE_NAME, Type.STRING, HEADER_KEY_ARTIFACT_ID, + Importance.HIGH, "TODO docs") + .define(HEADER_KEY_VERSION_OVERRIDE_NAME, Type.STRING, HEADER_KEY_VERSION, Importance.HIGH, + "TODO docs") + + .define(HEADER_VALUE_GLOBAL_ID_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_GLOBAL_ID, + Importance.HIGH, "TODO docs") + .define(HEADER_VALUE_CONTENT_ID_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_CONTENT_ID, + Importance.HIGH, "TODO docs") + .define(HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_CONTENT_HASH, + Importance.HIGH, "TODO docs") + .define(HEADER_VALUE_GROUP_ID_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_GROUP_ID, + Importance.HIGH, "TODO docs") + .define(HEADER_VALUE_ARTIFACT_ID_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_ARTIFACT_ID, + Importance.HIGH, "TODO docs") + .define(HEADER_VALUE_VERSION_OVERRIDE_NAME, Type.STRING, HEADER_VALUE_VERSION, + Importance.HIGH, "TODO docs"); return configDef; - } + } public DefaultHeadersHandlerConfig(Map originals) { super(configDef(), originals); diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/HeadersHandler.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/HeadersHandler.java index 12f0acdebd..b9fc88f18d 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/HeadersHandler.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/HeadersHandler.java @@ -1,15 +1,13 @@ package 
io.apicurio.registry.serde.headers; -import java.util.Map; - -import org.apache.kafka.common.header.Headers; - import io.apicurio.registry.resolver.strategy.ArtifactReference; +import org.apache.kafka.common.header.Headers; +import java.util.Map; /** - * Common interface for headers handling when serializing/deserializing kafka records that have {@link Headers} - * + * Common interface for headers handling when serializing/deserializing kafka records that have + * {@link Headers} */ public interface HeadersHandler { @@ -19,7 +17,9 @@ default void configure(Map configs, boolean isKey) { public void writeHeaders(Headers headers, ArtifactReference reference); /** - * Reads the kafka message headers and returns an ArtifactReference that can contain or not information to identify an Artifact in the registry. + * Reads the kafka message headers and returns an ArtifactReference that can contain or not information to + * identify an Artifact in the registry. + * * @param headers * @return ArtifactReference */ diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/MessageTypeSerdeHeaders.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/MessageTypeSerdeHeaders.java index 59832ca56c..7554eb1eba 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/MessageTypeSerdeHeaders.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/headers/MessageTypeSerdeHeaders.java @@ -1,17 +1,16 @@ package io.apicurio.registry.serde.headers; -import java.util.Map; - -import org.apache.kafka.common.header.Header; -import org.apache.kafka.common.header.Headers; - import io.apicurio.registry.serde.SerdeConfig; import io.apicurio.registry.serde.SerdeHeaders; import io.apicurio.registry.utils.IoUtil; +import org.apache.kafka.common.header.Header; +import org.apache.kafka.common.header.Headers; + +import java.util.Map; /** - * Common utility class for serializers and deserializers that use config 
properties such as {@link SerdeConfig#HEADER_VALUE_MESSAGE_TYPE_OVERRIDE_NAME} - * + * Common utility class for serializers and deserializers that use config properties such as + * {@link SerdeConfig#HEADER_VALUE_MESSAGE_TYPE_OVERRIDE_NAME} */ public class MessageTypeSerdeHeaders { @@ -19,9 +18,12 @@ public class MessageTypeSerdeHeaders { public MessageTypeSerdeHeaders(Map configs, boolean isKey) { if (isKey) { - messageTypeHeaderName = (String) configs.getOrDefault(SerdeConfig.HEADER_KEY_MESSAGE_TYPE_OVERRIDE_NAME, SerdeHeaders.HEADER_KEY_MESSAGE_TYPE); + messageTypeHeaderName = (String) configs.getOrDefault( + SerdeConfig.HEADER_KEY_MESSAGE_TYPE_OVERRIDE_NAME, SerdeHeaders.HEADER_KEY_MESSAGE_TYPE); } else { - messageTypeHeaderName = (String) configs.getOrDefault(SerdeConfig.HEADER_VALUE_MESSAGE_TYPE_OVERRIDE_NAME, SerdeHeaders.HEADER_VALUE_MESSAGE_TYPE); + messageTypeHeaderName = (String) configs.getOrDefault( + SerdeConfig.HEADER_VALUE_MESSAGE_TYPE_OVERRIDE_NAME, + SerdeHeaders.HEADER_VALUE_MESSAGE_TYPE); } } diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/ArtifactReference.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/ArtifactReference.java index 6a6aa04310..fca5fcb974 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/ArtifactReference.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/ArtifactReference.java @@ -3,12 +3,11 @@ import io.apicurio.registry.resolver.strategy.ArtifactReferenceImpl; /** - * There is a new implementation of this class that can be found here {@link io.apicurio.registry.resolver.strategy.ArtifactReferenceImpl} and here {@linkio.apicurio.registry.resolver.strategy.ArtifactReference} - * We keep this class for compatibilty - * - * This class holds the information that reference one Artifact in Apicurio Registry. It will always make - * reference to an artifact in a group. 
Optionally it can reference to a specific version. - + * There is a new implementation of this class that can be found here + * {@link io.apicurio.registry.resolver.strategy.ArtifactReferenceImpl} and here + * {@link io.apicurio.registry.resolver.strategy.ArtifactReference} We keep this class for compatibility. This + * class holds the information that reference one Artifact in Apicurio Registry. It will always make reference + * to an artifact in a group. Optionally it can reference to a specific version. */ public class ArtifactReference extends ArtifactReferenceImpl { @@ -20,7 +19,7 @@ public static ArtifactReference fromGlobalId(Long globalId) { return builder().globalId(globalId).build(); } - public static ArtifactReferenceBuilder builder(){ + public static ArtifactReferenceBuilder builder() { return new ArtifactReferenceBuilder(); } diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/ArtifactResolverStrategy.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/ArtifactResolverStrategy.java index a4290400a5..5919adb063 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/ArtifactResolverStrategy.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/ArtifactResolverStrategy.java @@ -3,36 +3,38 @@ import io.apicurio.registry.resolver.ParsedSchema; import io.apicurio.registry.resolver.data.Record; import io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy; -import io.apicurio.registry.serde.data.KafkaSerdeRecord; import io.apicurio.registry.serde.data.KafkaSerdeMetadata; +import io.apicurio.registry.serde.data.KafkaSerdeRecord; /** - * There is a new interface responsible with the same responsibility as this one, can be found here {@link ArtifactReferenceResolverStrategy} - * The interface {@link ArtifactResolverStrategy} is kept for backwards compatibility - * - * A {@link ArtifactResolverStrategy} is used by the Kafka
serializer/deserializer to determine - * the {@link ArtifactReference} under which the message schemas are located or should be registered + * There is a new interface responsible with the same responsibility as this one, can be found here + * {@link ArtifactReferenceResolverStrategy} The interface {@link ArtifactResolverStrategy} is kept for + * backwards compatibility A {@link ArtifactResolverStrategy} is used by the Kafka serializer/deserializer to + * determine the {@link ArtifactReference} under which the message schemas are located or should be registered * in the registry. The default is {@link TopicIdStrategy}. - * */ public interface ArtifactResolverStrategy extends ArtifactReferenceResolverStrategy { /** - * For a given topic and message, returns the {@link ArtifactReference} under which the message schemas are located or should be registered - * in the registry. + * For a given topic and message, returns the {@link ArtifactReference} under which the message schemas + * are located or should be registered in the registry. * * @param topic the Kafka topic name to which the message is being published. * @param isKey true when encoding a message key, false for a message value. 
- * @param schema the schema of the message being serialized/deserialized, can be null if we don't know it beforehand - * @return the {@link ArtifactReference} under which the message schemas are located or should be registered + * @param schema the schema of the message being serialized/deserialized, can be null if we don't know it + * beforehand + * @return the {@link ArtifactReference} under which the message schemas are located or should be + * registered */ ArtifactReference artifactReference(String topic, boolean isKey, T schema); @Override - default io.apicurio.registry.resolver.strategy.ArtifactReference artifactReference(Record data, ParsedSchema parsedSchema) { + default io.apicurio.registry.resolver.strategy.ArtifactReference artifactReference(Record data, + ParsedSchema parsedSchema) { KafkaSerdeRecord kdata = (KafkaSerdeRecord) data; KafkaSerdeMetadata metadata = kdata.metadata(); - return artifactReference(metadata.getTopic(), metadata.isKey(), parsedSchema == null ? null : parsedSchema.getParsedSchema()); + return artifactReference(metadata.getTopic(), metadata.isKey(), + parsedSchema == null ? 
null : parsedSchema.getParsedSchema()); } } diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/SimpleTopicIdStrategy.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/SimpleTopicIdStrategy.java index a013503a6f..63894e5f7f 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/SimpleTopicIdStrategy.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/SimpleTopicIdStrategy.java @@ -9,15 +9,13 @@ public class SimpleTopicIdStrategy implements ArtifactReferenceResolverStrategy { /** - * @see io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, io.apicurio.registry.resolver.ParsedSchema) + * @see io.apicurio.registry.resolver.strategy.ArtifactReferenceResolverStrategy#artifactReference(io.apicurio.registry.resolver.data.Record, + * io.apicurio.registry.resolver.ParsedSchema) */ @Override public ArtifactReference artifactReference(Record data, ParsedSchema parsedSchema) { KafkaSerdeRecord kdata = (KafkaSerdeRecord) data; - return ArtifactReference.builder() - .groupId(null) - .artifactId(kdata.metadata().getTopic()) - .build(); + return ArtifactReference.builder().groupId(null).artifactId(kdata.metadata().getTopic()).build(); } /** diff --git a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/TopicIdStrategy.java b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/TopicIdStrategy.java index c8f75ec6fc..09fe5e7679 100644 --- a/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/TopicIdStrategy.java +++ b/serdes/serde-common/src/main/java/io/apicurio/registry/serde/strategy/TopicIdStrategy.java @@ -13,8 +13,7 @@ public class TopicIdStrategy implements ArtifactReferenceResolverStrategy data, ParsedSchema parsedSchema) { KafkaSerdeRecord kdata = (KafkaSerdeRecord) data; KafkaSerdeMetadata metadata = kdata.metadata(); - 
return ArtifactReference.builder() - .groupId(null) + return ArtifactReference.builder().groupId(null) .artifactId(String.format("%s-%s", metadata.getTopic(), metadata.isKey() ? "key" : "value")) .build(); } diff --git a/serdes/serde-common/src/test/java/io/apicurio/registry/serde/headers/DefaultHeadersHandlerTest.java b/serdes/serde-common/src/test/java/io/apicurio/registry/serde/headers/DefaultHeadersHandlerTest.java index e79752ba69..c88b2a690c 100644 --- a/serdes/serde-common/src/test/java/io/apicurio/registry/serde/headers/DefaultHeadersHandlerTest.java +++ b/serdes/serde-common/src/test/java/io/apicurio/registry/serde/headers/DefaultHeadersHandlerTest.java @@ -1,19 +1,18 @@ package io.apicurio.registry.serde.headers; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; +import io.apicurio.registry.resolver.strategy.ArtifactReference; +import io.apicurio.registry.serde.SerdeConfig; +import io.apicurio.registry.utils.IoUtil; +import org.apache.kafka.common.header.internals.RecordHeader; +import org.apache.kafka.common.header.internals.RecordHeaders; +import org.junit.jupiter.api.Test; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import org.apache.kafka.common.header.internals.RecordHeader; -import org.apache.kafka.common.header.internals.RecordHeaders; -import org.junit.jupiter.api.Test; - -import io.apicurio.registry.resolver.strategy.ArtifactReference; -import io.apicurio.registry.serde.SerdeConfig; -import io.apicurio.registry.utils.IoUtil; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; public class DefaultHeadersHandlerTest { @@ -21,11 +20,13 @@ public class DefaultHeadersHandlerTest { void testReadKeyHeadersHandlesPresentContentHash() { String contentHashHeaderName = "some key header name"; String contentHashValue = "context hash value"; - Map configs = 
Collections.singletonMap(SerdeConfig.HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); - RecordHeaders headers = new RecordHeaders(new RecordHeader[]{new RecordHeader(contentHashHeaderName, IoUtil.toBytes(contentHashValue))}); + Map configs = Collections + .singletonMap(SerdeConfig.HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); + RecordHeaders headers = new RecordHeaders(new RecordHeader[] { + new RecordHeader(contentHashHeaderName, IoUtil.toBytes(contentHashValue)) }); DefaultHeadersHandler handler = new DefaultHeadersHandler(); handler.configure(configs, true); - + ArtifactReference artifact = handler.readHeaders(headers); assertEquals(contentHashValue, artifact.getContentHash()); @@ -34,11 +35,12 @@ void testReadKeyHeadersHandlesPresentContentHash() { @Test void testReadKeyHeadersHandlesMissingContentHash() { String contentHashHeaderName = "another key header name"; - Map configs = Collections.singletonMap(SerdeConfig.HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); - RecordHeaders headers = new RecordHeaders(new RecordHeader[]{}); + Map configs = Collections + .singletonMap(SerdeConfig.HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); + RecordHeaders headers = new RecordHeaders(new RecordHeader[] {}); DefaultHeadersHandler handler = new DefaultHeadersHandler(); handler.configure(configs, true); - + ArtifactReference artifact = handler.readHeaders(headers); assertEquals(null, artifact.getContentHash()); @@ -48,11 +50,13 @@ void testReadKeyHeadersHandlesMissingContentHash() { void testReadValueHeadersHandlesPresentContentHash() { String contentHashHeaderName = "value header name"; String contentHashValue = "some value"; - Map configs = Collections.singletonMap(SerdeConfig.HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); - RecordHeaders headers = new RecordHeaders(new RecordHeader[]{new RecordHeader(contentHashHeaderName, IoUtil.toBytes(contentHashValue))}); + Map configs = Collections + 
.singletonMap(SerdeConfig.HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); + RecordHeaders headers = new RecordHeaders(new RecordHeader[] { + new RecordHeader(contentHashHeaderName, IoUtil.toBytes(contentHashValue)) }); DefaultHeadersHandler handler = new DefaultHeadersHandler(); handler.configure(configs, false); - + ArtifactReference artifact = handler.readHeaders(headers); assertEquals(contentHashValue, artifact.getContentHash()); @@ -61,11 +65,12 @@ void testReadValueHeadersHandlesPresentContentHash() { @Test void testReadValueHeadersHandlesMissingContentHash() { String contentHashHeaderName = "another value header name"; - Map configs = Collections.singletonMap(SerdeConfig.HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); - RecordHeaders headers = new RecordHeaders(new RecordHeader[]{}); + Map configs = Collections + .singletonMap(SerdeConfig.HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); + RecordHeaders headers = new RecordHeaders(new RecordHeader[] {}); DefaultHeadersHandler handler = new DefaultHeadersHandler(); handler.configure(configs, false); - + ArtifactReference artifact = handler.readHeaders(headers); assertEquals(null, artifact.getContentHash()); @@ -75,7 +80,8 @@ void testReadValueHeadersHandlesMissingContentHash() { void testWriteKeyHeadersHandlesPresentContentHash() { String contentHashHeaderName = "write key header name"; String contentHashValue = "some write key value"; - Map configs = Collections.singletonMap(SerdeConfig.HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); + Map configs = Collections + .singletonMap(SerdeConfig.HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); RecordHeaders headers = new RecordHeaders(); DefaultHeadersHandler handler = new DefaultHeadersHandler(); handler.configure(configs, true); @@ -89,7 +95,8 @@ void testWriteKeyHeadersHandlesPresentContentHash() { @Test void testWriteKeyHeadersHandlesMissingContentHash() { String contentHashHeaderName 
= "another header name"; - Map configs = Collections.singletonMap(SerdeConfig.HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); + Map configs = Collections + .singletonMap(SerdeConfig.HEADER_KEY_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); RecordHeaders headers = new RecordHeaders(); DefaultHeadersHandler handler = new DefaultHeadersHandler(); handler.configure(configs, true); @@ -104,7 +111,8 @@ void testWriteKeyHeadersHandlesMissingContentHash() { void testWriteValueHeadersHandlesPresentContentHash() { String contentHashHeaderName = "write value header name"; String contentHashValue = "some write value"; - Map configs = Collections.singletonMap(SerdeConfig.HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); + Map configs = Collections + .singletonMap(SerdeConfig.HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); RecordHeaders headers = new RecordHeaders(); DefaultHeadersHandler handler = new DefaultHeadersHandler(); handler.configure(configs, false); @@ -118,7 +126,8 @@ void testWriteValueHeadersHandlesPresentContentHash() { @Test void testWriteValueHeadersHandlesMissingContentHash() { String contentHashHeaderName = "another write key header name"; - Map configs = Collections.singletonMap(SerdeConfig.HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); + Map configs = Collections + .singletonMap(SerdeConfig.HEADER_VALUE_CONTENT_HASH_OVERRIDE_NAME, contentHashHeaderName); RecordHeaders headers = new RecordHeaders(); DefaultHeadersHandler handler = new DefaultHeadersHandler(); handler.configure(configs, false); @@ -142,10 +151,8 @@ void testWritesArtifactCoordinatesWhenContentHashPresent() { RecordHeaders headers = new RecordHeaders(); DefaultHeadersHandler handler = new DefaultHeadersHandler(); handler.configure(configs, false); - ArtifactReference artifact = ArtifactReference.builder() - .contentHash(contentHashValue) - .artifactId(artifactIdValue) - .build(); + ArtifactReference artifact = 
ArtifactReference.builder().contentHash(contentHashValue) + .artifactId(artifactIdValue).build(); handler.writeHeaders(headers, artifact); diff --git a/system-tests/pom.xml b/system-tests/pom.xml index 2fad2ade60..835f16161d 100644 --- a/system-tests/pom.xml +++ b/system-tests/pom.xml @@ -1,304 +1,302 @@ - - - apicurio-registry - io.apicurio - 3.0.0-SNAPSHOT - - 4.0.0 + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + - apicurio-registry-system-tests - apicurio-registry-system-tests + apicurio-registry-system-tests + apicurio-registry-system-tests - - 17 - 17 - 5.8.2 - 1.7.0 - 5.12.2 - 6.8.0 - 0.95.0 - 1.18.28 - 1.0.0-v2.0.0.final - 2.2 - 2.0.7 - 0.36.0 - 6.6.1.202309021850-r - 20231013 - 3.12.0 - 5.2.2 - 3.9.3 - + + 17 + 17 + 5.8.2 + 1.7.0 + 5.12.2 + 6.8.0 + 0.95.0 + 1.18.28 + 1.0.0-v2.0.0.final + 2.2 + 2.0.7 + 0.36.0 + 6.6.1.202309021850-r + 20231013 + 3.12.0 + 5.2.2 + 3.9.3 + - - - org.junit.jupiter - junit-jupiter - test - - - io.fabric8 - openshift-client - compile - - - io.fabric8 - kubernetes-client - - - io.fabric8 - kubernetes-model-core - - - io.fabric8 - kubernetes-model-networking - - - io.fabric8 - kubernetes-model-common - - - io.fabric8 - kubernetes-model-apiextensions - - - org.hamcrest - hamcrest - ${version.hamcrest} - test - - - org.slf4j - slf4j-api - ${version.slf4.simple} - - - org.slf4j - slf4j-simple - ${version.slf4.simple} - - - org.junit.jupiter - junit-jupiter-api - - - io.strimzi - api - ${version.strimzi.api} - - - org.eclipse.jgit - org.eclipse.jgit - ${version.eclipse.jgit} - - - org.json - json - ${version.org.json} - - - org.apache.commons - commons-lang3 - ${version.apache.commons} - - - org.apache.httpcomponents.core5 - httpcore5 - ${version.apache.httpcomponents} - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - - - org.apache.maven - 
maven-artifact - ${version.apache.maven.artifact} - - - io.sundr - builder-annotations - ${sundrio.version} - provided - - - org.projectlombok - lombok - ${lombok.version} - provided - - - javax.validation - validation-api - 2.0.1.Final - provided - - + + + org.junit.jupiter + junit-jupiter + test + + + io.fabric8 + openshift-client + compile + + + io.fabric8 + kubernetes-client + + + io.fabric8 + kubernetes-model-core + + + io.fabric8 + kubernetes-model-networking + + + io.fabric8 + kubernetes-model-common + + + io.fabric8 + kubernetes-model-apiextensions + + + org.hamcrest + hamcrest + ${version.hamcrest} + test + + + org.slf4j + slf4j-api + ${version.slf4.simple} + + + org.slf4j + slf4j-simple + ${version.slf4.simple} + + + org.junit.jupiter + junit-jupiter-api + + + io.strimzi + api + ${version.strimzi.api} + + + org.eclipse.jgit + org.eclipse.jgit + ${version.eclipse.jgit} + + + org.json + json + ${version.org.json} + + + org.apache.commons + commons-lang3 + ${version.apache.commons} + + + org.apache.httpcomponents.core5 + httpcore5 + ${version.apache.httpcomponents} + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + + + org.apache.maven + maven-artifact + ${version.apache.maven.artifact} + + + io.sundr + builder-annotations + ${sundrio.version} + provided + + + org.projectlombok + lombok + ${lombok.version} + provided + + + javax.validation + validation-api + 2.0.1.Final + provided + + - - - - - org.apache.maven.plugins - maven-compiler-plugin - - - org.apache.maven.plugins - maven-failsafe-plugin - 3.1.2 - - - org.apache.maven.plugins - maven-surefire-plugin - - - - - - maven-surefire-plugin - - - org.apache.maven.plugins - maven-jar-plugin - - - - true - true - - - - - - org.apache.maven.plugins - maven-failsafe-plugin - - - - integration-test - verify - - - - **/IT*.java - **/*IT.java - 
**/ST*.java - **/*ST.java - - true - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 3.3.0 - - checkstyle.xml - UTF-8 - true - true - false - - - - validate - validate - - check - - - - + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + org.apache.maven.plugins + maven-failsafe-plugin + 3.1.2 + + + org.apache.maven.plugins + maven-surefire-plugin + + + + + + maven-surefire-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + true + true + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + + integration-test + verify + + + + **/IT*.java + **/*IT.java + **/ST*.java + **/*ST.java + + true + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.3.0 + + checkstyle.xml + UTF-8 + true + true + false + + + + validate + + check + + validate + + + - - com.googlecode.maven-download-plugin - download-maven-plugin - 1.7.1 - - - - generate-sources - - wget - - - https://raw.githubusercontent.com/Apicurio/apicurio-registry-operator/${operator.version}/packagemanifests/${operator.version}/registry.apicur.io_apicurioregistries.yaml - registry.apicur.io_apicurioregistries.yaml - ${project.build.directory} - - - - - - io.fabric8 - java-generator-maven-plugin - ${version.fabric8-java-gen} - - - generate-sources - - generate - - - - - ${project.build.directory}/registry.apicur.io_apicurioregistries.yaml - true - - - + + com.googlecode.maven-download-plugin + download-maven-plugin + 1.7.1 + + + + wget + + + generate-sources + + https://raw.githubusercontent.com/Apicurio/apicurio-registry-operator/${operator.version}/packagemanifests/${operator.version}/registry.apicur.io_apicurioregistries.yaml + registry.apicur.io_apicurioregistries.yaml + ${project.build.directory} + + + + + + io.fabric8 + java-generator-maven-plugin + ${version.fabric8-java-gen} + + ${project.build.directory}/registry.apicur.io_apicurioregistries.yaml + true + + + + + generate + + generate-sources + + + + - - - - default - - default - - - - olm - - olm 
& !bundle - false - - - - olm-clusterwide - - olm-clusterwide & !bundle & !olm-namespace - false - - - - olm-namespace - - olm-namespace & !bundle & !olm-clusterwide - false - - - - bundle - - bundle & !olm - false - - - + + + + default + + default + + + + olm + + olm & !bundle + false + + + + olm-clusterwide + + olm-clusterwide & !bundle & !olm-namespace + false + + + + olm-namespace + + olm-namespace & !bundle & !olm-clusterwide + false + + + + bundle + + bundle & !olm + false + + + diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ApicurioRegistryApiClient.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ApicurioRegistryApiClient.java index 064731f0d0..03e48e76da 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ApicurioRegistryApiClient.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ApicurioRegistryApiClient.java @@ -104,17 +104,15 @@ public boolean waitServiceAvailable() { public boolean createArtifact(String groupId, String id, String type, String content) { // Get request URI - URI uri = HttpClientUtils.buildURI( - "http://%s:%d/apis/registry/v2/groups/%s/artifacts", host, port, groupId - ); + URI uri = HttpClientUtils.buildURI("http://%s:%d/apis/registry/v2/groups/%s/artifacts", host, port, + groupId); // Get request builder HttpRequest.Builder requestBuilder = HttpClientUtils.newBuilder() // Set request URI .uri(uri) // Set common request headers - .header("Content-Type", "application/json") - .header("X-Registry-ArtifactId", id) + .header("Content-Type", "application/json").header("X-Registry-ArtifactId", id) .header("X-Registry-ArtifactType", type.name()) // Set request type and content .POST(HttpRequest.BodyPublishers.ofString(content)); @@ -142,9 +140,8 @@ public boolean createArtifact(String groupId, String id, String type, String con public String readArtifactContent(String group, String id) { // Get request URI - URI uri = 
HttpClientUtils.buildURI( - "http://%s:%d/apis/registry/v2/groups/%s/artifacts/%s", host, port, group, id - ); + URI uri = HttpClientUtils.buildURI("http://%s:%d/apis/registry/v2/groups/%s/artifacts/%s", host, port, + group, id); // Get request builder HttpRequest.Builder requestBuilder = HttpClientUtils.newBuilder() @@ -176,9 +173,8 @@ public String readArtifactContent(String group, String id) { public boolean deleteArtifact(String group, String id) { // Get request URL - URI uri = HttpClientUtils.buildURI( - "http://%s:%d/apis/registry/v2/groups/%s/artifacts/%s", host, port, group, id - ); + URI uri = HttpClientUtils.buildURI("http://%s:%d/apis/registry/v2/groups/%s/artifacts/%s", host, port, + group, id); // Get request builder HttpRequest.Builder requestBuilder = HttpClientUtils.newBuilder() @@ -214,12 +210,8 @@ public ArtifactList listArtifacts() { public ArtifactList listArtifacts(int limit) { // Get request URI - URI uri = HttpClientUtils.buildURI( - "http://%s:%d/apis/registry/v2/search/artifacts?limit=%d", - host, - port, - limit - ); + URI uri = HttpClientUtils.buildURI("http://%s:%d/apis/registry/v2/search/artifacts?limit=%d", host, + port, limit); // Get request builder HttpRequest.Builder requestBuilder = HttpClientUtils.newBuilder() diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactContent.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactContent.java index 7a464617f4..527d214b5c 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactContent.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactContent.java @@ -1,6 +1,6 @@ package io.apicurio.registry.systemtests.client; public final class ArtifactContent { - public static final String DEFAULT_AVRO = "{\"name\":\"price\",\"namespace\":\"com.example\",\"type\":\"record\"," + - 
"\"fields\":[{\"name\":\"symbol\",\"type\":\"string\"},{\"name\":\"price\",\"type\":\"string\"}]}"; + public static final String DEFAULT_AVRO = "{\"name\":\"price\",\"namespace\":\"com.example\",\"type\":\"record\"," + + "\"fields\":[{\"name\":\"symbol\",\"type\":\"string\"},{\"name\":\"price\",\"type\":\"string\"}]}"; } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactState.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactState.java index 807a120ef2..48c865f2d9 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactState.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactState.java @@ -1,7 +1,5 @@ package io.apicurio.registry.systemtests.client; public enum ArtifactState { - ENABLED, - DISABLED, - DEPRECATED + ENABLED, DISABLED, DEPRECATED } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactType.java index 618b480138..38b13f2e3b 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/client/ArtifactType.java @@ -1,14 +1,5 @@ package io.apicurio.registry.systemtests.client; public enum ArtifactType { - AVRO, - PROTOBUF, - JSON, - OPENAPI, - ASYNCAPI, - GRAPHQL, - KCONNECT, - WSDL, - XSD, - XML + AVRO, PROTOBUF, JSON, OPENAPI, ASYNCAPI, GRAPHQL, KCONNECT, WSDL, XSD, XML } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/executor/Exec.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/executor/Exec.java index 29efff1707..7a625460dc 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/executor/Exec.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/executor/Exec.java @@ -1,4 +1,5 @@ package 
io.apicurio.registry.systemtests.executor; + import io.apicurio.registry.systemtests.framework.LoggerUtils; import org.slf4j.Logger; @@ -111,19 +112,19 @@ public int exec(List commands) throws IOException, InterruptedException, * Method executes external command * * @param commands arguments for command - * @param timeout timeout in ms for kill + * @param timeout timeout in ms for kill * @return returns ecode of execution * @throws IOException * @throws InterruptedException * @throws ExecutionException */ - public int exec(List commands, int timeout) throws IOException, InterruptedException, ExecutionException { + public int exec(List commands, int timeout) + throws IOException, InterruptedException, ExecutionException { return exec(null, commands, timeout); } private ProcessBuilder getProcessBuilder(List commands) { - ProcessBuilder builder = new ProcessBuilder() - .command(commands) + ProcessBuilder builder = new ProcessBuilder().command(commands) .directory(new File(System.getProperty("user.dir"))); if (this.env != null) { @@ -164,17 +165,16 @@ private void processStdErr() throws ExecutionException, InterruptedException { /** * Method executes external command * - * @param input execute commands during execution + * @param input execute commands during execution * @param commands arguments for command - * @param timeout timeout in ms for kill + * @param timeout timeout in ms for kill * @return returns ecode of execution * @throws IOException * @throws InterruptedException * @throws ExecutionException */ - public int exec( - String input, List commands, int timeout - ) throws IOException, InterruptedException, ExecutionException { + public int exec(String input, List commands, int timeout) + throws IOException, InterruptedException, ExecutionException { LOGGER.debug("Running command - " + String.join(" ", commands.toArray(new String[0]))); process = getProcessBuilder(commands).start(); @@ -279,14 +279,7 @@ public static ExecutionResultData executeAndCheck(String 
input, List com } public static ExecutionResultData executeAndCheck(String input, int timeout, String... commands) { - ExecutionResultData results = execute( - Arrays.asList(commands), - timeout, - true, - true, - null, - input - ); + ExecutionResultData results = execute(Arrays.asList(commands), timeout, true, true, null, input); if (!results.getRetCode()) { throw new IllegalStateException(results.getStdErr()); @@ -295,9 +288,8 @@ public static ExecutionResultData executeAndCheck(String input, int timeout, Str return results; } - public static ExecutionResultData executeAndCheck( - List command, int timeout, boolean logToOutput, boolean appendLineSeparator, Map env - ) { + public static ExecutionResultData executeAndCheck(List command, int timeout, boolean logToOutput, + boolean appendLineSeparator, Map env) { ExecutionResultData results = execute(command, timeout, logToOutput, appendLineSeparator, env, null); if (!results.getRetCode()) { throw new IllegalStateException(results.getStdErr()); @@ -325,26 +317,18 @@ public static ExecutionResultData execute(List command, int timeout, boo return execute(command, timeout, logToOutput, true); } - public static ExecutionResultData execute( - List command, int timeout, boolean logToOutput, boolean appendLineSeparator - ) { + public static ExecutionResultData execute(List command, int timeout, boolean logToOutput, + boolean appendLineSeparator) { return execute(command, timeout, logToOutput, appendLineSeparator, null); } - public static ExecutionResultData execute( - List command, int timeout, boolean logToOutput, boolean appendLineSeparator, Map env - ) { + public static ExecutionResultData execute(List command, int timeout, boolean logToOutput, + boolean appendLineSeparator, Map env) { return execute(command, timeout, logToOutput, appendLineSeparator, env, null); } - public static ExecutionResultData execute( - List command, - int timeout, - boolean logToOutput, - boolean appendLineSeparator, - Map env, - String input - 
) { + public static ExecutionResultData execute(List command, int timeout, boolean logToOutput, + boolean appendLineSeparator, Map env, String input) { Exec executor = new Exec(appendLineSeparator); try { executor.setEnv(env); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/ApicurioRegistryUtils.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/ApicurioRegistryUtils.java index 8cd6a9ef5a..0c4ce8b074 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/ApicurioRegistryUtils.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/ApicurioRegistryUtils.java @@ -18,11 +18,7 @@ public class ApicurioRegistryUtils { private static final Logger LOGGER = LoggerUtils.getLogger(); private static String getTruststoreSecretName(ApicurioRegistry registry) { - Security security = registry - .getSpec() - .getConfiguration() - .getKafkasql() - .getSecurity(); + Security security = registry.getSpec().getConfiguration().getKafkasql().getSecurity(); if (security.getTls() != null) { return security.getTls().getTruststoreSecretName(); @@ -34,11 +30,7 @@ private static String getTruststoreSecretName(ApicurioRegistry registry) { } private static String getKeystoreSecretName(ApicurioRegistry registry) { - Security security = registry - .getSpec() - .getConfiguration() - .getKafkasql() - .getSecurity(); + Security security = registry.getSpec().getConfiguration().getKafkasql().getSecurity(); if (security.getTls() != null) { return security.getTls().getKeystoreSecretName(); @@ -47,15 +39,11 @@ private static String getKeystoreSecretName(ApicurioRegistry registry) { return null; } - public static ApicurioRegistry deployDefaultApicurioRegistrySql( - ExtensionContext testContext, - boolean useKeycloak - ) throws InterruptedException { + public static ApicurioRegistry deployDefaultApicurioRegistrySql(ExtensionContext testContext, + boolean useKeycloak) throws InterruptedException { 
// Get Apicurio Registry - ApicurioRegistry apicurioRegistrySql = ApicurioRegistryResourceType.getDefaultSql( - Constants.REGISTRY, - Environment.NAMESPACE - ); + ApicurioRegistry apicurioRegistrySql = ApicurioRegistryResourceType.getDefaultSql(Constants.REGISTRY, + Environment.NAMESPACE); if (useKeycloak) { ApicurioRegistryResourceType.updateWithDefaultKeycloak(apicurioRegistrySql); @@ -67,15 +55,11 @@ public static ApicurioRegistry deployDefaultApicurioRegistrySql( return apicurioRegistrySql; } - public static ApicurioRegistry deployDefaultApicurioRegistryKafkasqlNoAuth( - ExtensionContext testContext, - boolean useKeycloak - ) throws InterruptedException { + public static ApicurioRegistry deployDefaultApicurioRegistryKafkasqlNoAuth(ExtensionContext testContext, + boolean useKeycloak) throws InterruptedException { // Get Apicurio Registry - ApicurioRegistry apicurioRegistryKafkasqlNoAuth = ApicurioRegistryResourceType.getDefaultKafkasql( - Constants.REGISTRY, - Environment.NAMESPACE - ); + ApicurioRegistry apicurioRegistryKafkasqlNoAuth = ApicurioRegistryResourceType + .getDefaultKafkasql(Constants.REGISTRY, Environment.NAMESPACE); if (useKeycloak) { ApicurioRegistryResourceType.updateWithDefaultKeycloak(apicurioRegistryKafkasqlNoAuth); @@ -87,34 +71,22 @@ public static ApicurioRegistry deployDefaultApicurioRegistryKafkasqlNoAuth( return apicurioRegistryKafkasqlNoAuth; } - public static ApicurioRegistry deployDefaultApicurioRegistryKafkasqlTLS( - ExtensionContext testContext, - Kafka kafka, - boolean useKeycloak - ) throws InterruptedException { + public static ApicurioRegistry deployDefaultApicurioRegistryKafkasqlTLS(ExtensionContext testContext, + Kafka kafka, boolean useKeycloak) throws InterruptedException { // Get Apicurio Registry - ApicurioRegistry apicurioRegistryKafkasqlTLS = ApicurioRegistryResourceType.getDefaultKafkasql( - Constants.REGISTRY, - Environment.NAMESPACE - ); + ApicurioRegistry apicurioRegistryKafkasqlTLS = ApicurioRegistryResourceType + 
.getDefaultKafkasql(Constants.REGISTRY, Environment.NAMESPACE); // Update Apicurio Registry to have TLS configuration ApicurioRegistryResourceType.updateWithDefaultTLS(apicurioRegistryKafkasqlTLS); - CertificateUtils.createTruststore( - testContext, - kafka.getMetadata().getNamespace(), + CertificateUtils.createTruststore(testContext, kafka.getMetadata().getNamespace(), kafka.getMetadata().getName() + "-cluster-ca-cert", - getTruststoreSecretName(apicurioRegistryKafkasqlTLS) - ); + getTruststoreSecretName(apicurioRegistryKafkasqlTLS)); - CertificateUtils.createKeystore( - testContext, - kafka.getMetadata().getNamespace(), - Constants.KAFKA_USER, + CertificateUtils.createKeystore(testContext, kafka.getMetadata().getNamespace(), Constants.KAFKA_USER, getKeystoreSecretName(apicurioRegistryKafkasqlTLS), - kafka.getMetadata().getName() + "-kafka-bootstrap" - ); + kafka.getMetadata().getName() + "-kafka-bootstrap"); if (useKeycloak) { ApicurioRegistryResourceType.updateWithDefaultKeycloak(apicurioRegistryKafkasqlTLS); @@ -126,26 +98,18 @@ public static ApicurioRegistry deployDefaultApicurioRegistryKafkasqlTLS( return apicurioRegistryKafkasqlTLS; } - public static ApicurioRegistry deployDefaultApicurioRegistryKafkasqlSCRAM( - ExtensionContext testContext, - Kafka kafka, - boolean useKeycloak - ) throws InterruptedException { + public static ApicurioRegistry deployDefaultApicurioRegistryKafkasqlSCRAM(ExtensionContext testContext, + Kafka kafka, boolean useKeycloak) throws InterruptedException { // Get Apicurio Registry - ApicurioRegistry apicurioRegistryKafkasqlSCRAM = ApicurioRegistryResourceType.getDefaultKafkasql( - Constants.REGISTRY, - Environment.NAMESPACE - ); + ApicurioRegistry apicurioRegistryKafkasqlSCRAM = ApicurioRegistryResourceType + .getDefaultKafkasql(Constants.REGISTRY, Environment.NAMESPACE); // Update to have SCRAM configuration ApicurioRegistryResourceType.updateWithDefaultSCRAM(apicurioRegistryKafkasqlSCRAM); - CertificateUtils.createTruststore( - 
testContext, - kafka.getMetadata().getNamespace(), + CertificateUtils.createTruststore(testContext, kafka.getMetadata().getNamespace(), kafka.getMetadata().getName() + "-cluster-ca-cert", - getTruststoreSecretName(apicurioRegistryKafkasqlSCRAM) - ); + getTruststoreSecretName(apicurioRegistryKafkasqlSCRAM)); if (useKeycloak) { ApicurioRegistryResourceType.updateWithDefaultKeycloak(apicurioRegistryKafkasqlSCRAM); @@ -214,10 +178,8 @@ public static boolean isApicurioRegistryHostnameReady(ApicurioRegistry apicurioR String registryRouteNamespace = registryRoute.getMetadata().getNamespace(); String registryRouteName = registryRoute.getMetadata().getName(); - return ( - Kubernetes.isRouteReady(registryRouteNamespace, registryRouteName) - && !defaultRegistryHostname.equals(Kubernetes.getRouteHost(registryRouteNamespace, registryRouteName)) - ); + return (Kubernetes.isRouteReady(registryRouteNamespace, registryRouteName) && !defaultRegistryHostname + .equals(Kubernetes.getRouteHost(registryRouteNamespace, registryRouteName))); } public static boolean waitApicurioRegistryReady(ApicurioRegistry apicurioRegistry) { diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/CertificateUtils.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/CertificateUtils.java index 221ec80e2d..827827e2b3 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/CertificateUtils.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/CertificateUtils.java @@ -29,16 +29,8 @@ public class CertificateUtils { * @param publicKey Path to the public key to be imported */ private static void runTruststoreCmd(Path path, String password, Path publicKey) { - Exec.executeAndCheck( - "keytool", - "-keystore", path.toString(), - "-storepass", password, - "-noprompt", - "-alias", "ca", - "-import", - "-file", publicKey.toString(), - "-storetype", "PKCS12" - ); + Exec.executeAndCheck("keytool", "-keystore", 
path.toString(), "-storepass", password, "-noprompt", + "-alias", "ca", "-import", "-file", publicKey.toString(), "-storetype", "PKCS12"); } /** @@ -47,25 +39,14 @@ private static void runTruststoreCmd(Path path, String password, Path publicKey) * @param publicKey Public key to be imported * @param privateKey Private key to be imported */ - private static void runKeystoreCmd(Path path, String password, Path publicKey, Path privateKey, String hostname) { - List commands = List.of( - "openssl", - "pkcs12", - "-export", - "-in", publicKey.toString(), - "-inkey", privateKey.toString(), - "-name", hostname, - "-password", "pass:" + password, - "-out", path.toString() - ); - - Exec.executeAndCheck( - commands, - 60_000, - true, - true, - Collections.singletonMap("RANDFILE", Environment.getTmpPath(".rnd").toString()) - ); + private static void runKeystoreCmd(Path path, String password, Path publicKey, Path privateKey, + String hostname) { + List commands = List.of("openssl", "pkcs12", "-export", "-in", publicKey.toString(), "-inkey", + privateKey.toString(), "-name", hostname, "-password", "pass:" + password, "-out", + path.toString()); + + Exec.executeAndCheck(commands, 60_000, true, true, + Collections.singletonMap("RANDFILE", Environment.getTmpPath(".rnd").toString())); } private static String encode(Path path) { @@ -81,9 +62,7 @@ private static String encode(String data) { } private static String decode(String data) { - return new String( - Base64.getDecoder().decode(data) - ); + return new String(Base64.getDecoder().decode(data)); } private static String decodeBase64Secret(String namespace, String name, String key) { @@ -98,26 +77,16 @@ private static void writeToFile(String data, Path path) { } } - private static void createSecret( - ExtensionContext testContext, String namespace, String name, Map secretData - ) throws InterruptedException { - Secret secret = new SecretBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .endMetadata() - 
.addToData(secretData) - .build(); - - ResourceManager.getInstance().createResource( true, secret); + private static void createSecret(ExtensionContext testContext, String namespace, String name, + Map secretData) throws InterruptedException { + Secret secret = new SecretBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .endMetadata().addToData(secretData).build(); + + ResourceManager.getInstance().createResource(true, secret); } - public static void createTruststore( - ExtensionContext testContext, - String namespace, - String caCertSecretName, - String truststoreSecretName - ) throws InterruptedException { + public static void createTruststore(ExtensionContext testContext, String namespace, + String caCertSecretName, String truststoreSecretName) throws InterruptedException { LOGGER.info("Preparing truststore..."); String timestamp = String.valueOf(Instant.now().getEpochSecond()); @@ -131,21 +100,19 @@ public static void createTruststore( runTruststoreCmd(truststorePath, truststorePassword, caPath); - Map secretData = new HashMap<>() {{ - put("ca.p12", encode(truststorePath)); - put("ca.password", encode(truststorePassword)); - }}; + Map secretData = new HashMap<>() { + { + put("ca.p12", encode(truststorePath)); + put("ca.password", encode(truststorePassword)); + } + }; createSecret(testContext, namespace, truststoreSecretName, secretData); } - public static void createKeystore( - ExtensionContext testContext, - String namespace, - String clientCertSecretName, - String keystoreSecretName, - String hostname - ) throws InterruptedException { + public static void createKeystore(ExtensionContext testContext, String namespace, + String clientCertSecretName, String keystoreSecretName, String hostname) + throws InterruptedException { LOGGER.info("Preparing keystore..."); String timestamp = String.valueOf(Instant.now().getEpochSecond()); @@ -162,10 +129,12 @@ public static void createKeystore( runKeystoreCmd(keystorePath, keystorePassword, 
userCertPath, userKeyPath, hostname); - Map secretData = new HashMap<>() {{ - put("user.p12", encode(keystorePath)); - put("user.password", encode(keystorePassword)); - }}; + Map secretData = new HashMap<>() { + { + put("user.p12", encode(keystorePath)); + put("user.password", encode(keystorePassword)); + } + }; createSecret(testContext, namespace, keystoreSecretName, secretData); } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/Constants.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/Constants.java index c25cb3c03e..af0d9194f5 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/Constants.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/Constants.java @@ -1,13 +1,13 @@ package io.apicurio.registry.systemtests.framework; - public class Constants { public static final String CATALOG_NAME = "testsuite-operators"; public static final String CATALOG_NAMESPACE = "openshift-marketplace"; public static final String KAFKA_CONNECT = "kafka-connect-for-registry"; public static final String KAFKA = "kafka-for-registry"; public static final String KAFKA_USER = "kafka-user-for-registry"; - public static final String REGISTRY_OPERATOR_DEPLOYMENT = "apicurio-registry-operator"; // Default from catalog + public static final String REGISTRY_OPERATOR_DEPLOYMENT = "apicurio-registry-operator"; // Default from + // catalog public static final String REGISTRY = "registry"; public static final String SSO_CLIENT_API = "registry-client-api"; // Defined in kubefiles/keycloak public static final String SSO_CLIENT_UI = "registry-client-ui"; // Defined in kubefiles/keycloak diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/DatabaseUtils.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/DatabaseUtils.java index b9f7141b54..1e387c71f2 100644 --- 
a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/DatabaseUtils.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/DatabaseUtils.java @@ -11,7 +11,8 @@ public class DatabaseUtils { public static void deployDefaultPostgresqlDatabase(ExtensionContext testContext) { - PersistentVolumeClaim persistentVolumeClaim = PersistentVolumeClaimResourceType.getDefaultPostgresql(); + PersistentVolumeClaim persistentVolumeClaim = PersistentVolumeClaimResourceType + .getDefaultPostgresql(); Deployment deployment = DeploymentResourceType.getDefaultPostgresql(); Service service = ServiceResourceType.getDefaultPostgresql(); @@ -25,10 +26,8 @@ public static void deployDefaultPostgresqlDatabase(ExtensionContext testContext) } public static void deployPostgresqlDatabase(ExtensionContext testContext, String name, String namespace) { - PersistentVolumeClaim persistentVolumeClaim = PersistentVolumeClaimResourceType.getDefaultPostgresql( - name, - namespace - ); + PersistentVolumeClaim persistentVolumeClaim = PersistentVolumeClaimResourceType + .getDefaultPostgresql(name, namespace); Deployment deployment = DeploymentResourceType.getDefaultPostgresql(name, namespace); Service service = ServiceResourceType.getDefaultPostgresql(name, namespace); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/Environment.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/Environment.java index 50c6ccc167..90849a1bc1 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/Environment.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/Environment.java @@ -27,9 +27,8 @@ public final class Environment { public static final String SSO_CATALOG_DEFAULT = "community-operators"; public static final String KAFKA_DEPLOYMENT_DEFAULT = "strimzi-cluster-operator"; // Default from catalog public static final String KAFKA_PACKAGE_DEFAULT = 
"strimzi-kafka-operator"; // Default from catalog - public static final String REGISTRY_BUNDLE_DEFAULT = - "https://raw.githubusercontent.com/Apicurio/apicurio-registry-operator/main/install/" + - "apicurio-registry-operator-1.0.0-v2.0.0.final.yaml"; + public static final String REGISTRY_BUNDLE_DEFAULT = "https://raw.githubusercontent.com/Apicurio/apicurio-registry-operator/main/install/" + + "apicurio-registry-operator-1.0.0-v2.0.0.final.yaml"; public static final String REGISTRY_PACKAGE_DEFAULT = "apicurio-registry"; // Default from catalog public static final String SSO_PACKAGE_DEFAULT = "keycloak-operator"; // Default from catalog public static final String TESTSUITE_PATH_DEFAULT = System.getProperty("user.dir"); @@ -40,27 +39,30 @@ public final class Environment { public static final String CATALOG = getOrDefault(CATALOG_ENV_VAR, CATALOG_DEFAULT); public static final String SSO_CATALOG = getOrDefault(SSO_CATALOG_ENV_VAR, SSO_CATALOG_DEFAULT); - public static final String CATALOG_NAMESPACE = getOrDefault(CATALOG_NAMESPACE_ENV_VAR, Constants.CATALOG_NAMESPACE); + public static final String CATALOG_NAMESPACE = getOrDefault(CATALOG_NAMESPACE_ENV_VAR, + Constants.CATALOG_NAMESPACE); public static final String CONVERTERS_SHA512SUM = get(CONVERTERS_SHA512SUM_ENV_VAR); public static final String CONVERTERS_URL = get(CONVERTERS_URL_ENV_VAR); - public static final String KAFKA_DEPLOYMENT = getOrDefault(KAFKA_DEPLOYMENT_ENV_VAR, KAFKA_DEPLOYMENT_DEFAULT); + public static final String KAFKA_DEPLOYMENT = getOrDefault(KAFKA_DEPLOYMENT_ENV_VAR, + KAFKA_DEPLOYMENT_DEFAULT); public static final String KAFKA_PACKAGE = getOrDefault(KAFKA_PACKAGE_ENV_VAR, KAFKA_PACKAGE_DEFAULT); - public static final String REGISTRY_BUNDLE = getOrDefault(REGISTRY_BUNDLE_ENV_VAR, REGISTRY_BUNDLE_DEFAULT); - public static final String REGISTRY_PACKAGE = getOrDefault(REGISTRY_PACKAGE_ENV_VAR, REGISTRY_PACKAGE_DEFAULT); + public static final String REGISTRY_BUNDLE = 
getOrDefault(REGISTRY_BUNDLE_ENV_VAR, + REGISTRY_BUNDLE_DEFAULT); + public static final String REGISTRY_PACKAGE = getOrDefault(REGISTRY_PACKAGE_ENV_VAR, + REGISTRY_PACKAGE_DEFAULT); public static final String SSO_PACKAGE = getOrDefault(SSO_PACKAGE_ENV_VAR, SSO_PACKAGE_DEFAULT); public static final String TESTSUITE_PATH = getOrDefault(TESTSUITE_PATH_ENV_VAR, TESTSUITE_PATH_DEFAULT); public static final String TMP_PATH = getOrDefault(TMP_PATH_ENV_VAR, TMP_PATH_DEFAULT); public static final String NAMESPACE = getOrDefault(FORCE_NAMESPACE, Constants.TESTSUITE_NAMESPACE); - public static final String KAFKA_BUNDLE_DEFAULT = - "https://strimzi.io/install/latest?namespace=" + NAMESPACE; + public static final String KAFKA_BUNDLE_DEFAULT = "https://strimzi.io/install/latest?namespace=" + + NAMESPACE; public static final String KAFKA_BUNDLE = getOrDefault(KAFKA_BUNDLE_ENV_VAR, KAFKA_BUNDLE_DEFAULT); - public static final String CLUSTER_WIDE_NAMESPACE = "openshift-operators"; - + public static final String CLUSTER_WIDE_NAMESPACE = "openshift-operators"; private static String get(String key) { return System.getenv().get(key); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/HttpClientUtils.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/HttpClientUtils.java index 527df6cb4a..daeb7e601b 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/HttpClientUtils.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/HttpClientUtils.java @@ -18,8 +18,7 @@ public static URI buildURI(String uri, Object... 
args) { public static HttpResponse processRequest(HttpRequest request) { try { - return HttpClient.newHttpClient() - .send(request, HttpResponse.BodyHandlers.ofString()); + return HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString()); } catch (IOException | InterruptedException e) { throw new RuntimeException(e); } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/KafkaUtils.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/KafkaUtils.java index 09fc3ab3a8..aca85fc201 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/KafkaUtils.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/KafkaUtils.java @@ -43,7 +43,8 @@ private static boolean waitSecretReady(String namespace, String name, TimeoutBud return true; } - public static void createSecuredUser(ExtensionContext testContext, String username, Kafka kafka, KafkaKind kind) throws InterruptedException { + public static void createSecuredUser(ExtensionContext testContext, String username, Kafka kafka, + KafkaKind kind) throws InterruptedException { String namespace = kafka.getMetadata().getNamespace(); String kafkaName = kafka.getMetadata().getName(); String kafkaCaSecretName = kafkaName + "-cluster-ca-cert"; @@ -55,13 +56,12 @@ public static void createSecuredUser(ExtensionContext testContext, String userna LOGGER.error("Secret with name {} is not present in namespace {}.", kafkaCaSecretName, namespace); } - ResourceManager.getInstance().createResource( - true, - KafkaUserResourceType.getDefaultByKind(username, namespace, kafkaName, kind) - ); + ResourceManager.getInstance().createResource(true, + KafkaUserResourceType.getDefaultByKind(username, namespace, kafkaName, kind)); } - public static Kafka deployDefaultKafkaByKind(ExtensionContext testContext, KafkaKind kind) throws InterruptedException { + public static Kafka deployDefaultKafkaByKind(ExtensionContext testContext, 
KafkaKind kind) + throws InterruptedException { Kafka kafka = KafkaResourceType.getDefaultByKind(kind); ResourceManager.getInstance().createResource(true, kafka); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/KeycloakUtils.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/KeycloakUtils.java index 6aefca97da..1001f4f43d 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/KeycloakUtils.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/KeycloakUtils.java @@ -38,12 +38,8 @@ public static void deployKeycloak() throws InterruptedException { private static void deployKeycloakPostgres(String namespace) throws URISyntaxException { URL dtb = KeycloakUtils.class.getClassLoader().getResource("postgres.yaml"); - Exec.executeAndCheck( - "oc", - "apply", - "-n", namespace, - "-f", Paths.get(dtb.toURI()).toFile().toString() - ); + Exec.executeAndCheck("oc", "apply", "-n", namespace, "-f", + Paths.get(dtb.toURI()).toFile().toString()); ResourceUtils.waitStatefulSetReady(namespace, "postgresql-db"); } @@ -52,33 +48,24 @@ public static void deployKeycloak(String namespace) throws InterruptedException LOGGER.info("Deploying Keycloak..."); ResourceManager manager = ResourceManager.getInstance(); // Deploy Keycloak server - Exec.executeAndCheck( - "oc", - "apply", - "-n", namespace, - "-f", getKeycloakFilePath("keycloak.yaml") - ); + Exec.executeAndCheck("oc", "apply", "-n", namespace, "-f", getKeycloakFilePath("keycloak.yaml")); // Wait for Keycloak server to be ready ResourceUtils.waitStatefulSetReady(namespace, "keycloak"); // Create Keycloak HTTP Service and wait for its readiness - manager.createSharedResource( true, ServiceResourceType.getDefaultKeycloakHttp(namespace)); + manager.createSharedResource(true, ServiceResourceType.getDefaultKeycloakHttp(namespace)); // Create Keycloak Route and wait for its readiness - manager.createSharedResource( true, 
RouteResourceType.getDefaultKeycloak(namespace)); + manager.createSharedResource(true, RouteResourceType.getDefaultKeycloak(namespace)); // Log Keycloak URL LOGGER.info("Keycloak URL: {}", getDefaultKeycloakURL(namespace)); // TODO: Wait for Keycloak Realm readiness, but API model not available // Create Keycloak Realm - Exec.executeAndCheck( - "oc", - "apply", - "-n", namespace, - "-f", getKeycloakFilePath("keycloak-realm.yaml") - ); + Exec.executeAndCheck("oc", "apply", "-n", namespace, "-f", + getKeycloakFilePath("keycloak-realm.yaml")); LOGGER.info("Keycloak should be deployed."); } @@ -86,24 +73,15 @@ public static void deployKeycloak(String namespace) throws InterruptedException public static void removeKeycloakRealm(String namespace) { LOGGER.info("Removing keycloak realm"); - Exec.executeAndCheck( - "oc", - "delete", - "-n", namespace, - "-f", getKeycloakFilePath("keycloak-realm.yaml") - ); + Exec.executeAndCheck("oc", "delete", "-n", namespace, "-f", + getKeycloakFilePath("keycloak-realm.yaml")); } public static void removeKeycloak(String namespace) throws InterruptedException { removeKeycloakRealm(namespace); Thread.sleep(Duration.ofMinutes(2).toMillis()); LOGGER.info("Removing Keycloak..."); - Exec.executeAndCheck( - "oc", - "delete", - "-n", namespace, - "-f", getKeycloakFilePath("keycloak.yaml") - ); + Exec.executeAndCheck("oc", "delete", "-n", namespace, "-f", getKeycloakFilePath("keycloak.yaml")); LOGGER.info("Keycloak should be removed."); } @@ -138,15 +116,17 @@ private static HttpRequest.BodyPublisher ofFormData(Map data) { public static String getAccessToken(ApicurioRegistry apicurioRegistry, String username, String password) { // Get Keycloak URL of Apicurio Registry - String keycloakUrl = apicurioRegistry.getSpec().getConfiguration().getSecurity().getKeycloak().getUrl(); + String keycloakUrl = apicurioRegistry.getSpec().getConfiguration().getSecurity().getKeycloak() + .getUrl(); // Get Keycloak Realm of Apicurio Registry - String keycloakRealm 
= apicurioRegistry.getSpec().getConfiguration().getSecurity().getKeycloak().getRealm(); + String keycloakRealm = apicurioRegistry.getSpec().getConfiguration().getSecurity().getKeycloak() + .getRealm(); // Construct token API URI of Keycloak Realm - URI keycloakRealmUrl = HttpClientUtils.buildURI( - "%s/realms/%s/protocol/openid-connect/token", keycloakUrl, keycloakRealm - ); + URI keycloakRealmUrl = HttpClientUtils.buildURI("%s/realms/%s/protocol/openid-connect/token", + keycloakUrl, keycloakRealm); // Get Keycloak API client ID of Apicurio Registry - String clientId = apicurioRegistry.getSpec().getConfiguration().getSecurity().getKeycloak().getApiClientId(); + String clientId = apicurioRegistry.getSpec().getConfiguration().getSecurity().getKeycloak() + .getApiClientId(); // Prepare request data Map data = new HashMap<>(); @@ -158,11 +138,8 @@ public static String getAccessToken(ApicurioRegistry apicurioRegistry, String us LOGGER.info("Requesting access token from {}...", keycloakRealmUrl); // Create request - HttpRequest request = HttpClientUtils.newBuilder() - .uri(keycloakRealmUrl) - .header("Content-Type", "application/x-www-form-urlencoded") - .POST(ofFormData(data)) - .build(); + HttpRequest request = HttpClientUtils.newBuilder().uri(keycloakRealmUrl) + .header("Content-Type", "application/x-www-form-urlencoded").POST(ofFormData(data)).build(); // Process request HttpResponse response = HttpClientUtils.processRequest(request); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/OperatorUtils.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/OperatorUtils.java index b2f55c0e9a..b6e3a1e64c 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/OperatorUtils.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/OperatorUtils.java @@ -31,11 +31,8 @@ public class OperatorUtils { private static final Logger LOGGER = LoggerUtils.getLogger(); public static 
List listFiles(Path directory) throws IOException { - return Files.list(directory) - .filter(file -> !Files.isDirectory(file)) - .map(Path::getFileName) - .map(Path::toString) - .collect(Collectors.toList()); + return Files.list(directory).filter(file -> !Files.isDirectory(file)).map(Path::getFileName) + .map(Path::toString).collect(Collectors.toList()); } public static Deployment findDeployment(List resourceList) { @@ -56,7 +53,8 @@ public static void downloadFile(String source, Path destination) throws Exceptio } } - public static boolean waitPodsExist(String namespace, String labelKey, String labelValue, TimeoutBudget timeout) { + public static boolean waitPodsExist(String namespace, String labelKey, String labelValue, + TimeoutBudget timeout) { while (!timeout.timeoutExpired()) { if (Kubernetes.getPods(namespace, labelKey, labelValue).getItems().size() > 0) { return true; @@ -72,10 +70,8 @@ public static boolean waitPodsExist(String namespace, String labelKey, String la } if (Kubernetes.getPods(namespace, labelKey, labelValue).getItems().size() == 0) { - LOGGER.error( - "Pod(s) of catalog source in namespace {} with label {}={} failed creation check.", - namespace, labelKey, labelValue - ); + LOGGER.error("Pod(s) of catalog source in namespace {} with label {}={} failed creation check.", + namespace, labelKey, labelValue); return false; } @@ -84,7 +80,8 @@ public static boolean waitPodsExist(String namespace, String labelKey, String la } public static boolean waitPodsExist(String namespace, String labelKey, String labelValue) { - return waitPodsExist(namespace, labelKey, labelValue, TimeoutBudget.ofDuration(Duration.ofMinutes(3))); + return waitPodsExist(namespace, labelKey, labelValue, + TimeoutBudget.ofDuration(Duration.ofMinutes(3))); } private static boolean collectPodsReadiness(PodList podList) { @@ -94,11 +91,8 @@ private static boolean collectPodsReadiness(PodList podList) { for (Pod p : podList.getItems()) { boolean podReady = false; - if ( - 
p.getStatus() != null - && p.getStatus().getContainerStatuses() != null - && p.getStatus().getContainerStatuses().size() > 0 - ) { + if (p.getStatus() != null && p.getStatus().getContainerStatuses() != null + && p.getStatus().getContainerStatuses().size() > 0) { podReady = p.getStatus().getContainerStatuses().get(0).getReady(); } @@ -111,7 +105,8 @@ private static boolean collectPodsReadiness(PodList podList) { return false; } - public static boolean waitPodsReady(String namespace, String labelKey, String labelValue, TimeoutBudget timeout) { + public static boolean waitPodsReady(String namespace, String labelKey, String labelValue, + TimeoutBudget timeout) { while (!timeout.timeoutExpired()) { if (collectPodsReadiness(Kubernetes.getPods(namespace, labelKey, labelValue))) { return true; @@ -127,10 +122,8 @@ public static boolean waitPodsReady(String namespace, String labelKey, String la } if (!collectPodsReadiness(Kubernetes.getPods(namespace, labelKey, labelValue))) { - LOGGER.error( - "Pod(s) of catalog source in namespace {} with label {}={} failed readiness check.", - namespace, labelKey, labelValue - ); + LOGGER.error("Pod(s) of catalog source in namespace {} with label {}={} failed readiness check.", + namespace, labelKey, labelValue); return false; } @@ -139,7 +132,8 @@ public static boolean waitPodsReady(String namespace, String labelKey, String la } public static boolean waitPodsReady(String namespace, String labelKey, String labelValue) { - return waitPodsReady(namespace, labelKey, labelValue, TimeoutBudget.ofDuration(Duration.ofMinutes(3))); + return waitPodsReady(namespace, labelKey, labelValue, + TimeoutBudget.ofDuration(Duration.ofMinutes(3))); } public static boolean waitCatalogSourceExists(String namespace, String name, TimeoutBudget timeout) { @@ -158,7 +152,8 @@ public static boolean waitCatalogSourceExists(String namespace, String name, Tim } if (Kubernetes.getCatalogSource(namespace, name) == null) { - LOGGER.error("Catalog source in namespace {} 
with name {} failed creation check.", namespace, name); + LOGGER.error("Catalog source in namespace {} with name {} failed creation check.", namespace, + name); return false; } @@ -186,7 +181,8 @@ public static boolean waitCatalogSourceReady(String namespace, String name, Time } if (!Kubernetes.isCatalogSourceReady(namespace, name)) { - LOGGER.error("Catalog source in namespace {} with name {} failed readiness check.", namespace, name); + LOGGER.error("Catalog source in namespace {} with name {} failed readiness check.", namespace, + name); return false; } @@ -201,17 +197,12 @@ public static boolean waitCatalogSourceReady(String namespace, String name) { public static OperatorGroup createOperatorGroup(String namespace) throws InterruptedException { String name = namespace + "-operator-group"; - LOGGER.info("Creating operator group {} in namespace {} targeting namespace {}...", name, namespace, namespace); + LOGGER.info("Creating operator group {} in namespace {} targeting namespace {}...", name, namespace, + namespace); - OperatorGroup operatorGroup = new OperatorGroupBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withTargetNamespaces(namespace) - .endSpec() - .build(); + OperatorGroup operatorGroup = new OperatorGroupBuilder().withNewMetadata().withName(name) + .withNamespace(namespace).endMetadata().withNewSpec().withTargetNamespaces(namespace) + .endSpec().build(); ResourceManager.getInstance().createSharedResource(true, operatorGroup); @@ -222,9 +213,8 @@ public static void deleteOperatorGroup(OperatorGroup operatorGroup) { String name = operatorGroup.getMetadata().getName(); String namespace = operatorGroup.getMetadata().getNamespace(); List targetNamespaces = operatorGroup.getSpec().getTargetNamespaces(); - String info = MessageFormat.format( - "{0} in namespace {1} targeting namespaces {2}", name, namespace, targetNamespaces - ); + String info = MessageFormat.format("{0} in namespace {1} 
targeting namespaces {2}", name, namespace, + targetNamespaces); if (Kubernetes.getOperatorGroup(namespace, name) == null) { LOGGER.info("Operator group {} already removed.", info); @@ -244,11 +234,10 @@ public static void deleteSubscription(Subscription subscription) { String startingCSV = spec.getStartingCSV(); String info = MessageFormat.format( - "{0} in namespace {1}: packageName={2}, catalogSourceName={3}, catalogSourceNamespace={4}, " + - "startingCSV={5}, channel={6}, installPlanApproval={7}", - name, namespace, spec.getName(), spec.getSource(), spec.getSourceNamespace(), - startingCSV, spec.getChannel(), spec.getInstallPlanApproval() - ); + "{0} in namespace {1}: packageName={2}, catalogSourceName={3}, catalogSourceNamespace={4}, " + + "startingCSV={5}, channel={6}, installPlanApproval={7}", + name, namespace, spec.getName(), spec.getSource(), spec.getSourceNamespace(), startingCSV, + spec.getChannel(), spec.getInstallPlanApproval()); if (Kubernetes.getSubscription(namespace, name) == null) { LOGGER.info("Subscription {} already removed.", info); @@ -261,12 +250,14 @@ public static void deleteSubscription(Subscription subscription) { public static void deleteClusterServiceVersion(String namespace, String clusterServiceVersion) { if (clusterServiceVersion != null && !clusterServiceVersion.equals("")) { - LOGGER.info("Removing ClusterServiceVersion {} in namespace {}...", clusterServiceVersion, namespace); + LOGGER.info("Removing ClusterServiceVersion {} in namespace {}...", clusterServiceVersion, + namespace); Kubernetes.deleteClusterServiceVersion(namespace, clusterServiceVersion); if (Kubernetes.getClusterServiceVersion(namespace, clusterServiceVersion) == null) { - LOGGER.info("ClusterServiceVersion {} in namespace {} removed.", clusterServiceVersion, namespace); + LOGGER.info("ClusterServiceVersion {} in namespace {} removed.", clusterServiceVersion, + namespace); } } } diff --git 
a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/ResourceUtils.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/ResourceUtils.java index 9e620f3e15..c821c7c7f8 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/ResourceUtils.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/framework/ResourceUtils.java @@ -46,7 +46,8 @@ public static boolean waitPackageManifestExists(String catalog, String name) { return waitPackageManifestExists(catalog, name, TimeoutBudget.ofDuration(Duration.ofMinutes(5))); } - public static boolean waitPackageManifestExists(String catalog, String name, TimeoutBudget timeoutBudget) { + public static boolean waitPackageManifestExists(String catalog, String name, + TimeoutBudget timeoutBudget) { while (!timeoutBudget.timeoutExpired()) { if (Kubernetes.getPackageManifest(catalog, name) != null) { return true; @@ -76,7 +77,7 @@ public static void updateRoleBindingNamespace(List resources, Strin // If resource is RoleBinding if (resource.getKind().equals("RoleBinding")) { // Iterate over all subjects in this RoleBinding - for (Subject s: ((RoleBinding) resource).getSubjects()) { + for (Subject s : ((RoleBinding) resource).getSubjects()) { // Change namespace of subject to operator namespace s.setNamespace(namespace); } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/OperatorManager.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/OperatorManager.java index 6b580911cf..712bfc33cb 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/OperatorManager.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/OperatorManager.java @@ -56,7 +56,8 @@ public void installOperator(OperatorType operatorType, boolean waitReady) throws String kind = operatorType.getKind().toString(); String name = operatorType.getDeploymentName(); String namespace = 
operatorType.getNamespaceName(); - String operatorInfo = MessageFormat.format("{0} with name {1} in namespace {2}", kind, name, namespace); + String operatorInfo = MessageFormat.format("{0} with name {1} in namespace {2}", kind, name, + namespace); if (Kubernetes.getNamespace(namespace) == null) { createOperatorNamespace(namespace); @@ -73,10 +74,8 @@ public void installOperator(OperatorType operatorType, boolean waitReady) throws if (waitReady) { LOGGER.info("Waiting for operator {} to be ready...", operatorInfo); - Assertions.assertTrue( - waitOperatorReady(operatorType), - MessageFormat.format("Timed out waiting for operator {0} to be ready.", operatorInfo) - ); + Assertions.assertTrue(waitOperatorReady(operatorType), + MessageFormat.format("Timed out waiting for operator {0} to be ready.", operatorInfo)); if (operatorType.isReady()) { LOGGER.info("Operator {} is ready.", operatorInfo); @@ -94,7 +93,8 @@ public void uninstallOperator(OperatorType operatorType, boolean waitRemoved) { String kind = operatorType.getKind().toString(); String name = operatorType.getDeploymentName(); String namespace = operatorType.getNamespaceName(); - String operatorInfo = MessageFormat.format("{0} with name {1} in namespace {2}", kind, name, namespace); + String operatorInfo = MessageFormat.format("{0} with name {1} in namespace {2}", kind, name, + namespace); LOGGER.info("Uninstalling operator {}...", operatorInfo); @@ -103,15 +103,13 @@ public void uninstallOperator(OperatorType operatorType, boolean waitRemoved) { if (waitRemoved) { LOGGER.info("Waiting for operator {} to be uninstalled...", operatorInfo); - Assertions.assertTrue( - waitOperatorRemoved(operatorType), - MessageFormat.format("Timed out waiting for operator {0} to be uninstalled.", operatorInfo) - ); + Assertions.assertTrue(waitOperatorRemoved(operatorType), MessageFormat + .format("Timed out waiting for operator {0} to be uninstalled.", operatorInfo)); if (operatorType.doesNotExist()) { LOGGER.info("Operator {} 
uninstalled.", operatorInfo); } - } else { + } else { LOGGER.info("Do not wait for operator {} to be uninstalled.", operatorInfo); } } @@ -162,10 +160,9 @@ public boolean waitOperatorReady(OperatorType operatorType, TimeoutBudget timeou } if (!operatorType.isReady()) { - LOGGER.error( - "Operator {} with name {} in namespace {} failed readiness check.", - operatorType.getKind(), operatorType.getDeploymentName(), operatorType.getNamespaceName() - ); + LOGGER.error("Operator {} with name {} in namespace {} failed readiness check.", + operatorType.getKind(), operatorType.getDeploymentName(), + operatorType.getNamespaceName()); return false; } @@ -193,10 +190,9 @@ public boolean waitOperatorRemoved(OperatorType operatorType, TimeoutBudget time } if (!operatorType.doesNotExist()) { - LOGGER.error( - "Operator {} with name {} in namespace {} failed removal check.", - operatorType.getKind(), operatorType.getDeploymentName(), operatorType.getNamespaceName() - ); + LOGGER.error("Operator {} with name {} in namespace {} failed removal check.", + operatorType.getKind(), operatorType.getDeploymentName(), + operatorType.getNamespaceName()); return false; } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/ApicurioRegistryBundleOperatorType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/ApicurioRegistryBundleOperatorType.java index 24159a9ee6..df9d452756 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/ApicurioRegistryBundleOperatorType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/ApicurioRegistryBundleOperatorType.java @@ -21,8 +21,7 @@ public void loadOperatorResourcesFromFile() throws Exception { if (getSource().startsWith("http://") || getSource().startsWith("https://")) { Path tmpPath = Environment.getTmpPath( - "apicurio-registry-bundle-operator-install-" + Instant.now().getEpochSecond() + ".yaml" - ); + 
"apicurio-registry-bundle-operator-install-" + Instant.now().getEpochSecond() + ".yaml"); LOGGER.info("Downloading file " + getSource() + " to " + tmpPath + "..."); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/ApicurioRegistryOLMOperatorType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/ApicurioRegistryOLMOperatorType.java index a362bcb592..0cdb3bb035 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/ApicurioRegistryOLMOperatorType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/ApicurioRegistryOLMOperatorType.java @@ -21,7 +21,6 @@ import java.text.MessageFormat; import java.time.Duration; - public class ApicurioRegistryOLMOperatorType extends OLMOperator implements OperatorType { protected static final Logger LOGGER = LoggerUtils.getLogger(); private CatalogSource catalogSource = null; @@ -31,35 +30,22 @@ public ApicurioRegistryOLMOperatorType() { } public ApicurioRegistryOLMOperatorType(boolean isClusterWide) { - super( - Environment.CATALOG_IMAGE, - isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : Environment.NAMESPACE, - isClusterWide - ); + super(Environment.CATALOG_IMAGE, + isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : Environment.NAMESPACE, isClusterWide); } public ApicurioRegistryOLMOperatorType(String source, boolean isClusterWide) { - super( - source, - isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : Environment.NAMESPACE, - isClusterWide - ); + super(source, isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : Environment.NAMESPACE, + isClusterWide); } public ApicurioRegistryOLMOperatorType(boolean isClusterWide, String operatorNamespace) { - super( - Environment.CATALOG_IMAGE, - isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : operatorNamespace, - isClusterWide - ); + super(Environment.CATALOG_IMAGE, + isClusterWide ? 
Environment.CLUSTER_WIDE_NAMESPACE : operatorNamespace, isClusterWide); } public ApicurioRegistryOLMOperatorType(String source, boolean isClusterWide, String operatorNamespace) { - super( - source, - isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : operatorNamespace, - isClusterWide - ); + super(source, isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : operatorNamespace, isClusterWide); } /** @@ -67,7 +53,8 @@ public ApicurioRegistryOLMOperatorType(String source, boolean isClusterWide, Str */ private void createCatalogSource(String namespace) throws InterruptedException { String name = Constants.CATALOG_NAME; - String info = MessageFormat.format("{0} in namespace {1} with image {2}", name, namespace, getSource()); + String info = MessageFormat.format("{0} in namespace {1} with image {2}", name, namespace, + getSource()); LOGGER.info("Creating catalog source {}...", info); @@ -160,15 +147,8 @@ public void install() throws InterruptedException { LOGGER.info("OLM operator CSV: {}", getClusterServiceVersion()); - setSubscription(SubscriptionResourceType.getDefault( - "registry-subscription", - getNamespace(), - registryPackage, - catalogName, - catalogNamespace, - getClusterServiceVersion(), - channelName - )); + setSubscription(SubscriptionResourceType.getDefault("registry-subscription", getNamespace(), + registryPackage, catalogName, catalogNamespace, getClusterServiceVersion(), channelName)); ResourceManager.getInstance().createResource(true, getSubscription()); @@ -213,7 +193,8 @@ public boolean waitReady() { } public void upgrade() throws InterruptedException { - LOGGER.info("Upgrading {} {} operator...", getClusterWide() ? "cluster wide" : "namespaced", getKind()); + LOGGER.info("Upgrading {} {} operator...", getClusterWide() ? 
"cluster wide" : "namespaced", + getKind()); // Get current subscription namespace String subNamespace = getSubscription().getMetadata().getNamespace(); @@ -222,9 +203,8 @@ public void upgrade() throws InterruptedException { // Get namespace of current's subscription catalog String catalogNamespace = getSubscription().getSpec().getSourceNamespace(); - LOGGER.info( - "CSV before upgrade: {}", Kubernetes.getSubscription(subNamespace, subName).getStatus().getCurrentCSV() - ); + LOGGER.info("CSV before upgrade: {}", + Kubernetes.getSubscription(subNamespace, subName).getStatus().getCurrentCSV()); // Update operator source (set it to image with catalog) setSource(Environment.CATALOG_IMAGE); @@ -239,13 +219,10 @@ public void upgrade() throws InterruptedException { Kubernetes.createOrReplaceSubscription(subNamespace, getSubscription()); // Wait for update of subscription (it points to CSV from new catalog source) - Assertions.assertTrue( - waitSubscriptionCurrentCSV(catalogSource.getMetadata().getName()), + Assertions.assertTrue(waitSubscriptionCurrentCSV(catalogSource.getMetadata().getName()), MessageFormat.format( "Timed out waiting for subscription {0} to have new current ClusterServiceVersion.", - MessageFormat.format("{0} in namespace {1}", subName, subNamespace) - ) - ); + MessageFormat.format("{0} in namespace {1}", subName, subNamespace))); // Get updated subscription Subscription newSubscription = Kubernetes.getSubscription(subNamespace, subName); @@ -261,10 +238,8 @@ public void upgrade() throws InterruptedException { setClusterServiceVersion(newCSV); // Wait for creation of new CSV and its readiness - Assertions.assertTrue( - waitClusterServiceVersionReady(), - MessageFormat.format("New CSV {0} failed readiness check.", newCSV) - ); + Assertions.assertTrue(waitClusterServiceVersionReady(), + MessageFormat.format("New CSV {0} failed readiness check.", newCSV)); // Wait for operator readiness Assertions.assertTrue(waitReady(), "Operator failed readiness check 
after upgrade."); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/KeycloakOLMOperatorType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/KeycloakOLMOperatorType.java index fc1987b1c4..e7e6d929fc 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/KeycloakOLMOperatorType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/KeycloakOLMOperatorType.java @@ -67,15 +67,8 @@ public void install() throws InterruptedException { LOGGER.info("OLM operator CSV: {}", getClusterServiceVersion()); - setSubscription(SubscriptionResourceType.getDefault( - "sso-subscription", - getNamespace(), - ssoPackage, - catalogName, - catalogNamespace, - getClusterServiceVersion(), - channelName - )); + setSubscription(SubscriptionResourceType.getDefault("sso-subscription", getNamespace(), ssoPackage, + catalogName, catalogNamespace, getClusterServiceVersion(), channelName)); ResourceManager.getInstance().createSharedResource(true, getSubscription()); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/OLMOperator.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/OLMOperator.java index f155368053..3997ec383c 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/OLMOperator.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/OLMOperator.java @@ -58,7 +58,8 @@ public boolean waitSubscriptionCurrentCSV(String catalog, TimeoutBudget timeout) String expectedCSV = OperatorUtils.getCurrentCSV(catalog, Environment.REGISTRY_PACKAGE, channel); String subscriptionNamespace = subscription.getMetadata().getNamespace(); String subscriptionName = subscription.getMetadata().getName(); - Subscription operatorSubscription = Kubernetes.getSubscription(subscriptionNamespace, subscriptionName); + Subscription operatorSubscription = 
Kubernetes.getSubscription(subscriptionNamespace, + subscriptionName); while (!timeout.timeoutExpired()) { if (operatorSubscription.getStatus().getCurrentCSV().equals(expectedCSV)) { diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/OperatorKind.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/OperatorKind.java index 8831c26c8d..dac80df659 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/OperatorKind.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/OperatorKind.java @@ -1,9 +1,5 @@ package io.apicurio.registry.systemtests.operator.types; public enum OperatorKind { - REGISTRY_BUNDLE_OPERATOR, - REGISTRY_OLM_OPERATOR, - STRIMZI_BUNDLE_OPERATOR, - STRIMZI_OLM_OPERATOR, - KEYCLOAK_OLM_OPERATOR + REGISTRY_BUNDLE_OPERATOR, REGISTRY_OLM_OPERATOR, STRIMZI_BUNDLE_OPERATOR, STRIMZI_OLM_OPERATOR, KEYCLOAK_OLM_OPERATOR } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/StrimziClusterBundleOperatorType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/StrimziClusterBundleOperatorType.java index ca2bfb1a47..4c2695c89b 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/StrimziClusterBundleOperatorType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/StrimziClusterBundleOperatorType.java @@ -35,8 +35,7 @@ public void loadOperatorResources() throws Exception { // Get path to clone of repo Path clonePath = Environment.getTmpPath("strimzi-bundle-repo-" + timestamp); // Clone repo from repo URL to clone repo path - Git.cloneRepository() - .setURI(repoUrl) // Repo URL + Git.cloneRepository().setURI(repoUrl) // Repo URL .setDirectory(clonePath.toFile()) // Repo clone path .call(); // Run cloning diff --git 
a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/StrimziClusterOLMOperatorType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/StrimziClusterOLMOperatorType.java index 6ab9204145..f1b6bfcd13 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/StrimziClusterOLMOperatorType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/operator/types/StrimziClusterOLMOperatorType.java @@ -20,19 +20,12 @@ public StrimziClusterOLMOperatorType() { } public StrimziClusterOLMOperatorType(boolean isClusterWide) { - super( - null, - isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : Environment.NAMESPACE, - isClusterWide - ); + super(null, isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : Environment.NAMESPACE, + isClusterWide); } public StrimziClusterOLMOperatorType(String source, String operatorNamespace, boolean isClusterWide) { - super( - source, - isClusterWide ? Environment.CLUSTER_WIDE_NAMESPACE : operatorNamespace, - isClusterWide - ); + super(source, isClusterWide ? 
Environment.CLUSTER_WIDE_NAMESPACE : operatorNamespace, isClusterWide); } @Override @@ -52,7 +45,8 @@ public String getDeploymentName() { @Override public Deployment getDeployment() { - return Kubernetes.getDeploymentByPrefix(getSubscription().getMetadata().getNamespace(), getDeploymentName()); + return Kubernetes.getDeploymentByPrefix(getSubscription().getMetadata().getNamespace(), + getDeploymentName()); } @Override @@ -64,9 +58,11 @@ public void install() throws InterruptedException { String kafkaPackage = Environment.KAFKA_PACKAGE; if (getClusterWide()) { - LOGGER.info("Installing cluster wide OLM operator {} in namespace {}...", getKind(), getNamespace()); + LOGGER.info("Installing cluster wide OLM operator {} in namespace {}...", getKind(), + getNamespace()); } else { - LOGGER.info("Installing namespaced OLM operator {} in namespace {}...", getKind(), getNamespace()); + LOGGER.info("Installing namespaced OLM operator {} in namespace {}...", getKind(), + getNamespace()); if (!Kubernetes.namespaceHasAnyOperatorGroup(getNamespace())) { setOperatorGroup(OperatorUtils.createOperatorGroup(getNamespace())); } @@ -79,17 +75,10 @@ public void install() throws InterruptedException { LOGGER.info("OLM operator CSV: {}", getClusterServiceVersion()); - setSubscription(SubscriptionResourceType.getDefault( - "kafka-subscription", - getNamespace(), - kafkaPackage, - catalogName, - catalogNamespace, - getClusterServiceVersion(), - channelName - )); - - ResourceManager.getInstance().createSharedResource( true, getSubscription()); + setSubscription(SubscriptionResourceType.getDefault("kafka-subscription", getNamespace(), + kafkaPackage, catalogName, catalogNamespace, getClusterServiceVersion(), channelName)); + + ResourceManager.getInstance().createSharedResource(true, getSubscription()); /* Waiting for operator deployment readiness is implemented in OperatorManager. 
*/ } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/platform/Kubernetes.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/platform/Kubernetes.java index b06d03ed69..7ebc9009ee 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/platform/Kubernetes.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/platform/Kubernetes.java @@ -42,9 +42,7 @@ public final class Kubernetes { private static KubernetesClient client; private Kubernetes() { - Config config = Config.autoConfigure( - System.getenv().getOrDefault("TEST_CLUSTER_CONTEXT", null) - ); + Config config = Config.autoConfigure(System.getenv().getOrDefault("TEST_CLUSTER_CONTEXT", null)); client = new DefaultOpenShiftClient(new OpenShiftConfig(config)); } @@ -96,89 +94,50 @@ public static List loadFromDirectory(Path path) { } public static void createOrReplaceResources(String namespace, Collection resourcesList) { - getClient() - .resourceList(resourcesList) - .inNamespace(namespace) - .createOrReplace(); + getClient().resourceList(resourcesList).inNamespace(namespace).createOrReplace(); } public static void deleteResources(String namespace, Collection resourcesList) { - getClient() - .resourceList(resourcesList) - .inNamespace(namespace) - .delete(); + getClient().resourceList(resourcesList).inNamespace(namespace).delete(); } public static Secret getSecret(String namespace, String name) { - return getClient() - .secrets() - .inNamespace(namespace) - .withName(name) - .get(); + return getClient().secrets().inNamespace(namespace).withName(name).get(); } public static void createSecret(String namespace, Secret secret) { - getClient() - .secrets() - .inNamespace(namespace) - .create(secret); + getClient().secrets().inNamespace(namespace).create(secret); } public static void createOrReplaceSecret(String namespace, Secret secret) { - getClient() - .secrets() - .inNamespace(namespace) - .createOrReplace(secret); + 
getClient().secrets().inNamespace(namespace).createOrReplace(secret); } public static void deleteSecret(String namespace, String name) { - getClient() - .secrets() - .inNamespace(namespace) - .withName(name) - .delete(); + getClient().secrets().inNamespace(namespace).withName(name).delete(); } public static StatefulSet getStatefulSet(String namespace, String name) { - return getClient() - .apps() - .statefulSets() - .inNamespace(namespace) - .withName(name) - .get(); + return getClient().apps().statefulSets().inNamespace(namespace).withName(name).get(); } public static void createCatalogSource(String namespace, CatalogSource catalogSource) { - ((OpenShiftClient) getClient()) - .operatorHub() - .catalogSources() - .inNamespace(namespace) + ((OpenShiftClient) getClient()).operatorHub().catalogSources().inNamespace(namespace) .create(catalogSource); } public static void createOrReplaceCatalogSource(String namespace, CatalogSource catalogSource) { - ((OpenShiftClient) getClient()) - .operatorHub() - .catalogSources() - .inNamespace(namespace) + ((OpenShiftClient) getClient()).operatorHub().catalogSources().inNamespace(namespace) .createOrReplace(catalogSource); } public static CatalogSource getCatalogSource(String namespace, String name) { - return ((OpenShiftClient) getClient()) - .operatorHub() - .catalogSources() - .inNamespace(namespace) - .withName(name) - .get(); + return ((OpenShiftClient) getClient()).operatorHub().catalogSources().inNamespace(namespace) + .withName(name).get(); } public static void deleteCatalogSource(String namespace, String name) { - ((OpenShiftClient) getClient()) - .operatorHub() - .catalogSources() - .inNamespace(namespace) - .withName(name) + ((OpenShiftClient) getClient()).operatorHub().catalogSources().inNamespace(namespace).withName(name) .delete(); } @@ -193,22 +152,15 @@ public static boolean isCatalogSourceReady(String namespace, String name) { } public static void createNamespace(Namespace namespace) { - Kubernetes.getClient() - 
.namespaces() - .create(namespace); + Kubernetes.getClient().namespaces().create(namespace); } public static void createOrReplaceNamespace(Namespace namespace) { - Kubernetes.getClient() - .namespaces() - .createOrReplace(namespace); + Kubernetes.getClient().namespaces().createOrReplace(namespace); } public static Namespace getNamespace(String name) { - return getClient() - .namespaces() - .withName(name) - .get(); + return getClient().namespaces().withName(name).get(); } public static Namespace getNamespace(Namespace namespace) { @@ -216,50 +168,30 @@ public static Namespace getNamespace(Namespace namespace) { } public static void deleteNamespace(String name) { - getClient() - .namespaces() - .withName(name) - .delete(); + getClient().namespaces().withName(name).delete(); } public static Route getRoute(String namespace, String name) { - return ((OpenShiftClient) getClient()) - .routes() - .inNamespace(namespace) - .withName(name) - .get(); + return ((OpenShiftClient) getClient()).routes().inNamespace(namespace).withName(name).get(); } public static Route getRoute(ApicurioRegistry apicurioRegistry) { - return ((OpenShiftClient) getClient()) - .routes() + return ((OpenShiftClient) getClient()).routes() .inNamespace(apicurioRegistry.getMetadata().getNamespace()) - .withLabels(Collections.singletonMap("app", apicurioRegistry.getMetadata().getName())) - .list() - .getItems() - .get(0); + .withLabels(Collections.singletonMap("app", apicurioRegistry.getMetadata().getName())).list() + .getItems().get(0); } public static void createRoute(String namespace, Route route) { - ((OpenShiftClient) getClient()) - .routes() - .inNamespace(namespace) - .create(route); + ((OpenShiftClient) getClient()).routes().inNamespace(namespace).create(route); } public static void createOrReplaceRoute(String namespace, Route route) { - ((OpenShiftClient) getClient()) - .routes() - .inNamespace(namespace) - .createOrReplace(route); + ((OpenShiftClient) 
getClient()).routes().inNamespace(namespace).createOrReplace(route); } public static void deleteRoute(String namespace, String name) { - ((OpenShiftClient) getClient()) - .routes() - .inNamespace(namespace) - .withName(name) - .delete(); + ((OpenShiftClient) getClient()).routes().inNamespace(namespace).withName(name).delete(); } public static boolean isRouteReady(String namespace, String name) { @@ -269,130 +201,76 @@ public static boolean isRouteReady(String namespace, String name) { return false; } - return route - .getStatus() - .getIngress() - .size() > 0; + return route.getStatus().getIngress().size() > 0; } public static PodList getPods(String namespace, String labelKey, String labelValue) { - return getClient() - .pods() - .inNamespace(namespace) - .withLabel(labelKey, labelValue) - .list(); + return getClient().pods().inNamespace(namespace).withLabel(labelKey, labelValue).list(); } public static void deletePods(String namespace, String labelKey, String labelValue) { - getClient() - .pods() - .inNamespace(namespace) - .withLabel(labelKey, labelValue) - .delete(); + getClient().pods().inNamespace(namespace).withLabel(labelKey, labelValue).delete(); } public static OperatorGroup getOperatorGroup(String namespace, String name) { - return ((OpenShiftClient) getClient()) - .operatorHub() - .operatorGroups() - .inNamespace(namespace) - .withName(name) - .get(); + return ((OpenShiftClient) getClient()).operatorHub().operatorGroups().inNamespace(namespace) + .withName(name).get(); } public static void createOperatorGroup(String namespace, OperatorGroup operatorGroup) { - ((OpenShiftClient) getClient()) - .operatorHub() - .operatorGroups() - .inNamespace(namespace) + ((OpenShiftClient) getClient()).operatorHub().operatorGroups().inNamespace(namespace) .create(operatorGroup); } public static void createOrReplaceOperatorGroup(String namespace, OperatorGroup operatorGroup) { - ((OpenShiftClient) getClient()) - .operatorHub() - .operatorGroups() - .inNamespace(namespace) 
+ ((OpenShiftClient) getClient()).operatorHub().operatorGroups().inNamespace(namespace) .createOrReplace(operatorGroup); } public static void deleteOperatorGroup(String namespace, String name) { - ((OpenShiftClient) getClient()) - .operatorHub() - .operatorGroups() - .inNamespace(namespace) - .withName(name) + ((OpenShiftClient) getClient()).operatorHub().operatorGroups().inNamespace(namespace).withName(name) .delete(); } public static void createSubscription(String namespace, Subscription subscription) { - ((OpenShiftClient) getClient()) - .operatorHub() - .subscriptions() - .inNamespace(namespace) + ((OpenShiftClient) getClient()).operatorHub().subscriptions().inNamespace(namespace) .create(subscription); } public static void createOrReplaceSubscription(String namespace, Subscription subscription) { - ((OpenShiftClient) getClient()) - .operatorHub() - .subscriptions() - .inNamespace(namespace) + ((OpenShiftClient) getClient()).operatorHub().subscriptions().inNamespace(namespace) .createOrReplace(subscription); } public static Subscription getSubscription(String namespace, String name) { - return ((OpenShiftClient) getClient()) - .operatorHub() - .subscriptions() - .inNamespace(namespace) - .withName(name) - .get(); + return ((OpenShiftClient) getClient()).operatorHub().subscriptions().inNamespace(namespace) + .withName(name).get(); } public static void deleteSubscription(String namespace, String name) { - ((OpenShiftClient) getClient()) - .operatorHub() - .subscriptions() - .inNamespace(namespace) - .withName(name) + ((OpenShiftClient) getClient()).operatorHub().subscriptions().inNamespace(namespace).withName(name) .delete(); } public static ClusterServiceVersion getClusterServiceVersion(String namespace, String name) { - return ((OpenShiftClient) getClient()) - .operatorHub() - .clusterServiceVersions() - .inNamespace(namespace) - .withName(name) - .get(); + return ((OpenShiftClient) getClient()).operatorHub().clusterServiceVersions().inNamespace(namespace) + 
.withName(name).get(); } public static boolean isClusterServiceVersionReady(String namespace, String name) { - ClusterServiceVersion csvToBeReady = ((OpenShiftClient) getClient()) - .operatorHub() - .clusterServiceVersions() - .inNamespace(namespace) - .withName(name) - .get(); + ClusterServiceVersion csvToBeReady = ((OpenShiftClient) getClient()).operatorHub() + .clusterServiceVersions().inNamespace(namespace).withName(name).get(); if (csvToBeReady == null || csvToBeReady.getStatus() == null) { return false; } - return csvToBeReady - .getStatus() - .getPhase() - .equals("Succeeded"); + return csvToBeReady.getStatus().getPhase().equals("Succeeded"); } public static void deleteClusterServiceVersion(String namespace, String name) { - ((OpenShiftClient) getClient()) - .operatorHub() - .clusterServiceVersions() - .inNamespace(namespace) - .withName(name) - .delete(); + ((OpenShiftClient) getClient()).operatorHub().clusterServiceVersions().inNamespace(namespace) + .withName(name).delete(); } public static String getRouteHost(String namespace, String name) { @@ -402,40 +280,23 @@ public static String getRouteHost(String namespace, String name) { return null; } - return route - .getStatus() - .getIngress() - .get(0) - .getHost(); + return route.getStatus().getIngress().get(0).getHost(); } public static String getSecretValue(String namespace, String name, String secretKey) { - return getSecret(namespace, name) - .getData() - .get(secretKey); + return getSecret(namespace, name).getData().get(secretKey); } public static PackageManifest getPackageManifest(String catalog, String name) { - return ((OpenShiftClient) Kubernetes.getClient()) - .operatorHub() - .packageManifests() - .list() - .getItems() - .stream() - .filter(p -> p.getMetadata().getName().equals(name)) - .filter(p -> p.getMetadata().getLabels().get("catalog").equals(catalog)) - .findFirst() + return ((OpenShiftClient) Kubernetes.getClient()).operatorHub().packageManifests().list().getItems() + .stream().filter(p 
-> p.getMetadata().getName().equals(name)) + .filter(p -> p.getMetadata().getLabels().get("catalog").equals(catalog)).findFirst() .orElse(null); } public static Deployment getDeployment(String namespace, String name) { try { - return getClient() - .apps() - .deployments() - .inNamespace(namespace) - .withName(name) - .get(); + return getClient().apps().deployments().inNamespace(namespace).withName(name).get(); } catch (Exception e) { return null; } @@ -443,111 +304,57 @@ public static Deployment getDeployment(String namespace, String name) { } public static Deployment getDeploymentByPrefix(String namespace, String prefix) { - return getClient() - .apps() - .deployments() - .inNamespace(namespace) - .list() - .getItems() - .stream() - .filter(d -> d.getMetadata().getName().startsWith(prefix)) - .findFirst() - .orElse(null); + return getClient().apps().deployments().inNamespace(namespace).list().getItems().stream() + .filter(d -> d.getMetadata().getName().startsWith(prefix)).findFirst().orElse(null); } public static void createDeployment(String namespace, Deployment deployment) { - getClient() - .apps() - .deployments() - .inNamespace(namespace) - .create(deployment); + getClient().apps().deployments().inNamespace(namespace).create(deployment); } public static void createOrReplaceDeployment(String namespace, Deployment deployment) { - getClient() - .apps() - .deployments() - .inNamespace(namespace) - .createOrReplace(deployment); + getClient().apps().deployments().inNamespace(namespace).createOrReplace(deployment); } public static void deleteDeployment(String namespace, String name) { - getClient() - .apps() - .deployments() - .inNamespace(namespace) - .withName(name) - .delete(); + getClient().apps().deployments().inNamespace(namespace).withName(name).delete(); } public static Service getService(String namespace, String name) { - return getClient() - .services() - .inNamespace(namespace) - .withName(name) - .get(); + return 
getClient().services().inNamespace(namespace).withName(name).get(); } public static void createService(String namespace, Service service) { - getClient() - .services() - .inNamespace(namespace) - .create(service); + getClient().services().inNamespace(namespace).create(service); } public static void createOrReplaceService(String namespace, Service service) { - getClient() - .services() - .inNamespace(namespace) - .createOrReplace(service); + getClient().services().inNamespace(namespace).createOrReplace(service); } public static void deleteService(String namespace, String name) { - getClient() - .services() - .inNamespace(namespace) - .withName(name) - .delete(); + getClient().services().inNamespace(namespace).withName(name).delete(); } public static boolean isServiceReady(String namespace, Map selector) { - return getClient() - .pods() - .inNamespace(namespace) - .withLabels(selector) - .list() - .getItems() - .size() > 0; + return getClient().pods().inNamespace(namespace).withLabels(selector).list().getItems().size() > 0; } public static PersistentVolumeClaim getPersistentVolumeClaim(String namespace, String name) { - return getClient() - .persistentVolumeClaims() - .inNamespace(namespace) - .withName(name) - .get(); + return getClient().persistentVolumeClaims().inNamespace(namespace).withName(name).get(); } public static void createPersistentVolumeClaim(String namespace, PersistentVolumeClaim volumeClaim) { - getClient() - .persistentVolumeClaims() - .inNamespace(namespace) - .create(volumeClaim); + getClient().persistentVolumeClaims().inNamespace(namespace).create(volumeClaim); } - public static void createOrReplacePersistentVolumeClaim(String namespace, PersistentVolumeClaim volumeClaim) { - getClient() - .persistentVolumeClaims() - .inNamespace(namespace) - .createOrReplace(volumeClaim); + public static void createOrReplacePersistentVolumeClaim(String namespace, + PersistentVolumeClaim volumeClaim) { + 
getClient().persistentVolumeClaims().inNamespace(namespace).createOrReplace(volumeClaim); } public static void deletePersistentVolumeClaim(String namespace, String name) { - getClient() - .persistentVolumeClaims() - .inNamespace(namespace) - .withName(name) - .delete(); + getClient().persistentVolumeClaims().inNamespace(namespace).withName(name).delete(); } public static boolean isStatefulSetReady(String namespace, String name) { @@ -566,19 +373,14 @@ public static boolean isStatefulSetReady(String namespace, String name) { return status.getReadyReplicas() > 0; } - public static MixedOperation, Resource> - getResources(Class tClass) { + public static MixedOperation, Resource> getResources( + Class tClass) { return Kubernetes.getClient().resources(tClass); } public static boolean namespaceHasAnyOperatorGroup(String name) { - int namespaceOperatorGroupsCount = ((OpenShiftClient) getClient()) - .operatorHub() - .operatorGroups() - .inNamespace(name) - .list() - .getItems() - .size(); + int namespaceOperatorGroupsCount = ((OpenShiftClient) getClient()).operatorHub().operatorGroups() + .inNamespace(name).list().getItems().size(); return namespaceOperatorGroupsCount > 0; } } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/ResourceManager.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/ResourceManager.java index b47a4e805e..8bf96cbc7d 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/ResourceManager.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/ResourceManager.java @@ -49,23 +49,12 @@ public static synchronized ResourceManager getInstance() { return instance; } - private final ResourceType[] resourceTypes = new ResourceType[]{ - new ApicurioRegistryResourceType(), - new NamespaceResourceType(), - new ServiceResourceType(), - new DeploymentResourceType(), - new PersistentVolumeClaimResourceType(), - new KafkaResourceType(), - 
new KafkaTopicResourceType(), - new KafkaUserResourceType(), - new KafkaConnectResourceType(), - new RouteResourceType(), - new SecretResourceType(), - new OperatorGroupResourceType(), - new SubscriptionResourceType(), - new CatalogSourceResourceType(), - new IngressResourceType() - }; + private final ResourceType[] resourceTypes = new ResourceType[] { new ApicurioRegistryResourceType(), + new NamespaceResourceType(), new ServiceResourceType(), new DeploymentResourceType(), + new PersistentVolumeClaimResourceType(), new KafkaResourceType(), new KafkaTopicResourceType(), + new KafkaUserResourceType(), new KafkaConnectResourceType(), new RouteResourceType(), + new SecretResourceType(), new OperatorGroupResourceType(), new SubscriptionResourceType(), + new CatalogSourceResourceType(), new IngressResourceType() }; public ResourceType findResourceType(T resource) { ResourceType result = null; @@ -81,13 +70,13 @@ public ResourceType findResourceType(T resource) { return result; } - public final void createResource( - boolean waitReady, T resource - ) throws InterruptedException { + public final void createResource(boolean waitReady, T resource) + throws InterruptedException { String kind = resource.getKind(); String name = resource.getMetadata().getName(); String namespace = resource.getMetadata().getNamespace(); - String resourceInfo = MessageFormat.format("{0} with name {1} in namespace {2}", kind, name, namespace); + String resourceInfo = MessageFormat.format("{0} with name {1} in namespace {2}", kind, name, + namespace); LOGGER.info("Creating resource {}...", resourceInfo); @@ -111,10 +100,8 @@ public final void createResource( Thread.sleep(Duration.ofMinutes(1).toMillis()); } if (waitReady) { - Assertions.assertTrue( - waitResourceCondition(resource, type::isReady), - MessageFormat.format("Timed out waiting for resource {0} to be ready.", resourceInfo) - ); + Assertions.assertTrue(waitResourceCondition(resource, type::isReady), + MessageFormat.format("Timed out 
waiting for resource {0} to be ready.", resourceInfo)); LOGGER.info("Resource {} is ready.", resourceInfo); @@ -125,13 +112,13 @@ public final void createResource( } } - public final void createSharedResource( - boolean waitReady, T resource - ) throws InterruptedException { + public final void createSharedResource(boolean waitReady, T resource) + throws InterruptedException { String kind = resource.getKind(); String name = resource.getMetadata().getName(); String namespace = resource.getMetadata().getNamespace(); - String resourceInfo = MessageFormat.format("{0} with name {1} in namespace {2}", kind, name, namespace); + String resourceInfo = MessageFormat.format("{0} with name {1} in namespace {2}", kind, name, + namespace); LOGGER.info("Creating shared resource {}...", resourceInfo); @@ -152,10 +139,8 @@ public final void createSharedResource( LOGGER.info("Shared resource {} created.", resourceInfo); if (waitReady) { - Assertions.assertTrue( - waitResourceCondition(resource, type::isReady), - MessageFormat.format("Timed out waiting for shared resource {0} to be ready.", resourceInfo) - ); + Assertions.assertTrue(waitResourceCondition(resource, type::isReady), MessageFormat + .format("Timed out waiting for shared resource {0} to be ready.", resourceInfo)); LOGGER.info("Shared resource {} is ready.", resourceInfo); @@ -167,16 +152,12 @@ public final void createSharedResource( } public final boolean waitResourceCondition(T resource, Predicate condition) { - return waitResourceCondition( - resource, - condition, - TimeoutBudget.ofDuration(findResourceType(resource).getTimeout()) - ); + return waitResourceCondition(resource, condition, + TimeoutBudget.ofDuration(findResourceType(resource).getTimeout())); } - public final boolean waitResourceCondition( - T resource, Predicate condition, TimeoutBudget timeout - ) { + public final boolean waitResourceCondition(T resource, Predicate condition, + TimeoutBudget timeout) { Assertions.assertNotNull(resource); 
Assertions.assertNotNull(resource.getMetadata()); Assertions.assertNotNull(resource.getMetadata().getName()); @@ -229,17 +210,17 @@ public static String resourceToString(T resource) { } public final void deleteResource(T resource) { - String resourceInfo = MessageFormat.format( - "{0} with name {1} in namespace {2}", - resource.getKind(), resource.getMetadata().getName(), resource.getMetadata().getNamespace() - ); + String resourceInfo = MessageFormat.format("{0} with name {1} in namespace {2}", resource.getKind(), + resource.getMetadata().getName(), resource.getMetadata().getNamespace()); LOGGER.info("Deleting resource {}...", resourceInfo); ResourceType type = findResourceType(resource); - /*if (resourceInfo.contains("Subscription") && (resourceInfo.contains("sso") || resourceInfo.contains("keycloak"))) { - KeycloakUtils.removeKeycloak(resource.getMetadata().getNamespace()); - }*/ + /* + * if (resourceInfo.contains("Subscription") && (resourceInfo.contains("sso") || + * resourceInfo.contains("keycloak"))) { + * KeycloakUtils.removeKeycloak(resource.getMetadata().getNamespace()); } + */ try { type.delete(resource); @@ -248,19 +229,21 @@ public final void deleteResource(T resource) { } Assertions.assertTrue( - waitResourceCondition(resource, type::doesNotExist, TimeoutBudget.ofDuration(Duration.ofMinutes(10))), - MessageFormat.format("Timed out waiting for resource {0} to be deleted.", resourceInfo) - ); + waitResourceCondition(resource, type::doesNotExist, + TimeoutBudget.ofDuration(Duration.ofMinutes(10))), + MessageFormat.format("Timed out waiting for resource {0} to be deleted.", resourceInfo)); LOGGER.info("Resource {} is deleted.", resourceInfo); } public void deleteKafka() { - Kafka kafka = KafkaResourceType.getOperation().inNamespace(Environment.NAMESPACE).withName(Constants.KAFKA).get(); + Kafka kafka = KafkaResourceType.getOperation().inNamespace(Environment.NAMESPACE) + .withName(Constants.KAFKA).get(); if (kafka != null) { deleteResource(kafka); } } + 
public void deleteSharedResources() { LOGGER.info("----------------------------------------------"); LOGGER.info("Going to clear shared resources."); @@ -273,6 +256,7 @@ public void deleteSharedResources() { LOGGER.info("----------------------------------------------"); LOGGER.info(""); } + public void deleteResources() { LOGGER.info("----------------------------------------------"); LOGGER.info("Going to clear test resources."); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ApicurioRegistryResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ApicurioRegistryResourceType.java index 1cfefcd42a..e14728e17b 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ApicurioRegistryResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ApicurioRegistryResourceType.java @@ -26,58 +26,41 @@ public String getKind() { @Override public ApicurioRegistry get(String namespace, String name) { - return getOperation() - .inNamespace(namespace) - .withName(name) - .get(); + return getOperation().inNamespace(namespace).withName(name).get(); } - public static MixedOperation, Resource> - getOperation() { + public static MixedOperation, Resource> getOperation() { return Kubernetes.getResources(ApicurioRegistry.class); } @Override public void create(ApicurioRegistry resource) { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .create(resource); + getOperation().inNamespace(resource.getMetadata().getNamespace()).create(resource); } @Override public void createOrReplace(ApicurioRegistry resource) { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .createOrReplace(resource); + getOperation().inNamespace(resource.getMetadata().getNamespace()).createOrReplace(resource); } @Override public void delete(ApicurioRegistry resource) { - getOperation() - 
.inNamespace(resource.getMetadata().getNamespace()) - .withName(resource.getMetadata().getName()) - .delete(); + getOperation().inNamespace(resource.getMetadata().getNamespace()) + .withName(resource.getMetadata().getName()).delete(); } @Override public boolean isReady(ApicurioRegistry resource) { - ApicurioRegistry apicurioRegistry = get( - resource.getMetadata().getNamespace(), - resource.getMetadata().getName() - ); + ApicurioRegistry apicurioRegistry = get(resource.getMetadata().getNamespace(), + resource.getMetadata().getName()); if (apicurioRegistry == null || apicurioRegistry.getStatus() == null) { return false; } - return apicurioRegistry - .getStatus() - .getConditions() - .stream() + return apicurioRegistry.getStatus().getConditions().stream() .filter(condition -> condition.getType().equals("Ready")) - .map(condition -> condition.getStatus().equals("True")) - .findFirst() - .orElse(false); + .map(condition -> condition.getStatus().equals("True")).findFirst().orElse(false); } @Override @@ -98,67 +81,35 @@ public void refreshResource(ApicurioRegistry existing, ApicurioRegistry newResou /** Get default instances **/ - - public static ApicurioRegistry getDefaultMem(String name, String namespace) { - return new ApicurioRegistryBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withNewConfiguration() - .withPersistence("mem") - .endConfiguration() - .endSpec() - .build(); + return new ApicurioRegistryBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .endMetadata().withNewSpec().withNewConfiguration().withPersistence("mem").endConfiguration() + .endSpec().build(); } public static ApicurioRegistry getDefaultSql(String name, String namespace) { return getDefaultSql(name, namespace, "postgresql", "postgresql"); } - public static ApicurioRegistry getDefaultSql(String name, String namespace, String sqlName, String sqlNamespace) { - String sqlUrl = "jdbc:postgresql://" + sqlName + "." 
+ sqlNamespace + ".svc.cluster.local:5432/postgresdb"; - - return new ApicurioRegistryBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withNewConfiguration() - .withPersistence("sql") - .withNewSql() - .withNewDataSource() - .withUrl(sqlUrl) - .withUserName("postgresuser") - .withPassword("postgrespassword") - .endDataSource() - .endSql() - .endConfiguration() - .endSpec() + public static ApicurioRegistry getDefaultSql(String name, String namespace, String sqlName, + String sqlNamespace) { + String sqlUrl = "jdbc:postgresql://" + sqlName + "." + sqlNamespace + + ".svc.cluster.local:5432/postgresdb"; + + return new ApicurioRegistryBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .endMetadata().withNewSpec().withNewConfiguration().withPersistence("sql").withNewSql() + .withNewDataSource().withUrl(sqlUrl).withUserName("postgresuser") + .withPassword("postgrespassword").endDataSource().endSql().endConfiguration().endSpec() .build(); } + public static ApicurioRegistry getDefaultKafkasql(String name, String namespace) { - return new ApicurioRegistryBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withNewConfiguration() - .withPersistence("kafkasql") - .withNewKafkasql() - .withBootstrapServers( - Constants.KAFKA + "-kafka-bootstrap." + Environment.NAMESPACE + - ".svc.cluster.local:9092" - ) - .endKafkasql() - .endConfiguration() - .endSpec() - .build(); + return new ApicurioRegistryBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .endMetadata().withNewSpec().withNewConfiguration().withPersistence("kafkasql") + .withNewKafkasql().withBootstrapServers(Constants.KAFKA + "-kafka-bootstrap." 
+ + Environment.NAMESPACE + ".svc.cluster.local:9092") + .endKafkasql().endConfiguration().endSpec().build(); } public static ApicurioRegistry getDefaultMem(String name) { @@ -186,67 +137,30 @@ public static ApicurioRegistry getDefaultKafkasql() { } public static void updateWithDefaultTLS(ApicurioRegistry apicurioRegistry) { - apicurioRegistry - .getSpec() - .getConfiguration() - .getKafkasql() - .setSecurity( - new SecurityBuilder() - .withNewTls() - .withKeystoreSecretName(Constants.KAFKA_USER + "-keystore") - .withTruststoreSecretName(Constants.KAFKA + "-cluster-ca-truststore") - .endTls() - .build() - ); - - apicurioRegistry - .getSpec() - .getConfiguration() - .getKafkasql() - .setBootstrapServers( - Constants.KAFKA + "-kafka-bootstrap." + Environment.NAMESPACE + - ".svc.cluster.local:9093" - ); + apicurioRegistry.getSpec().getConfiguration().getKafkasql().setSecurity(new SecurityBuilder() + .withNewTls().withKeystoreSecretName(Constants.KAFKA_USER + "-keystore") + .withTruststoreSecretName(Constants.KAFKA + "-cluster-ca-truststore").endTls().build()); + + apicurioRegistry.getSpec().getConfiguration().getKafkasql().setBootstrapServers( + Constants.KAFKA + "-kafka-bootstrap." + Environment.NAMESPACE + ".svc.cluster.local:9093"); } public static void updateWithDefaultSCRAM(ApicurioRegistry apicurioRegistry) { - apicurioRegistry - .getSpec() - .getConfiguration() - .getKafkasql() - .setSecurity( - new SecurityBuilder() - .withNewScram() - .withTruststoreSecretName(Constants.KAFKA + "-cluster-ca-truststore") - .withPasswordSecretName(Constants.KAFKA_USER) - .withUser(Constants.KAFKA_USER) - .endScram() - .build() - ); - - apicurioRegistry - .getSpec() - .getConfiguration() - .getKafkasql() - .setBootstrapServers( - Constants.KAFKA + "-kafka-bootstrap." 
+ Environment.NAMESPACE + - ".svc.cluster.local:9093" - ); + apicurioRegistry.getSpec().getConfiguration().getKafkasql() + .setSecurity(new SecurityBuilder().withNewScram() + .withTruststoreSecretName(Constants.KAFKA + "-cluster-ca-truststore") + .withPasswordSecretName(Constants.KAFKA_USER).withUser(Constants.KAFKA_USER) + .endScram().build()); + + apicurioRegistry.getSpec().getConfiguration().getKafkasql().setBootstrapServers( + Constants.KAFKA + "-kafka-bootstrap." + Environment.NAMESPACE + ".svc.cluster.local:9093"); } public static void updateWithDefaultKeycloak(ApicurioRegistry apicurioRegistry) { - apicurioRegistry - .getSpec() - .getConfiguration() - .setSecurity( - new io.apicur.registry.v1.apicurioregistryspec.configuration.SecurityBuilder() - .withNewKeycloak() - .withApiClientId(Constants.SSO_CLIENT_API) - .withUiClientId(Constants.SSO_CLIENT_UI) - .withRealm(Constants.SSO_REALM) - .withUrl(KeycloakUtils.getDefaultKeycloakURL()) - .endKeycloak() - .build() - ); + apicurioRegistry.getSpec().getConfiguration() + .setSecurity(new io.apicur.registry.v1.apicurioregistryspec.configuration.SecurityBuilder() + .withNewKeycloak().withApiClientId(Constants.SSO_CLIENT_API) + .withUiClientId(Constants.SSO_CLIENT_UI).withRealm(Constants.SSO_REALM) + .withUrl(KeycloakUtils.getDefaultKeycloakURL()).endKeycloak().build()); } } \ No newline at end of file diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/CatalogSourceResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/CatalogSourceResourceType.java index a63c930b4a..946da05ba5 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/CatalogSourceResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/CatalogSourceResourceType.java @@ -34,7 +34,8 @@ public void createOrReplace(CatalogSource resource) { @Override public void 
delete(CatalogSource resource) throws Exception { - Kubernetes.deleteCatalogSource(resource.getMetadata().getNamespace(), resource.getMetadata().getName()); + Kubernetes.deleteCatalogSource(resource.getMetadata().getNamespace(), + resource.getMetadata().getName()); } @Override @@ -65,17 +66,8 @@ public void refreshResource(CatalogSource existing, CatalogSource newResource) { /** Get default instances **/ public static CatalogSource getDefault(String name, String namespace, String image) { - return new CatalogSourceBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withDisplayName("Registry Operator Catalog Source") - .withImage(image) - .withPublisher("registry-qe") - .withSourceType("grpc") - .endSpec() - .build(); + return new CatalogSourceBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .endMetadata().withNewSpec().withDisplayName("Registry Operator Catalog Source") + .withImage(image).withPublisher("registry-qe").withSourceType("grpc").endSpec().build(); } } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/DeploymentResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/DeploymentResourceType.java index 99ff237973..bfed7c9c9b 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/DeploymentResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/DeploymentResourceType.java @@ -54,14 +54,9 @@ public boolean isReady(Deployment resource) { return false; } - return deployment - .getStatus() - .getConditions() - .stream() + return deployment.getStatus().getConditions().stream() .filter(condition -> condition.getType().equals("Available")) - .map(condition -> condition.getStatus().equals("True")) - .findFirst() - .orElse(false); + .map(condition -> 
condition.getStatus().equals("True")).findFirst().orElse(false); } @Override @@ -94,62 +89,34 @@ private static List getDefaultPostgresqlEnvVars() { } private static Container getDefaultPostgresqlContainer(String name) { - return new ContainerBuilder() - .withEnv(getDefaultPostgresqlEnvVars()) - .withImage("quay.io/centos7/postgresql-12-centos7:latest") - .withImagePullPolicy("Always") - .withName(name) - .addNewPort() - .withContainerPort(5432) - .withName("postgresql") - .withProtocol("TCP") - .endPort() - .withNewReadinessProbe() - .withNewTcpSocket() - .withNewPort(5432) - .endTcpSocket() - .endReadinessProbe() - .withNewLivenessProbe() - .withNewTcpSocket() - .withNewPort(5432) - .endTcpSocket() - .endLivenessProbe() - .withVolumeMounts(new VolumeMount() {{ - setMountPath("/var/lib/pgsql/data"); - setName(name); - }}) - .build(); + return new ContainerBuilder().withEnv(getDefaultPostgresqlEnvVars()) + .withImage("quay.io/centos7/postgresql-12-centos7:latest").withImagePullPolicy("Always") + .withName(name).addNewPort().withContainerPort(5432).withName("postgresql") + .withProtocol("TCP").endPort().withNewReadinessProbe().withNewTcpSocket().withNewPort(5432) + .endTcpSocket().endReadinessProbe().withNewLivenessProbe().withNewTcpSocket() + .withNewPort(5432).endTcpSocket().endLivenessProbe().withVolumeMounts(new VolumeMount() { + { + setMountPath("/var/lib/pgsql/data"); + setName(name); + } + }).build(); } public static Deployment getDefaultPostgresql(String name, String namespace) { - return new DeploymentBuilder() - .withNewMetadata() - .addToLabels("app", name) - .withName(name) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withReplicas(1) - .withNewSelector() - .addToMatchLabels("app", name) - .endSelector() - .withNewTemplate() - .withNewMetadata() - .addToLabels("app", name) - .endMetadata() - .withNewSpec() - .withContainers(getDefaultPostgresqlContainer(name)) - .withVolumes(new Volume() {{ - setName(name); - 
setPersistentVolumeClaim(new PersistentVolumeClaimVolumeSource() {{ - setClaimName(name); - }}); - }}) - .withRestartPolicy("Always") - .endSpec() - .endTemplate() - .endSpec() - .build(); + return new DeploymentBuilder().withNewMetadata().addToLabels("app", name).withName(name) + .withNamespace(namespace).endMetadata().withNewSpec().withReplicas(1).withNewSelector() + .addToMatchLabels("app", name).endSelector().withNewTemplate().withNewMetadata() + .addToLabels("app", name).endMetadata().withNewSpec() + .withContainers(getDefaultPostgresqlContainer(name)).withVolumes(new Volume() { + { + setName(name); + setPersistentVolumeClaim(new PersistentVolumeClaimVolumeSource() { + { + setClaimName(name); + } + }); + } + }).withRestartPolicy("Always").endSpec().endTemplate().endSpec().build(); } public static Deployment getDefaultPostgresql() { @@ -157,47 +124,18 @@ public static Deployment getDefaultPostgresql() { } private static Container getDefaultSeleniumContainer(String name) { - return new ContainerBuilder() - .withName(name) - .withImage("quay.io/redhatqe/selenium-standalone") - .addNewPort() - .withContainerPort(4444) - .withName("http") - .withProtocol("TCP") - .endPort() - .withNewReadinessProbe() - .withNewHttpGet() - .withPath("/wd/hub/status") - .withNewPort(4444) - .endHttpGet() - .withInitialDelaySeconds(10) - .withPeriodSeconds(2) - .endReadinessProbe() - .build(); + return new ContainerBuilder().withName(name).withImage("quay.io/redhatqe/selenium-standalone") + .addNewPort().withContainerPort(4444).withName("http").withProtocol("TCP").endPort() + .withNewReadinessProbe().withNewHttpGet().withPath("/wd/hub/status").withNewPort(4444) + .endHttpGet().withInitialDelaySeconds(10).withPeriodSeconds(2).endReadinessProbe().build(); } public static Deployment getDefaultSelenium(String name, String namespace) { - return new DeploymentBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .withLabels(Collections.singletonMap("app", name)) - 
.endMetadata() - .withNewSpec() - .withReplicas(1) - .withNewSelector() - .addToMatchLabels("app", name) - .endSelector() - .withNewTemplate() - .withNewMetadata() - .addToLabels("app", name) - .endMetadata() - .withNewSpec() - .withContainers(getDefaultSeleniumContainer(name)) - .endSpec() - .endTemplate() - .endSpec() - .build(); + return new DeploymentBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .withLabels(Collections.singletonMap("app", name)).endMetadata().withNewSpec().withReplicas(1) + .withNewSelector().addToMatchLabels("app", name).endSelector().withNewTemplate() + .withNewMetadata().addToLabels("app", name).endMetadata().withNewSpec() + .withContainers(getDefaultSeleniumContainer(name)).endSpec().endTemplate().endSpec().build(); } public static Deployment getDefaultSelenium() { diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/IngressResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/IngressResourceType.java index ff276a3a47..6ae51af452 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/IngressResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/IngressResourceType.java @@ -30,10 +30,7 @@ public String getKind() { @Override public Ingress get(String namespace, String name) { - return getOperation() - .inNamespace(namespace) - .withName(name) - .get(); + return getOperation().inNamespace(namespace).withName(name).get(); } public static MixedOperation, Resource> getOperation() { @@ -42,24 +39,18 @@ public static MixedOperation, Resource< @Override public void create(Ingress resource) { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .create(resource); + getOperation().inNamespace(resource.getMetadata().getNamespace()).create(resource); } @Override public void createOrReplace(Ingress resource) { - 
getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .createOrReplace(resource); + getOperation().inNamespace(resource.getMetadata().getNamespace()).createOrReplace(resource); } @Override public void delete(Ingress resource) throws Exception { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .withName(resource.getMetadata().getName()) - .delete(); + getOperation().inNamespace(resource.getMetadata().getNamespace()) + .withName(resource.getMetadata().getName()).delete(); } @Override @@ -85,35 +76,42 @@ public void refreshResource(Ingress existing, Ingress newResource) { /** Get default instances **/ - public static Ingress getDefaultSelenium(String name, String namespace) { - return new IngressBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .withLabels(Collections.singletonMap("app", name)) - .endMetadata() - .withNewSpec() - .withRules(new IngressRule() {{ - setHost(name + ".127.0.0.1.nip.io"); - setHttp(new HTTPIngressRuleValue() {{ - setPaths(new ArrayList<>() {{ - add(new HTTPIngressPath() {{ - setPath("/"); - setPathType("Prefix"); - setBackend(new IngressBackend() {{ - setService(new IngressServiceBackend() {{ - setName(name); - setPort(new ServiceBackendPort() {{ - setNumber(4444); - }}); - }}); - }}); - }}); - }}); - }}); - }}) - .endSpec() - .build(); + public static Ingress getDefaultSelenium(String name, String namespace) { + return new IngressBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .withLabels(Collections.singletonMap("app", name)).endMetadata().withNewSpec() + .withRules(new IngressRule() { + { + setHost(name + ".127.0.0.1.nip.io"); + setHttp(new HTTPIngressRuleValue() { + { + setPaths(new ArrayList<>() { + { + add(new HTTPIngressPath() { + { + setPath("/"); + setPathType("Prefix"); + setBackend(new IngressBackend() { + { + setService(new IngressServiceBackend() { + { + setName(name); + setPort(new ServiceBackendPort() { + { + setNumber(4444); + } + }); 
+ } + }); + } + }); + } + }); + } + }); + } + }); + } + }).endSpec().build(); } public static Ingress getDefaultSelenium() { diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaConnectResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaConnectResourceType.java index 4c59376b5d..1161f28916 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaConnectResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaConnectResourceType.java @@ -34,49 +34,38 @@ public KafkaConnect get(String namespace, String name) { return getOperation().inNamespace(namespace).withName(name).get(); } - public static MixedOperation, Resource> - getOperation() { + public static MixedOperation, Resource> getOperation() { return Kubernetes.getResources(KafkaConnect.class); } @Override public void create(KafkaConnect resource) { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .create(resource); + getOperation().inNamespace(resource.getMetadata().getNamespace()).create(resource); } @Override public void createOrReplace(KafkaConnect resource) { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .createOrReplace(resource); + getOperation().inNamespace(resource.getMetadata().getNamespace()).createOrReplace(resource); } @Override public void delete(KafkaConnect resource) throws Exception { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .withName(resource.getMetadata().getName()) - .delete(); + getOperation().inNamespace(resource.getMetadata().getNamespace()) + .withName(resource.getMetadata().getName()).delete(); } @Override public boolean isReady(KafkaConnect resource) { - KafkaConnect kafkaConnect = get(resource.getMetadata().getNamespace(), resource.getMetadata().getName()); + KafkaConnect kafkaConnect = 
get(resource.getMetadata().getNamespace(), + resource.getMetadata().getName()); if (kafkaConnect == null || kafkaConnect.getStatus() == null) { return false; } - return kafkaConnect - .getStatus() - .getConditions() - .stream() + return kafkaConnect.getStatus().getConditions().stream() .filter(condition -> condition.getType().equals("Ready")) - .map(condition -> condition.getStatus().equals("True")) - .findFirst() - .orElse(false); + .map(condition -> condition.getStatus().equals("True")).findFirst().orElse(false); } @Override @@ -98,30 +87,20 @@ public void refreshResource(KafkaConnect existing, KafkaConnect newResource) { /** Get default instances **/ private static Plugin getDefaultDebeziumPlugin() { - return new PluginBuilder() - .withName("debezium-connector-postgres") - .withArtifacts(new TgzArtifactBuilder() - .withUrl( - "https://repo1.maven.org/maven2/io/debezium/debezium-connector-postgres/1.4.1.Final/" + - "debezium-connector-postgres-1.4.1.Final-plugin.tar.gz" - ) + return new PluginBuilder().withName("debezium-connector-postgres") + .withArtifacts(new TgzArtifactBuilder().withUrl( + "https://repo1.maven.org/maven2/io/debezium/debezium-connector-postgres/1.4.1.Final/" + + "debezium-connector-postgres-1.4.1.Final-plugin.tar.gz") .withSha512sum( - "99b0924aad98c6066e6bd22a05cf25789e6ba95ed53102d0c76e7775c3966ac8cf1b9a88e779685123c9" + - "0e0bd1512d3bb986ad5052e8cae18cbcd2e8cf16f116" - ) - .build() - ) + "99b0924aad98c6066e6bd22a05cf25789e6ba95ed53102d0c76e7775c3966ac8cf1b9a88e779685123c9" + + "0e0bd1512d3bb986ad5052e8cae18cbcd2e8cf16f116") + .build()) .build(); } private static Plugin getDefaultApicurioPlugin() { - return new PluginBuilder() - .withName("apicurio-converters") - .withArtifacts(new TgzArtifactBuilder() - .withUrl(Environment.CONVERTERS_URL) - .withSha512sum(Environment.CONVERTERS_SHA512SUM) - .build() - ) + return new PluginBuilder().withName("apicurio-converters").withArtifacts(new TgzArtifactBuilder() + 
.withUrl(Environment.CONVERTERS_URL).withSha512sum(Environment.CONVERTERS_SHA512SUM).build()) .build(); } @@ -135,30 +114,17 @@ private static List getDefaultPlugins() { } public static KafkaConnect getDefault(String name, String namespace) { - return new KafkaConnectBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .withAnnotations(Collections.singletonMap("strimzi.io/use-connector-resources", "true")) - .endMetadata() - .withNewSpec() - .withReplicas(3) - .withBootstrapServers( - Constants.KAFKA + "-kafka-bootstrap." + Environment.NAMESPACE + - ".svc.cluster.local:9092" - ) - .withNewBuild() - .withOutput(new DockerOutputBuilder() - .withImage( - "image-registry.openshift-image-registry.svc:5000/" + - namespace + "/apicurio-debezium:latest-ci" - ) - .build() - ) - .withPlugins(getDefaultPlugins()) - .endBuild() - .endSpec() - .build(); + return new KafkaConnectBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .withAnnotations(Collections.singletonMap("strimzi.io/use-connector-resources", "true")) + .endMetadata().withNewSpec().withReplicas(3) + .withBootstrapServers(Constants.KAFKA + + "-kafka-bootstrap." 
+ Environment.NAMESPACE + ".svc.cluster.local:9092") + .withNewBuild() + .withOutput(new DockerOutputBuilder() + .withImage("image-registry.openshift-image-registry.svc:5000/" + namespace + + "/apicurio-debezium:latest-ci") + .build()) + .withPlugins(getDefaultPlugins()).endBuild().endSpec().build(); } public static KafkaConnect getDefault() { diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaKind.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaKind.java index cbcce49b8b..17af274a53 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaKind.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaKind.java @@ -1,7 +1,5 @@ package io.apicurio.registry.systemtests.registryinfra.resources; public enum KafkaKind { - NO_AUTH, - TLS, - SCRAM + NO_AUTH, TLS, SCRAM } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaResourceType.java index 4370da32cc..8a508136a8 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaResourceType.java @@ -39,10 +39,7 @@ public String getKind() { @Override public Kafka get(String namespace, String name) { - return getOperation() - .inNamespace(namespace) - .withName(name) - .get(); + return getOperation().inNamespace(namespace).withName(name).get(); } public static MixedOperation, Resource> getOperation() { @@ -51,24 +48,18 @@ public static MixedOperation, Resource condition.getType().equals("Ready")) - .map(condition -> condition.getStatus().equals("True")) - .findFirst() - .orElse(false); + .map(condition -> 
condition.getStatus().equals("True")).findFirst().orElse(false); } @Override @@ -108,131 +94,68 @@ public void refreshResource(Kafka existing, Kafka newResource) { /** Get default instances **/ public static GenericKafkaListener getPlainListener() { - return new GenericKafkaListenerBuilder() - .withName("plain") - .withPort(9092) - .withType(KafkaListenerType.INTERNAL) - .withTls(false) - .build(); + return new GenericKafkaListenerBuilder().withName("plain").withPort(9092) + .withType(KafkaListenerType.INTERNAL).withTls(false).build(); } public static GenericKafkaListener getTlsListener() { - return new GenericKafkaListenerBuilder() - .withName("tls") - .withPort(9093) - .withType(KafkaListenerType.INTERNAL) - .withTls(true) - .withAuth(new KafkaListenerAuthenticationTls()) - .build(); + return new GenericKafkaListenerBuilder().withName("tls").withPort(9093) + .withType(KafkaListenerType.INTERNAL).withTls(true) + .withAuth(new KafkaListenerAuthenticationTls()).build(); } public static GenericKafkaListener getScramListener() { - return new GenericKafkaListenerBuilder() - .withName("tls") - .withPort(9093) - .withType(KafkaListenerType.INTERNAL) - .withTls(true) - .withAuth(new KafkaListenerAuthenticationScramSha512()) - .build(); + return new GenericKafkaListenerBuilder().withName("tls").withPort(9093) + .withType(KafkaListenerType.INTERNAL).withTls(true) + .withAuth(new KafkaListenerAuthenticationScramSha512()).build(); } public static Map getDefaultConfig() { - return new HashMap<>() {{ - put("offsets.topic.replication.factor", 3); - put("transaction.state.log.replication.factor", 3); - put("transaction.state.log.min.isr", 2); - }}; + return new HashMap<>() { + { + put("offsets.topic.replication.factor", 3); + put("transaction.state.log.replication.factor", 3); + put("transaction.state.log.min.isr", 2); + } + }; } public static PersistentClaimStorage getDefaultStorage() { - return new PersistentClaimStorageBuilder() - .withSize("100Gi") - .withDeleteClaim(true) - 
.build(); + return new PersistentClaimStorageBuilder().withSize("100Gi").withDeleteClaim(true).build(); } public static EntityOperatorSpec getDefaultEntityOperator() { - return new EntityOperatorSpecBuilder() - .withNewTopicOperator() - .endTopicOperator() - .withNewUserOperator() - .endUserOperator() - .build(); + return new EntityOperatorSpecBuilder().withNewTopicOperator().endTopicOperator().withNewUserOperator() + .endUserOperator().build(); } public static Kafka getDefaultNoAuth() { - return new KafkaBuilder() - .withNewMetadata() - .withName(Constants.KAFKA) - .withNamespace(Environment.NAMESPACE) - .endMetadata() - .withNewSpec() - .withNewKafka() - .withVersion(KAFKA_VERSION) - .withReplicas(KAFKA_REPLICAS) - .withListeners(getPlainListener()) - .withConfig(getDefaultConfig()) - .withStorage(getDefaultStorage()) - .endKafka() - .withNewZookeeper() - .withReplicas(ZOOKEEPER_REPLICAS) - .withStorage(getDefaultStorage()) - .endZookeeper() - .withEntityOperator(getDefaultEntityOperator()) - .endSpec() - .build(); + return new KafkaBuilder().withNewMetadata().withName(Constants.KAFKA) + .withNamespace(Environment.NAMESPACE).endMetadata().withNewSpec().withNewKafka() + .withVersion(KAFKA_VERSION).withReplicas(KAFKA_REPLICAS).withListeners(getPlainListener()) + .withConfig(getDefaultConfig()).withStorage(getDefaultStorage()).endKafka().withNewZookeeper() + .withReplicas(ZOOKEEPER_REPLICAS).withStorage(getDefaultStorage()).endZookeeper() + .withEntityOperator(getDefaultEntityOperator()).endSpec().build(); } public static Kafka getDefaultTLS() { - return new KafkaBuilder() - .withNewMetadata() - .withName(Constants.KAFKA) - .withNamespace(Environment.NAMESPACE) - .endMetadata() - .withNewSpec() - .withNewKafka() - .withVersion(KAFKA_VERSION) - .withReplicas(KAFKA_REPLICAS) - .withListeners( - getPlainListener(), - getTlsListener() - ) - .withConfig(getDefaultConfig()) - .withStorage(getDefaultStorage()) - .endKafka() - .withNewZookeeper() - 
.withReplicas(ZOOKEEPER_REPLICAS) - .withStorage(getDefaultStorage()) - .endZookeeper() - .withEntityOperator(getDefaultEntityOperator()) - .endSpec() - .build(); + return new KafkaBuilder().withNewMetadata().withName(Constants.KAFKA) + .withNamespace(Environment.NAMESPACE).endMetadata().withNewSpec().withNewKafka() + .withVersion(KAFKA_VERSION).withReplicas(KAFKA_REPLICAS) + .withListeners(getPlainListener(), getTlsListener()).withConfig(getDefaultConfig()) + .withStorage(getDefaultStorage()).endKafka().withNewZookeeper() + .withReplicas(ZOOKEEPER_REPLICAS).withStorage(getDefaultStorage()).endZookeeper() + .withEntityOperator(getDefaultEntityOperator()).endSpec().build(); } public static Kafka getDefaultSCRAM() { - return new KafkaBuilder() - .withNewMetadata() - .withName(Constants.KAFKA) - .withNamespace(Environment.NAMESPACE) - .endMetadata() - .withNewSpec() - .withNewKafka() - .withVersion(KAFKA_VERSION) - .withReplicas(KAFKA_REPLICAS) - .withListeners( - getPlainListener(), - getScramListener() - ) - .withConfig(getDefaultConfig()) - .withStorage(getDefaultStorage()) - .endKafka() - .withNewZookeeper() - .withReplicas(ZOOKEEPER_REPLICAS) - .withStorage(getDefaultStorage()) - .endZookeeper() - .withEntityOperator(getDefaultEntityOperator()) - .endSpec() - .build(); + return new KafkaBuilder().withNewMetadata().withName(Constants.KAFKA) + .withNamespace(Environment.NAMESPACE).endMetadata().withNewSpec().withNewKafka() + .withVersion(KAFKA_VERSION).withReplicas(KAFKA_REPLICAS) + .withListeners(getPlainListener(), getScramListener()).withConfig(getDefaultConfig()) + .withStorage(getDefaultStorage()).endKafka().withNewZookeeper() + .withReplicas(ZOOKEEPER_REPLICAS).withStorage(getDefaultStorage()).endZookeeper() + .withEntityOperator(getDefaultEntityOperator()).endSpec().build(); } public static Kafka getDefaultByKind(KafkaKind kind) { diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaTopicResourceType.java 
b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaTopicResourceType.java index ebeb98de29..4b9e66b52b 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaTopicResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaTopicResourceType.java @@ -24,10 +24,7 @@ public String getKind() { @Override public KafkaTopic get(String namespace, String name) { - return getOperation() - .inNamespace(namespace) - .withName(name) - .get(); + return getOperation().inNamespace(namespace).withName(name).get(); } public static MixedOperation, Resource> getOperation() { @@ -36,24 +33,18 @@ public static MixedOperation, Res @Override public void create(KafkaTopic resource) { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .create(resource); + getOperation().inNamespace(resource.getMetadata().getNamespace()).create(resource); } @Override public void createOrReplace(KafkaTopic resource) { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .createOrReplace(resource); + getOperation().inNamespace(resource.getMetadata().getNamespace()).createOrReplace(resource); } @Override public void delete(KafkaTopic resource) throws Exception { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .withName(resource.getMetadata().getName()) - .delete(); + getOperation().inNamespace(resource.getMetadata().getNamespace()) + .withName(resource.getMetadata().getName()).delete(); } @Override @@ -64,14 +55,9 @@ public boolean isReady(KafkaTopic resource) { return false; } - return kafkaTopic - .getStatus() - .getConditions() - .stream() + return kafkaTopic.getStatus().getConditions().stream() .filter(condition -> condition.getType().equals("Ready")) - .map(condition -> condition.getStatus().equals("True")) - .findFirst() - .orElse(false); + .map(condition -> 
condition.getStatus().equals("True")).findFirst().orElse(false); } @Override @@ -93,20 +79,13 @@ public void refreshResource(KafkaTopic existing, KafkaTopic newResource) { /** Get default instances **/ public static KafkaTopic getDefault(String name, String namespace, String clusterName) { - return new KafkaTopicBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .withLabels(Collections.singletonMap("strimzi.io/cluster", clusterName)) - .endMetadata() - .withNewSpec() - .withPartitions(3) - .withReplicas(3) - .withConfig(new HashMap<>() {{ + return new KafkaTopicBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .withLabels(Collections.singletonMap("strimzi.io/cluster", clusterName)).endMetadata() + .withNewSpec().withPartitions(3).withReplicas(3).withConfig(new HashMap<>() { + { put("retention.ms", 7200000); put("segment.bytes", 1073741824); - }}) - .endSpec() - .build(); + } + }).endSpec().build(); } } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaUserResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaUserResourceType.java index 7bbc3d7f30..397ff2a0c8 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaUserResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/KafkaUserResourceType.java @@ -37,24 +37,18 @@ public static MixedOperation, Resou @Override public void create(KafkaUser resource) { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .create(resource); + getOperation().inNamespace(resource.getMetadata().getNamespace()).create(resource); } @Override public void createOrReplace(KafkaUser resource) { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .createOrReplace(resource); + 
getOperation().inNamespace(resource.getMetadata().getNamespace()).createOrReplace(resource); } @Override public void delete(KafkaUser resource) throws Exception { - getOperation() - .inNamespace(resource.getMetadata().getNamespace()) - .withName(resource.getMetadata().getName()) - .delete(); + getOperation().inNamespace(resource.getMetadata().getNamespace()) + .withName(resource.getMetadata().getName()).delete(); } @Override @@ -65,14 +59,9 @@ public boolean isReady(KafkaUser resource) { return false; } - return kafkaUser - .getStatus() - .getConditions() - .stream() + return kafkaUser.getStatus().getConditions().stream() .filter(condition -> condition.getType().equals("Ready")) - .map(condition -> condition.getStatus().equals("True")) - .findFirst() - .orElse(false); + .map(condition -> condition.getStatus().equals("True")).findFirst().orElse(false); } @Override @@ -93,7 +82,8 @@ public void refreshResource(KafkaUser existing, KafkaUser newResource) { /** Get default instances **/ - public static KafkaUser getDefaultByKind(String name, String namespace, String clusterName, KafkaKind kafkaKind) { + public static KafkaUser getDefaultByKind(String name, String namespace, String clusterName, + KafkaKind kafkaKind) { KafkaUserAuthentication kafkaUserAuthentication = null; if (KafkaKind.TLS.equals(kafkaKind)) { @@ -102,16 +92,9 @@ public static KafkaUser getDefaultByKind(String name, String namespace, String c kafkaUserAuthentication = new KafkaUserScramSha512ClientAuthentication(); } - return new KafkaUserBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .withLabels(Collections.singletonMap("strimzi.io/cluster", clusterName)) - .endMetadata() - .withNewSpec() - .withAuthentication(kafkaUserAuthentication) - .endSpec() - .build(); + return new KafkaUserBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .withLabels(Collections.singletonMap("strimzi.io/cluster", clusterName)).endMetadata() + 
.withNewSpec().withAuthentication(kafkaUserAuthentication).endSpec().build(); } public static KafkaUser getDefaultTLS(String name, String namespace, String clusterName) { @@ -123,20 +106,11 @@ public static KafkaUser getDefaultSCRAM(String name, String namespace, String cl } public static KafkaUser getDefaultTLS() { - return getDefaultByKind( - Constants.KAFKA_USER, - Environment.NAMESPACE, - Constants.KAFKA, - KafkaKind.TLS - ); + return getDefaultByKind(Constants.KAFKA_USER, Environment.NAMESPACE, Constants.KAFKA, KafkaKind.TLS); } public static KafkaUser getDefaultSCRAM() { - return getDefaultByKind( - Constants.KAFKA_USER, - Environment.NAMESPACE, - Constants.KAFKA, - KafkaKind.SCRAM - ); + return getDefaultByKind(Constants.KAFKA_USER, Environment.NAMESPACE, Constants.KAFKA, + KafkaKind.SCRAM); } } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/NamespaceResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/NamespaceResourceType.java index 755af9293c..276c85f81a 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/NamespaceResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/NamespaceResourceType.java @@ -67,10 +67,6 @@ public void refreshResource(Namespace existing, Namespace newResource) { /** Get default instances **/ public static Namespace getDefault(String name) { - return new NamespaceBuilder() - .withNewMetadata() - .withName(name) - .endMetadata() - .build(); + return new NamespaceBuilder().withNewMetadata().withName(name).endMetadata().build(); } } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/OperatorGroupResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/OperatorGroupResourceType.java index d6dfadbd1f..2bf0bc18b2 100644 --- 
a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/OperatorGroupResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/OperatorGroupResourceType.java @@ -33,7 +33,8 @@ public void createOrReplace(OperatorGroup resource) { @Override public void delete(OperatorGroup resource) throws Exception { - Kubernetes.deleteOperatorGroup(resource.getMetadata().getNamespace(), resource.getMetadata().getName()); + Kubernetes.deleteOperatorGroup(resource.getMetadata().getNamespace(), + resource.getMetadata().getName()); } @Override diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/PersistenceKind.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/PersistenceKind.java index 3710ad0774..e4a6245676 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/PersistenceKind.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/PersistenceKind.java @@ -1,7 +1,5 @@ package io.apicurio.registry.systemtests.registryinfra.resources; public enum PersistenceKind { - MEM, - SQL, - KAFKA_SQL; + MEM, SQL, KAFKA_SQL; } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/PersistentVolumeClaimResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/PersistentVolumeClaimResourceType.java index ad7e186b2e..c0947126dc 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/PersistentVolumeClaimResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/PersistentVolumeClaimResourceType.java @@ -36,15 +36,14 @@ public void createOrReplace(PersistentVolumeClaim resource) { @Override public void delete(PersistentVolumeClaim resource) { - 
Kubernetes.deletePersistentVolumeClaim(resource.getMetadata().getNamespace(), resource.getMetadata().getName()); + Kubernetes.deletePersistentVolumeClaim(resource.getMetadata().getNamespace(), + resource.getMetadata().getName()); } @Override public boolean isReady(PersistentVolumeClaim resource) { - PersistentVolumeClaim persistentVolumeClaim = get( - resource.getMetadata().getNamespace(), - resource.getMetadata().getName() - ); + PersistentVolumeClaim persistentVolumeClaim = get(resource.getMetadata().getNamespace(), + resource.getMetadata().getName()); if (persistentVolumeClaim == null) { return false; @@ -72,21 +71,13 @@ public void refreshResource(PersistentVolumeClaim existing, PersistentVolumeClai /** Get default instances **/ public static PersistentVolumeClaim getDefaultPostgresql(String name, String namespace, String quantity) { - return new PersistentVolumeClaimBuilder() - .withNewMetadata() - .withLabels(new HashMap<>() {{ - put("app", name); - }}) - .withName(name) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withAccessModes("ReadWriteOnce") - .withNewResources() - .addToRequests("storage", new Quantity(quantity)) - .endResources() - .endSpec() - .build(); + return new PersistentVolumeClaimBuilder().withNewMetadata().withLabels(new HashMap<>() { + { + put("app", name); + } + }).withName(name).withNamespace(namespace).endMetadata().withNewSpec() + .withAccessModes("ReadWriteOnce").withNewResources() + .addToRequests("storage", new Quantity(quantity)).endResources().endSpec().build(); } public static PersistentVolumeClaim getDefaultPostgresql(String name, String namespace) { diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ResourceType.java index ef5cdc099a..7af1ba33b4 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ResourceType.java +++ 
b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ResourceType.java @@ -7,6 +7,7 @@ public interface ResourceType { Duration getTimeout(); + String getKind(); T get(String namespace, String name); diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/RouteResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/RouteResourceType.java index a38cb7397c..3b482fc088 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/RouteResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/RouteResourceType.java @@ -42,7 +42,8 @@ public void delete(Route resource) throws Exception { @Override public boolean isReady(Route resource) { - return Kubernetes.isRouteReady(resource.getMetadata().getNamespace(), resource.getMetadata().getName()); + return Kubernetes.isRouteReady(resource.getMetadata().getNamespace(), + resource.getMetadata().getName()); } @Override @@ -64,38 +65,27 @@ public void refreshResource(Route existing, Route newResource) { /** Get default instances **/ public static Route getDefaultKeycloak(String namespace) { - return new RouteBuilder() - .withNewMetadata() - .withName(Constants.SSO_HTTP_SERVICE) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withPath("/") - .withTo(new RouteTargetReference() {{ + return new RouteBuilder().withNewMetadata().withName(Constants.SSO_HTTP_SERVICE) + .withNamespace(namespace).endMetadata().withNewSpec().withPath("/") + .withTo(new RouteTargetReference() { + { setKind("Service"); setName(Constants.SSO_HTTP_SERVICE); setWeight(100); - }}) - .endSpec() - .build(); + } + }).endSpec().build(); } public static Route getDefaultSelenium(String name, String namespace) { - return new RouteBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .withLabels(Collections.singletonMap("app", 
name)) - .endMetadata() - .withNewSpec() - .withPath("/") - .withTo(new RouteTargetReference() {{ + return new RouteBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .withLabels(Collections.singletonMap("app", name)).endMetadata().withNewSpec().withPath("/") + .withTo(new RouteTargetReference() { + { setKind("Service"); setName(name); setWeight(100); - }}) - .endSpec() - .build(); + } + }).endSpec().build(); } public static Route getDefaultSelenium() { diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ServiceResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ServiceResourceType.java index 0459c915ca..aa7dc147b2 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ServiceResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/ServiceResourceType.java @@ -54,7 +54,8 @@ public boolean isReady(Service resource) { return true; } - return Kubernetes.isServiceReady(service.getMetadata().getNamespace(), service.getSpec().getSelector()); + return Kubernetes.isServiceReady(service.getMetadata().getNamespace(), + service.getSpec().getSelector()); } @Override @@ -76,27 +77,22 @@ public void refreshResource(Service existing, Service newResource) { /** Get default instances **/ public static Service getDefaultPostgresql(String name, String namespace) { - return new ServiceBuilder() - .withNewMetadata() - .withLabels(new HashMap<>() {{ - put("app", name); - }}) - .withName(name) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withPorts(new ServicePort() {{ - setName("postgresql"); - setPort(5432); - setProtocol("TCP"); - setTargetPort(new IntOrString(5432)); - }}) - .withSelector(new HashMap<>() {{ - put("app", name); - }}) - .withType("ClusterIP") - .endSpec() - .build(); + return new ServiceBuilder().withNewMetadata().withLabels(new HashMap<>() { + 
{ + put("app", name); + } + }).withName(name).withNamespace(namespace).endMetadata().withNewSpec().withPorts(new ServicePort() { + { + setName("postgresql"); + setPort(5432); + setProtocol("TCP"); + setTargetPort(new IntOrString(5432)); + } + }).withSelector(new HashMap<>() { + { + put("app", name); + } + }).withType("ClusterIP").endSpec().build(); } public static Service getDefaultPostgresql() { @@ -104,42 +100,31 @@ public static Service getDefaultPostgresql() { } public static Service getDefaultKeycloakHttp(String namespace) { - return new ServiceBuilder() - .withNewMetadata() - .withName(Constants.SSO_HTTP_SERVICE) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withPorts(new ServicePort() {{ + return new ServiceBuilder().withNewMetadata().withName(Constants.SSO_HTTP_SERVICE) + .withNamespace(namespace).endMetadata().withNewSpec().withPorts(new ServicePort() { + { setPort(8080); setProtocol("TCP"); setTargetPort(new IntOrString(8080)); - }}) - .withSelector(new HashMap<>() {{ + } + }).withSelector(new HashMap<>() { + { put("app", "keycloak"); put("component", "keycloak"); - }}) - .withType("ClusterIP") - .endSpec() - .build(); + } + }).withType("ClusterIP").endSpec().build(); } public static Service getDefaultSelenium(String name, String namespace) { - return new ServiceBuilder() - .withNewMetadata() - .withName(name) - .withNamespace(namespace) - .withLabels(Collections.singletonMap("app", name)) - .endMetadata() - .withNewSpec() - .withPorts(new ServicePort() {{ + return new ServiceBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .withLabels(Collections.singletonMap("app", name)).endMetadata().withNewSpec() + .withPorts(new ServicePort() { + { setPort(4444); setProtocol("TCP"); setName("http"); - }}) - .withSelector(Collections.singletonMap("app", name)) - .withType("ClusterIP") - .endSpec() + } + }).withSelector(Collections.singletonMap("app", name)).withType("ClusterIP").endSpec() .build(); } diff --git 
a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/SubscriptionResourceType.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/SubscriptionResourceType.java index 2ddc159353..96c4bbb341 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/SubscriptionResourceType.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/registryinfra/resources/SubscriptionResourceType.java @@ -34,25 +34,22 @@ public void createOrReplace(Subscription resource) { @Override public void delete(Subscription resource) throws Exception { - Kubernetes.deleteSubscription(resource.getMetadata().getNamespace(), resource.getMetadata().getName()); + Kubernetes.deleteSubscription(resource.getMetadata().getNamespace(), + resource.getMetadata().getName()); } @Override public boolean isReady(Subscription resource) { - Subscription subscription = get(resource.getMetadata().getNamespace(), resource.getMetadata().getName()); + Subscription subscription = get(resource.getMetadata().getNamespace(), + resource.getMetadata().getName()); if (subscription == null || subscription.getStatus() == null) { return false; } - return subscription - .getStatus() - .getConditions() - .stream() + return subscription.getStatus().getConditions().stream() .filter(condition -> condition.getType().equals("CatalogSourcesUnhealthy")) - .map(condition -> condition.getStatus().equals("False")) - .findFirst() - .orElse(false); + .map(condition -> condition.getStatus().equals("False")).findFirst().orElse(false); } @Override @@ -73,28 +70,11 @@ public void refreshResource(Subscription existing, Subscription newResource) { /** Get default instances **/ - public static Subscription getDefault( - String name, - String namespace, - String packageName, - String sourceName, - String sourceNamespace, - String startingCSV, - String channel - ) { - return new SubscriptionBuilder() - .withNewMetadata() - 
.withName(name) - .withNamespace(namespace) - .endMetadata() - .withNewSpec() - .withName(packageName) - .withSource(sourceName) - .withSourceNamespace(sourceNamespace) - .withStartingCSV(startingCSV) - .withChannel(channel) - .withInstallPlanApproval("Automatic") - .endSpec() - .build(); + public static Subscription getDefault(String name, String namespace, String packageName, + String sourceName, String sourceNamespace, String startingCSV, String channel) { + return new SubscriptionBuilder().withNewMetadata().withName(name).withNamespace(namespace) + .endMetadata().withNewSpec().withName(packageName).withSource(sourceName) + .withSourceNamespace(sourceNamespace).withStartingCSV(startingCSV).withChannel(channel) + .withInstallPlanApproval("Automatic").endSpec().build(); } } diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/resolver/ExtensionContextParameterResolver.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/resolver/ExtensionContextParameterResolver.java index 387ad7662e..1cfe24e9f6 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/resolver/ExtensionContextParameterResolver.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/resolver/ExtensionContextParameterResolver.java @@ -8,16 +8,14 @@ public class ExtensionContextParameterResolver implements ParameterResolver { @Override - public boolean supportsParameter( - ParameterContext parameterContext, ExtensionContext extensionContext - ) throws ParameterResolutionException { + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { return (parameterContext.getParameter().getType() == ExtensionContext.class); } @Override - public Object resolveParameter( - ParameterContext parameterContext, ExtensionContext extensionContext - ) throws ParameterResolutionException { + public Object resolveParameter(ParameterContext parameterContext, 
ExtensionContext extensionContext) + throws ParameterResolutionException { return extensionContext; } } \ No newline at end of file diff --git a/system-tests/src/main/java/io/apicurio/registry/systemtests/time/TimeoutBudget.java b/system-tests/src/main/java/io/apicurio/registry/systemtests/time/TimeoutBudget.java index 018089789d..748a1453cf 100644 --- a/system-tests/src/main/java/io/apicurio/registry/systemtests/time/TimeoutBudget.java +++ b/system-tests/src/main/java/io/apicurio/registry/systemtests/time/TimeoutBudget.java @@ -39,6 +39,7 @@ public long timeLeft() { /** * Get the remaining time of the budget. + * * @return The remaining time. */ public Duration remaining() { diff --git a/system-tests/src/test/java/io/apicurio/registry/systemtests/APITests.java b/system-tests/src/test/java/io/apicurio/registry/systemtests/APITests.java index 7da2efcdb1..22c1f1ac18 100644 --- a/system-tests/src/test/java/io/apicurio/registry/systemtests/APITests.java +++ b/system-tests/src/test/java/io/apicurio/registry/systemtests/APITests.java @@ -18,7 +18,8 @@ public static void run(ApicurioRegistry apicurioRegistry) { run(apicurioRegistry, null, null, false); } - public static void run(ApicurioRegistry apicurioRegistry, String username, String password, boolean useToken) { + public static void run(ApicurioRegistry apicurioRegistry, String username, String password, + boolean useToken) { LOGGER.info("Running API tests..."); // Wait for readiness of Apicurio Registry hostname @@ -51,7 +52,8 @@ public static void run(ApicurioRegistry apicurioRegistry, String username, Strin // Create artifact LOGGER.info("Creating artifact..."); - Assertions.assertTrue(client.createArtifact(artifactGroupId, artifactId, ArtifactType.AVRO, artifactContent)); + Assertions.assertTrue( + client.createArtifact(artifactGroupId, artifactId, ArtifactType.AVRO, artifactContent)); // List artifacts LOGGER.info("Listing artifacts..."); diff --git 
a/system-tests/src/test/java/io/apicurio/registry/systemtests/OLMTests.java b/system-tests/src/test/java/io/apicurio/registry/systemtests/OLMTests.java index 800fbf8fd0..0f2c41d01e 100644 --- a/system-tests/src/test/java/io/apicurio/registry/systemtests/OLMTests.java +++ b/system-tests/src/test/java/io/apicurio/registry/systemtests/OLMTests.java @@ -31,7 +31,8 @@ public void setClusterWide(boolean clusterWide) { public void testBeforeEach(ExtensionContext testContext) throws InterruptedException { LOGGER.info("BeforeEach: " + testContext.getDisplayName()); - ApicurioRegistryOLMOperatorType registryOLMOperator = new ApicurioRegistryOLMOperatorType(clusterWide); + ApicurioRegistryOLMOperatorType registryOLMOperator = new ApicurioRegistryOLMOperatorType( + clusterWide); operatorManager.installOperator(registryOLMOperator); } @@ -54,11 +55,8 @@ public void testMultipleNamespaces(ExtensionContext testContext) throws Interrup DatabaseUtils.deployPostgresqlDatabase(testContext, secondSqlName, secondSqlNamespace); // Get second Apicurio Registry with second PostgreSQL database ApicurioRegistry secondSqlRegistry = ApicurioRegistryResourceType.getDefaultSql( - Constants.REGISTRY + suffix, - Constants.TESTSUITE_NAMESPACE + suffix, - secondSqlName, - secondSqlNamespace - ); + Constants.REGISTRY + suffix, Constants.TESTSUITE_NAMESPACE + suffix, secondSqlName, + secondSqlNamespace); // Deploy second Apicurio Registry with second PostgreSQL database if (clusterWide) { diff --git a/system-tests/src/test/java/io/apicurio/registry/systemtests/OLMUpgradeTests.java b/system-tests/src/test/java/io/apicurio/registry/systemtests/OLMUpgradeTests.java index f92b187cf2..707106bd41 100644 --- a/system-tests/src/test/java/io/apicurio/registry/systemtests/OLMUpgradeTests.java +++ b/system-tests/src/test/java/io/apicurio/registry/systemtests/OLMUpgradeTests.java @@ -33,22 +33,24 @@ public void testAfterEach(ExtensionContext testContext) { operatorManager.uninstallOperators(); } - public void 
runUpgradeTest(ExtensionContext testContext, boolean clusterWide) throws InterruptedException { - // Install operator from default catalog (do not use catalog source image, it will be used for upgrade) - ApicurioRegistryOLMOperatorType registryOLMOperator = new ApicurioRegistryOLMOperatorType( - null, - clusterWide - ); + public void runUpgradeTest(ExtensionContext testContext, boolean clusterWide) + throws InterruptedException { + // Install operator from default catalog (do not use catalog source image, it will be used for + // upgrade) + ApicurioRegistryOLMOperatorType registryOLMOperator = new ApicurioRegistryOLMOperatorType(null, + clusterWide); operatorManager.installOperator(registryOLMOperator); // Save current (pre-upgrade) ClusterServiceVersion of operator - DefaultArtifactVersion oldVersion = new DefaultArtifactVersion(registryOLMOperator.getClusterServiceVersion()); + DefaultArtifactVersion oldVersion = new DefaultArtifactVersion( + registryOLMOperator.getClusterServiceVersion()); // DEPLOY REGISTRY // Deploy PostgreSQL database for registry DatabaseUtils.deployDefaultPostgresqlDatabase(testContext); // Deploy registry with PostgreSQL storage - ApicurioRegistry apicurioRegistry = ApicurioRegistryUtils.deployDefaultApicurioRegistrySql(testContext, false); + ApicurioRegistry apicurioRegistry = ApicurioRegistryUtils + .deployDefaultApicurioRegistrySql(testContext, false); // Run basic API tests APITests.run(apicurioRegistry); @@ -63,25 +65,24 @@ public void runUpgradeTest(ExtensionContext testContext, boolean clusterWide) th // Create artifact one by one for (int i = 0; i < artifactsCount; i++) { // Create one single artifact in registry - apiClient.createArtifact("testsuite-upgrade", "upgrade-" + i, ArtifactType.AVRO, ArtifactContent.DEFAULT_AVRO); + apiClient.createArtifact("testsuite-upgrade", "upgrade-" + i, ArtifactType.AVRO, + ArtifactContent.DEFAULT_AVRO); } // CHECK CREATION OF ARTIFACTS // Get list of artifacts ArtifactList artifactList = 
apiClient.listArtifacts(); // Check number of present artifacts - Assertions.assertEquals(artifactList.getCount(), artifactsCount, MessageFormat.format( - "Registry {0} does not contain {1} artifacts, but {2}.", - apicurioRegistry.getMetadata().getName(), - artifactsCount, - artifactList.getCount() - )); + Assertions.assertEquals(artifactList.getCount(), artifactsCount, + MessageFormat.format("Registry {0} does not contain {1} artifacts, but {2}.", + apicurioRegistry.getMetadata().getName(), artifactsCount, artifactList.getCount())); // Run upgrade of operator from catalog source image registryOLMOperator.upgrade(); // Save current (post-upgrade) ClusterServiceVersion of operator - DefaultArtifactVersion newVersion = new DefaultArtifactVersion(registryOLMOperator.getClusterServiceVersion()); + DefaultArtifactVersion newVersion = new DefaultArtifactVersion( + registryOLMOperator.getClusterServiceVersion()); // Check if operator is updated if (oldVersion.compareTo(newVersion) < 0) { @@ -100,12 +101,9 @@ public void runUpgradeTest(ExtensionContext testContext, boolean clusterWide) th // Get list of artifacts artifactList = apiClient.listArtifacts(); // Check number of present artifacts - Assertions.assertEquals(artifactList.getCount(), artifactsCount, MessageFormat.format( - "Registry {0} does not contain {1} artifacts, but {2}.", - apicurioRegistry.getMetadata().getName(), - artifactsCount, - artifactList.getCount() - )); + Assertions.assertEquals(artifactList.getCount(), artifactsCount, + MessageFormat.format("Registry {0} does not contain {1} artifacts, but {2}.", + apicurioRegistry.getMetadata().getName(), artifactsCount, artifactList.getCount())); } @Test diff --git a/system-tests/src/test/java/io/apicurio/registry/systemtests/TestBase.java b/system-tests/src/test/java/io/apicurio/registry/systemtests/TestBase.java index 8b4a64b8de..b35404f83d 100644 --- a/system-tests/src/test/java/io/apicurio/registry/systemtests/TestBase.java +++ 
b/system-tests/src/test/java/io/apicurio/registry/systemtests/TestBase.java @@ -86,7 +86,8 @@ protected void afterAllTests() throws InterruptedException { @BeforeEach protected void beforeEachTest(TestInfo testInfo) { LoggerUtils.logDelimiter("#"); - LOGGER.info("[TEST-START] {}.{}-STARTED", testInfo.getTestClass().get().getName(), testInfo.getDisplayName()); + LOGGER.info("[TEST-START] {}.{}-STARTED", testInfo.getTestClass().get().getName(), + testInfo.getDisplayName()); LoggerUtils.logDelimiter("#"); LOGGER.info(""); } @@ -95,17 +96,13 @@ protected void beforeEachTest(TestInfo testInfo) { protected void afterEachTest(TestInfo testInfo) { LOGGER.info(""); LoggerUtils.logDelimiter("#"); - LOGGER.info("[TEST-END] {}.{}-FINISHED", testInfo.getTestClass().get().getName(), testInfo.getDisplayName()); + LOGGER.info("[TEST-END] {}.{}-FINISHED", testInfo.getTestClass().get().getName(), + testInfo.getDisplayName()); LoggerUtils.logDelimiter("#"); } - protected void runTest( - ExtensionContext testContext, - PersistenceKind persistenceKind, - KafkaKind kafkaKind, - boolean useKeycloak, - boolean testAPI - ) throws InterruptedException { + protected void runTest(ExtensionContext testContext, PersistenceKind persistenceKind, KafkaKind kafkaKind, + boolean useKeycloak, boolean testAPI) throws InterruptedException { ApicurioRegistry registry = null; if (persistenceKind.equals(PersistenceKind.SQL)) { @@ -121,28 +118,20 @@ protected void runTest( // Deploy noAuthKafka KafkaUtils.deployDefaultKafkaNoAuth(testContext); - registry = ApicurioRegistryUtils.deployDefaultApicurioRegistryKafkasqlNoAuth( - testContext, - useKeycloak - ); + registry = ApicurioRegistryUtils.deployDefaultApicurioRegistryKafkasqlNoAuth(testContext, + useKeycloak); } else if (kafkaKind.equals(KafkaKind.TLS)) { // Deploy tlsKafka kafka = KafkaUtils.deployDefaultKafkaTls(testContext); - registry = ApicurioRegistryUtils.deployDefaultApicurioRegistryKafkasqlTLS( - testContext, - kafka, - useKeycloak - ); + 
registry = ApicurioRegistryUtils.deployDefaultApicurioRegistryKafkasqlTLS(testContext, kafka, + useKeycloak); } else if (kafkaKind.equals(KafkaKind.SCRAM)) { // Deploy scramKafka kafka = KafkaUtils.deployDefaultKafkaScram(testContext); - registry = ApicurioRegistryUtils.deployDefaultApicurioRegistryKafkasqlSCRAM( - testContext, - kafka, - useKeycloak - ); + registry = ApicurioRegistryUtils.deployDefaultApicurioRegistryKafkasqlSCRAM(testContext, + kafka, useKeycloak); } else { LOGGER.error("Unrecognized KafkaKind: {}.", kafkaKind); } diff --git a/system-tests/src/test/java/io/apicurio/registry/systemtests/Tests.java b/system-tests/src/test/java/io/apicurio/registry/systemtests/Tests.java index bb99a1404a..9b43986f78 100644 --- a/system-tests/src/test/java/io/apicurio/registry/systemtests/Tests.java +++ b/system-tests/src/test/java/io/apicurio/registry/systemtests/Tests.java @@ -55,7 +55,8 @@ public void testRegistrySqlKeycloak(ExtensionContext testContext) throws Interru /* TESTS - KafkaSQL */ @Test - public void testRegistryKafkasqlNoAuthNoKeycloak(ExtensionContext testContext) throws InterruptedException { + public void testRegistryKafkasqlNoAuthNoKeycloak(ExtensionContext testContext) + throws InterruptedException { runTest(testContext, PersistenceKind.KAFKA_SQL, KafkaKind.NO_AUTH, false, true); } @@ -75,7 +76,8 @@ public void testRegistryKafkasqlTLSKeycloak(ExtensionContext testContext) throws } @Test - public void testRegistryKafkasqlSCRAMNoKeycloak(ExtensionContext testContext) throws InterruptedException { + public void testRegistryKafkasqlSCRAMNoKeycloak(ExtensionContext testContext) + throws InterruptedException { runTest(testContext, PersistenceKind.KAFKA_SQL, KafkaKind.SCRAM, false, true); } diff --git a/utils/converter/pom.xml b/utils/converter/pom.xml index 88401839f8..84baf04780 100644 --- a/utils/converter/pom.xml +++ b/utils/converter/pom.xml @@ -1,46 +1,43 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 
4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-utils-converter - jar - apicurio-registry-utils-converter + apicurio-registry-utils-converter + jar + apicurio-registry-utils-converter - + - - io.apicurio - apicurio-registry-serdes-avro-serde - + + io.apicurio + apicurio-registry-serdes-avro-serde + - - io.apicurio - apicurio-registry-serdes-jsonschema-serde - + + io.apicurio + apicurio-registry-serdes-jsonschema-serde + - - org.apache.kafka - connect-api - + + org.apache.kafka + connect-api + - - org.apache.kafka - connect-json - + + org.apache.kafka + connect-json + - - org.junit.jupiter - junit-jupiter - test - - + + org.junit.jupiter + junit-jupiter + test + + diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/AvroConverter.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/AvroConverter.java index 3b80dc696b..5414ee0cd4 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/AvroConverter.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/AvroConverter.java @@ -16,7 +16,6 @@ /** * Avro converter. 
- * */ public class AvroConverter extends SerdeBasedConverter { private AvroData avroData; @@ -43,7 +42,7 @@ protected Class deserializerClass() { return AvroKafkaDeserializer.class; } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({ "unchecked", "rawtypes" }) @Override public void configure(Map configs, boolean isKey) { // set defaults @@ -59,7 +58,7 @@ public void configure(Map configs, boolean isKey) { @SuppressWarnings("unchecked") @Override protected T applySchema(Schema schema, Object value) { - //noinspection unchecked + // noinspection unchecked return (T) avroData.fromConnectData(schema, value); } diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ConnectEnum.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ConnectEnum.java index b963dd8629..037123ed9f 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ConnectEnum.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ConnectEnum.java @@ -9,109 +9,102 @@ public class ConnectEnum { - public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Enum"; + public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Enum"; - /** - * Returns a SchemaBuilder for an Enum. - * - * @param annotation an arbitrary annotation to be associated with the enum - * @param symbols the enum symbols - * @return a SchemaBuilder - */ - public static SchemaBuilder builder(String annotation, List symbols) { - SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation); - for (int i = 0; i < symbols.size(); i++) { - builder.parameter(LOGICAL_PARAMETER + "." + symbols.get(i), String.valueOf(i)); + /** + * Returns a SchemaBuilder for an Enum. 
+ * + * @param annotation an arbitrary annotation to be associated with the enum + * @param symbols the enum symbols + * @return a SchemaBuilder + */ + public static SchemaBuilder builder(String annotation, List symbols) { + SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation); + for (int i = 0; i < symbols.size(); i++) { + builder.parameter(LOGICAL_PARAMETER + "." + symbols.get(i), String.valueOf(i)); + } + return builder; } - return builder; - } - /** - * Returns a SchemaBuilder for an Enum. - * - * @param annotation an arbitrary annotation to be associated with the enum - * @param symbols a map of enum symbol to its ordinal - * @return a SchemaBuilder - */ - public static SchemaBuilder builder(String annotation, Map symbols) { - SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation); - for (Map.Entry symbol : symbols.entrySet()) { - builder.parameter(LOGICAL_PARAMETER + "." + symbol.getKey(), - String.valueOf(symbol.getValue())); + /** + * Returns a SchemaBuilder for an Enum. + * + * @param annotation an arbitrary annotation to be associated with the enum + * @param symbols a map of enum symbol to its ordinal + * @return a SchemaBuilder + */ + public static SchemaBuilder builder(String annotation, Map symbols) { + SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation); + for (Map.Entry symbol : symbols.entrySet()) { + builder.parameter(LOGICAL_PARAMETER + "." + symbol.getKey(), String.valueOf(symbol.getValue())); + } + return builder; } - return builder; - } - /** - * Returns whether a schema represents an Enum. - * - * @param schema the schema - * @return whether the schema represents an Enum - */ - public static boolean isEnum(Schema schema) { - return schema != null - && schema.parameters() != null - && schema.parameters().containsKey(LOGICAL_PARAMETER); - } + /** + * Returns whether a schema represents an Enum. 
+ * + * @param schema the schema + * @return whether the schema represents an Enum + */ + public static boolean isEnum(Schema schema) { + return schema != null && schema.parameters() != null + && schema.parameters().containsKey(LOGICAL_PARAMETER); + } - /** - * Returns whether a schema has an Enum symbol. - * - * @param schema the schema - * @param symbol the enum symbol - * @return whether the schema represents an Enum - */ - public static boolean hasEnumSymbol(Schema schema, String symbol) { - return schema != null - && schema.parameters() != null - && schema.parameters().containsKey(LOGICAL_PARAMETER) - && schema.parameters().containsKey(LOGICAL_PARAMETER + "." + symbol); - } + /** + * Returns whether a schema has an Enum symbol. + * + * @param schema the schema + * @param symbol the enum symbol + * @return whether the schema represents an Enum + */ + public static boolean hasEnumSymbol(Schema schema, String symbol) { + return schema != null && schema.parameters() != null + && schema.parameters().containsKey(LOGICAL_PARAMETER) + && schema.parameters().containsKey(LOGICAL_PARAMETER + "." + symbol); + } - /** - * Convert a value from its logical format (Enum) to its encoded format. - * - * @param schema the schema - * @param value the logical value - * @return the encoded value - */ - public static > String fromLogical(Schema schema, T value) { - if (!hasEnumSymbol(schema, value.name())) { - throw new DataException( - "Requested conversion of Enum object but the schema does not match."); + /** + * Convert a value from its logical format (Enum) to its encoded format. 
+ * + * @param schema the schema + * @param value the logical value + * @return the encoded value + */ + public static > String fromLogical(Schema schema, T value) { + if (!hasEnumSymbol(schema, value.name())) { + throw new DataException("Requested conversion of Enum object but the schema does not match."); + } + return value.name(); } - return value.name(); - } - /** - * Convert a value from its encoded format to its logical format (Enum). - * - * @param schema the schema - * @param cls the class of the logical value - * @param symbol the enum symbol - * @return the logical value - */ - public static > T toLogical(Schema schema, Class cls, - String symbol) { - if (!hasEnumSymbol(schema, symbol)) { - throw new DataException( - "Requested conversion of Enum object but the schema does not match."); + /** + * Convert a value from its encoded format to its logical format (Enum). + * + * @param schema the schema + * @param cls the class of the logical value + * @param symbol the enum symbol + * @return the logical value + */ + public static > T toLogical(Schema schema, Class cls, String symbol) { + if (!hasEnumSymbol(schema, symbol)) { + throw new DataException("Requested conversion of Enum object but the schema does not match."); + } + return Enum.valueOf(cls, symbol); } - return Enum.valueOf(cls, symbol); - } - /** - * Convert a value from its encoded format to its ordinal. - * - * @param schema the schema - * @param symbol the enum symbol - * @return the ordinal - */ - public static int toOrdinal(Schema schema, String symbol) { - if (!hasEnumSymbol(schema, symbol)) { - throw new DataException( - "Requested conversion of Enum object but the schema does not match."); + /** + * Convert a value from its encoded format to its ordinal. 
+ * + * @param schema the schema + * @param symbol the enum symbol + * @return the ordinal + */ + public static int toOrdinal(Schema schema, String symbol) { + if (!hasEnumSymbol(schema, symbol)) { + throw new DataException("Requested conversion of Enum object but the schema does not match."); + } + return Integer.parseInt(schema.parameters().get(LOGICAL_PARAMETER + "." + symbol)); } - return Integer.parseInt(schema.parameters().get(LOGICAL_PARAMETER + "." + symbol)); - } } diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ConnectUnion.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ConnectUnion.java index 60b2e12530..b203216069 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ConnectUnion.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ConnectUnion.java @@ -9,104 +9,100 @@ public class ConnectUnion { - public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Union"; + public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Union"; - /** - * Returns a SchemaBuilder for a Union. - * - * @param annotation an arbitrary annotation to be associated with the union - * @return a SchemaBuilder - */ - public static SchemaBuilder builder(String annotation) { - return SchemaBuilder.struct().parameter(LOGICAL_PARAMETER, annotation); - } - - /** - * Returns whether a schema represents a Union. - * - * @param schema the schema - * @return whether the schema represents a Union - */ - public static boolean isUnion(Schema schema) { - return schema != null - && schema.parameters() != null - && schema.parameters().containsKey(LOGICAL_PARAMETER); - } - - /** - * Convert a value from its logical format (Union) to it's encoded format. 
- * - * @param schema the schema - * @param value the logical value - * @return the encoded value - */ - public static Object fromLogical(Schema schema, Struct value) { - if (!isUnion(schema)) { - throw new DataException( - "Requested conversion of Union object but the schema does not match."); - } - for (Field field : schema.fields()) { - Object object = value.get(field); - if (object != null) { - return object; - } + /** + * Returns a SchemaBuilder for a Union. + * + * @param annotation an arbitrary annotation to be associated with the union + * @return a SchemaBuilder + */ + public static SchemaBuilder builder(String annotation) { + return SchemaBuilder.struct().parameter(LOGICAL_PARAMETER, annotation); } - return null; - } - /** - * Convert a value from its encoded format to its logical format (Union). - * The value is associated with the field whose schema matches the given value. - * - * @param schema the schema - * @param value the encoded value - * @return the logical value - */ - public static Struct toLogical(Schema schema, Object value) { - if (!isUnion(schema)) { - throw new DataException( - "Requested conversion of Union object but the schema does not match."); + /** + * Returns whether a schema represents a Union. + * + * @param schema the schema + * @return whether the schema represents a Union + */ + public static boolean isUnion(Schema schema) { + return schema != null && schema.parameters() != null + && schema.parameters().containsKey(LOGICAL_PARAMETER); } - Struct struct = new Struct(schema); - for (Field field : schema.fields()) { - if (validate(field.schema(), value)) { - struct.put(field, value); - break; - } + + /** + * Convert a value from its logical format (Union) to its encoded format.
+ * + * @param schema the schema + * @param value the logical value + * @return the encoded value + */ + public static Object fromLogical(Schema schema, Struct value) { + if (!isUnion(schema)) { + throw new DataException("Requested conversion of Union object but the schema does not match."); + } + for (Field field : schema.fields()) { + Object object = value.get(field); + if (object != null) { + return object; + } + } + return null; } - return struct; - } - private static boolean validate(Schema schema, Object value) { - try { - ConnectSchema.validateValue(schema, value); - } catch (DataException e) { - return false; + /** + * Convert a value from its encoded format to its logical format (Union). The value is associated with the + * field whose schema matches the given value. + * + * @param schema the schema + * @param value the encoded value + * @return the logical value + */ + public static Struct toLogical(Schema schema, Object value) { + if (!isUnion(schema)) { + throw new DataException("Requested conversion of Union object but the schema does not match."); + } + Struct struct = new Struct(schema); + for (Field field : schema.fields()) { + if (validate(field.schema(), value)) { + struct.put(field, value); + break; + } + } + return struct; } - return true; - } - /** - * Convert a value from its encoded format to its logical format (Union). - * The value is associated with the field with the given field name. 
- * - * @param schema the schema - * @param fieldName the field name - * @param value the encoded value - * @return the logical value - */ - public static Struct toLogicalUsingName(Schema schema, String fieldName, Object value) { - if (!isUnion(schema)) { - throw new DataException( - "Requested conversion of Union object but the schema does not match."); + private static boolean validate(Schema schema, Object value) { + try { + ConnectSchema.validateValue(schema, value); + } catch (DataException e) { + return false; + } + return true; } - Struct struct = new Struct(schema); - for (Field field : schema.fields()) { - if (field.name().equals(fieldName)) { - struct.put(field, value); - break; - } + + /** + * Convert a value from its encoded format to its logical format (Union). The value is associated with the + * field with the given field name. + * + * @param schema the schema + * @param fieldName the field name + * @param value the encoded value + * @return the logical value + */ + public static Struct toLogicalUsingName(Schema schema, String fieldName, Object value) { + if (!isUnion(schema)) { + throw new DataException("Requested conversion of Union object but the schema does not match."); + } + Struct struct = new Struct(schema); + for (Field field : schema.fields()) { + if (field.name().equals(fieldName)) { + struct.put(field, value); + break; + } + } + return struct; } - return struct; - } } diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ExtJsonConverter.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ExtJsonConverter.java index 16c955dabf..d81054692c 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ExtJsonConverter.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/ExtJsonConverter.java @@ -33,7 +33,8 @@ import java.util.Map; import java.util.Objects; -public class ExtJsonConverter extends SchemaResolverConfigurer implements Converter, 
SchemaParser, AutoCloseable { +public class ExtJsonConverter extends SchemaResolverConfigurer + implements Converter, SchemaParser, AutoCloseable { private final JsonConverter jsonConverter; private final JsonConverter deserializingConverter; private final ObjectMapper mapper; @@ -85,7 +86,8 @@ public byte[] fromConnectData(String topic, Headers headers, Schema schema, Obje return null; } - JsonConverterRecord record = new JsonConverterRecord(new JsonConverterMetadata(topic, isKey, headers, schema), value); + JsonConverterRecord record = new JsonConverterRecord( + new JsonConverterMetadata(topic, isKey, headers, schema), value); SchemaLookupResult schemaLookupResult = getSchemaResolver().resolveSchema(record); byte[] payload = jsonConverter.fromConnectData(topic, schema, value); @@ -100,12 +102,14 @@ public SchemaAndValue toConnectData(String topic, byte[] value) { long globalId = ip.getGlobalId(); - SchemaLookupResult schemaLookupResult = getSchemaResolver().resolveSchemaByArtifactReference(ArtifactReference.builder().globalId(globalId).build()); + SchemaLookupResult schemaLookupResult = getSchemaResolver() + .resolveSchemaByArtifactReference(ArtifactReference.builder().globalId(globalId).build()); JsonNode parsedSchema = schemaLookupResult.getParsedSchema().getParsedSchema(); JsonNode dataDeserialized = jsonDeserializer.deserialize(topic, ip.getPayload()); - //Since the json converter is expecting the data to have the schema to fully validate it, we build an envelope object containing the schema from registry and the data deserialized + // Since the json converter is expecting the data to have the schema to fully validate it, we build an + // envelope object containing the schema from registry and the data deserialized ObjectNode envelope = JsonNodeFactory.withExactBigDecimals(true).objectNode(); envelope.set("schema", parsedSchema); envelope.set("payload", dataDeserialized); @@ -119,7 +123,8 @@ public SchemaAndValue toConnectData(String topic, byte[] value) { 
throw new RuntimeException(e); } - Schema schema = deserializingConverter.asConnectSchema(schemaLookupResult.getParsedSchema().getParsedSchema()); + Schema schema = deserializingConverter + .asConnectSchema(schemaLookupResult.getParsedSchema().getParsedSchema()); return new SchemaAndValue(schema, sav.value()); } @@ -152,8 +157,7 @@ public ParsedSchema getSchemaFromData(Record data) { JsonConverterRecord jcr = (JsonConverterRecord) data; JsonNode jsonSchema = jsonConverter.asJsonSchema(jcr.metadata().getSchema()); String schemaString = jsonSchema != null ? jsonSchema.toString() : null; - return new ParsedSchemaImpl() - .setParsedSchema(jsonSchema) + return new ParsedSchemaImpl().setParsedSchema(jsonSchema) .setRawSchema(IoUtil.toBytes(schemaString)); } diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/SerdeBasedConverter.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/SerdeBasedConverter.java index 611789afa3..9186d8da56 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/SerdeBasedConverter.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/SerdeBasedConverter.java @@ -5,7 +5,6 @@ import io.apicurio.registry.serde.AbstractKafkaDeserializer; import io.apicurio.registry.serde.AbstractKafkaSerializer; import io.apicurio.registry.utils.IoUtil; - import org.apache.kafka.common.header.Headers; import org.apache.kafka.common.serialization.Deserializer; import org.apache.kafka.common.serialization.Serializer; @@ -20,7 +19,6 @@ /** * Very simplistic converter that delegates most of the work to the configured serializer and deserializer. * Subclasses should override applySchema(Schema, Object) and provideSchema(T) or toSchemaAndValue(T). 
- * */ @SuppressWarnings("rawtypes") public class SerdeBasedConverter implements Converter, Closeable { @@ -46,7 +44,7 @@ protected Class deserializerClass() { return Deserializer.class; } - //set converter's schema resolver, to share the cache between serializer and deserializer + // set converter's schema resolver, to share the cache between serializer and deserializer @SuppressWarnings("unchecked") @Override public void configure(Map configs, boolean isKey) { @@ -90,7 +88,7 @@ public void close() { @SuppressWarnings("unchecked") protected T applySchema(Schema schema, Object value) { - //noinspection unchecked + // noinspection unchecked return (T) value; } diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/avro/AvroData.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/avro/AvroData.java index 5873a1b445..1e018d9af1 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/avro/AvroData.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/avro/AvroData.java @@ -21,8 +21,8 @@ import org.apache.kafka.common.cache.Cache; import org.apache.kafka.common.cache.LRUCache; import org.apache.kafka.common.cache.SynchronizedCache; -import org.apache.kafka.connect.data.Date; import org.apache.kafka.connect.data.*; +import org.apache.kafka.connect.data.Date; import org.apache.kafka.connect.errors.DataException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,11 +36,10 @@ import java.util.*; import java.util.regex.Pattern; - /** * Utilities for converting between our runtime data format and Avro, and (de)serializing that data. 
*/ -@SuppressWarnings({"unused", "rawtypes", "unchecked"}) +@SuppressWarnings({ "unused", "rawtypes", "unchecked" }) public class AvroData { private static final Logger log = LoggerFactory.getLogger(AvroData.class); @@ -66,8 +65,8 @@ public class AvroData { public static final String AVRO_RECORD_DOC_PROP = NAMESPACE + ".record.doc"; public static final String AVRO_ENUM_DOC_PREFIX_PROP = NAMESPACE + ".enum.doc."; public static final String AVRO_FIELD_DOC_PREFIX_PROP = NAMESPACE + ".field.doc."; - //This property is used to determine whether a default value in the Connect schema originated - //from an Avro field default + // This property is used to determine whether a default value in the Connect schema originated + // from an Avro field default public static final String AVRO_FIELD_DEFAULT_FLAG_PROP = NAMESPACE + ".field.default"; public static final String AVRO_ENUM_DEFAULT_PREFIX_PROP = NAMESPACE + ".enum.default."; @@ -84,8 +83,7 @@ public class AvroData { public static final String GENERALIZED_TYPE_UNION = ConnectUnion.LOGICAL_PARAMETER; public static final String GENERALIZED_TYPE_ENUM = ConnectEnum.LOGICAL_PARAMETER; public static final String GENERALIZED_TYPE_UNION_PREFIX = "connect_union_"; - public static final String GENERALIZED_TYPE_UNION_FIELD_PREFIX = - GENERALIZED_TYPE_UNION_PREFIX + "field_"; + public static final String GENERALIZED_TYPE_UNION_FIELD_PREFIX = GENERALIZED_TYPE_UNION_PREFIX + "field_"; private static final Map NON_AVRO_TYPES_BY_TYPE_CODE = new HashMap<>(); @@ -107,15 +105,13 @@ public class AvroData { SIMPLE_AVRO_SCHEMA_TYPES.put(Schema.Type.FLOAT64, Arrays.asList((Class) Double.class)); SIMPLE_AVRO_SCHEMA_TYPES.put(Schema.Type.BOOLEAN, Arrays.asList((Class) Boolean.class)); SIMPLE_AVRO_SCHEMA_TYPES.put(Schema.Type.STRING, Arrays.asList((Class) CharSequence.class)); - SIMPLE_AVRO_SCHEMA_TYPES.put( - Schema.Type.BYTES, - Arrays.asList((Class) ByteBuffer.class, (Class) byte[].class, (Class) GenericFixed.class)); + 
SIMPLE_AVRO_SCHEMA_TYPES.put(Schema.Type.BYTES, + Arrays.asList((Class) ByteBuffer.class, (Class) byte[].class, (Class) GenericFixed.class)); SIMPLE_AVRO_SCHEMA_TYPES.put(Schema.Type.ARRAY, Arrays.asList((Class) Collection.class)); SIMPLE_AVRO_SCHEMA_TYPES.put(Schema.Type.MAP, Arrays.asList((Class) Map.class)); } - private static final Map CONNECT_TYPES_TO_AVRO_TYPES - = new HashMap<>(); + private static final Map CONNECT_TYPES_TO_AVRO_TYPES = new HashMap<>(); static { CONNECT_TYPES_TO_AVRO_TYPES.put(Schema.Type.INT32, org.apache.avro.Schema.Type.INT); @@ -129,7 +125,6 @@ public class AvroData { CONNECT_TYPES_TO_AVRO_TYPES.put(Schema.Type.MAP, org.apache.avro.Schema.Type.MAP); } - private static final String ANYTHING_SCHEMA_BOOLEAN_FIELD = "boolean"; private static final String ANYTHING_SCHEMA_BYTES_FIELD = "bytes"; private static final String ANYTHING_SCHEMA_DOUBLE_FIELD = "double"; @@ -143,43 +138,37 @@ public class AvroData { public static final org.apache.avro.Schema ANYTHING_SCHEMA_MAP_ELEMENT; public static final org.apache.avro.Schema ANYTHING_SCHEMA; - private static final org.apache.avro.Schema - NULL_AVRO_SCHEMA = - org.apache.avro.Schema.create(org.apache.avro.Schema.Type.NULL); + private static final org.apache.avro.Schema NULL_AVRO_SCHEMA = org.apache.avro.Schema + .create(org.apache.avro.Schema.Type.NULL); static { // Intuitively this should be a union schema. 
However, unions can't be named in Avro and this // is a self-referencing type, so we need to use a format in which we can name the entire schema - ANYTHING_SCHEMA = - org.apache.avro.SchemaBuilder.record(AVRO_TYPE_ANYTHING).namespace(NAMESPACE).fields() - .optionalBoolean(ANYTHING_SCHEMA_BOOLEAN_FIELD) - .optionalBytes(ANYTHING_SCHEMA_BYTES_FIELD) - .optionalDouble(ANYTHING_SCHEMA_DOUBLE_FIELD) - .optionalFloat(ANYTHING_SCHEMA_FLOAT_FIELD) - .optionalInt(ANYTHING_SCHEMA_INT_FIELD) - .optionalLong(ANYTHING_SCHEMA_LONG_FIELD) - .optionalString(ANYTHING_SCHEMA_STRING_FIELD) - .name(ANYTHING_SCHEMA_ARRAY_FIELD).type().optional().array() - .items().type(AVRO_TYPE_ANYTHING) - .name(ANYTHING_SCHEMA_MAP_FIELD).type().optional().array() - .items().record(MAP_ENTRY_TYPE_NAME).namespace(NAMESPACE).fields() - .name(KEY_FIELD).type(AVRO_TYPE_ANYTHING).noDefault() - .name(VALUE_FIELD).type(AVRO_TYPE_ANYTHING).noDefault() - .endRecord() - .endRecord(); + ANYTHING_SCHEMA = org.apache.avro.SchemaBuilder.record(AVRO_TYPE_ANYTHING).namespace(NAMESPACE) + .fields().optionalBoolean(ANYTHING_SCHEMA_BOOLEAN_FIELD) + .optionalBytes(ANYTHING_SCHEMA_BYTES_FIELD).optionalDouble(ANYTHING_SCHEMA_DOUBLE_FIELD) + .optionalFloat(ANYTHING_SCHEMA_FLOAT_FIELD).optionalInt(ANYTHING_SCHEMA_INT_FIELD) + .optionalLong(ANYTHING_SCHEMA_LONG_FIELD).optionalString(ANYTHING_SCHEMA_STRING_FIELD) + .name(ANYTHING_SCHEMA_ARRAY_FIELD).type().optional().array().items().type(AVRO_TYPE_ANYTHING) + .name(ANYTHING_SCHEMA_MAP_FIELD).type().optional().array().items().record(MAP_ENTRY_TYPE_NAME) + .namespace(NAMESPACE).fields().name(KEY_FIELD).type(AVRO_TYPE_ANYTHING).noDefault() + .name(VALUE_FIELD).type(AVRO_TYPE_ANYTHING).noDefault().endRecord().endRecord(); // This is convenient to have extracted; we can't define it before ANYTHING_SCHEMA because it // uses ANYTHING_SCHEMA in its definition. 
- ANYTHING_SCHEMA_MAP_ELEMENT = ANYTHING_SCHEMA.getField("map").schema() - .getTypes().get(1) // The "map" field is optional, get the schema from the union type - .getElementType(); + ANYTHING_SCHEMA_MAP_ELEMENT = ANYTHING_SCHEMA.getField("map").schema().getTypes().get(1) // The "map" + // field is + // optional, + // get the + // schema + // from the + // union type + .getElementType(); } - // Convert values in Connect form into their logical types. These logical converters are // discovered by logical type names specified in the field - private static final HashMap TO_CONNECT_LOGICAL_CONVERTERS - = new HashMap<>(); + private static final HashMap TO_CONNECT_LOGICAL_CONVERTERS = new HashMap<>(); static { TO_CONNECT_LOGICAL_CONVERTERS.put(Decimal.LOGICAL_NAME, new LogicalTypeConverter() { @@ -191,8 +180,8 @@ public Object convert(Schema schema, Object value) { return Decimal.toLogical(schema, ((ByteBuffer) value).array()); } throw new DataException( - "Invalid type for Decimal, underlying representation should be bytes but was " - + value.getClass()); + "Invalid type for Decimal, underlying representation should be bytes but was " + + value.getClass()); } }); @@ -201,8 +190,8 @@ public Object convert(Schema schema, Object value) { public Object convert(Schema schema, Object value) { if (!(value instanceof Integer)) { throw new DataException( - "Invalid type for Date, underlying representation should be int32 but was " - + value.getClass()); + "Invalid type for Date, underlying representation should be int32 but was " + + value.getClass()); } return Date.toLogical(schema, (int) value); } @@ -213,8 +202,8 @@ public Object convert(Schema schema, Object value) { public Object convert(Schema schema, Object value) { if (!(value instanceof Integer)) { throw new DataException( - "Invalid type for Time, underlying representation should be int32 but was " - + value.getClass()); + "Invalid type for Time, underlying representation should be int32 but was " + + 
value.getClass()); } return Time.toLogical(schema, (int) value); } @@ -225,8 +214,8 @@ public Object convert(Schema schema, Object value) { public Object convert(Schema schema, Object value) { if (!(value instanceof Long)) { throw new DataException( - "Invalid type for Timestamp, underlying representation should be int64 but was " - + value.getClass()); + "Invalid type for Timestamp, underlying representation should be int64 but was " + + value.getClass()); } return Timestamp.toLogical(schema, (long) value); } @@ -245,8 +234,7 @@ public Object convert(Schema schema, Object value) { static final String CONNECT_AVRO_DECIMAL_PRECISION_PROP = "connect.decimal.precision"; static final Integer CONNECT_AVRO_DECIMAL_PRECISION_DEFAULT = 64; - private static final HashMap TO_AVRO_LOGICAL_CONVERTERS - = new HashMap<>(); + private static final HashMap TO_AVRO_LOGICAL_CONVERTERS = new HashMap<>(); static { TO_AVRO_LOGICAL_CONVERTERS.put(Decimal.LOGICAL_NAME, new LogicalTypeConverter() { @@ -254,7 +242,7 @@ public Object convert(Schema schema, Object value) { public Object convert(Schema schema, Object value) { if (!(value instanceof BigDecimal)) { throw new DataException( - "Invalid type for Decimal, expected BigDecimal but was " + value.getClass()); + "Invalid type for Decimal, expected BigDecimal but was " + value.getClass()); } return Decimal.fromLogical(schema, (BigDecimal) value); } @@ -276,7 +264,7 @@ public Object convert(Schema schema, Object value) { public Object convert(Schema schema, Object value) { if (!(value instanceof java.util.Date)) { throw new DataException( - "Invalid type for Time, expected Date but was " + value.getClass()); + "Invalid type for Time, expected Date but was " + value.getClass()); } return Time.fromLogical(schema, (java.util.Date) value); } @@ -287,7 +275,7 @@ public Object convert(Schema schema, Object value) { public Object convert(Schema schema, Object value) { if (!(value instanceof java.util.Date)) { throw new DataException( - "Invalid 
type for Timestamp, expected Date but was " + value.getClass()); + "Invalid type for Timestamp, expected Date but was " + value.getClass()); } return Timestamp.fromLogical(schema, (java.util.Date) value); } @@ -341,27 +329,21 @@ public int hashCode() { } } - public AvroData(int cacheSize) { - this(new AvroDataConfig.Builder() - .with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, cacheSize) - .build()); + this(new AvroDataConfig.Builder().with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, cacheSize).build()); } public AvroData(AvroDataConfig avroDataConfig) { - fromConnectSchemaCache = - new SynchronizedCache<>(new LRUCache<>( - avroDataConfig.getSchemasCacheSize())); - toConnectSchemaCache = - new SynchronizedCache<>(new LRUCache<>( - avroDataConfig.getSchemasCacheSize())); + fromConnectSchemaCache = new SynchronizedCache<>( + new LRUCache<>(avroDataConfig.getSchemasCacheSize())); + toConnectSchemaCache = new SynchronizedCache<>(new LRUCache<>(avroDataConfig.getSchemasCacheSize())); this.connectMetaData = avroDataConfig.isConnectMetaData(); - //this.generalizedSumTypeSupport = avroDataConfig.isGeneralizedSumTypeSupport(); - //this.ignoreDefaultForNullables = avroDataConfig.ignoreDefaultForNullables(); + // this.generalizedSumTypeSupport = avroDataConfig.isGeneralizedSumTypeSupport(); + // this.ignoreDefaultForNullables = avroDataConfig.ignoreDefaultForNullables(); this.enhancedSchemaSupport = avroDataConfig.isEnhancedAvroSchemaSupport(); - //this.scrubInvalidNames = avroDataConfig.isScrubInvalidNames(); - //this.discardTypeDocDefault = avroDataConfig.isDiscardTypeDocDefault(); - //this.allowOptionalMapKey = avroDataConfig.isAllowOptionalMapKeys(); + // this.scrubInvalidNames = avroDataConfig.isScrubInvalidNames(); + // this.discardTypeDocDefault = avroDataConfig.isDiscardTypeDocDefault(); + // this.allowOptionalMapKey = avroDataConfig.isAllowOptionalMapKeys(); } /** @@ -377,32 +359,23 @@ protected Object fromConnectData(Schema schema, org.apache.avro.Schema avroSchem } /** 
- * Convert from Connect data format to Avro. This version assumes the Avro schema has already - * been converted and makes the use of NonRecordContainer optional + * Convert from Connect data format to Avro. This version assumes the Avro schema has already been + * converted and makes the use of NonRecordContainer optional * - * @param schema the Connect schema - * @param avroSchema the corresponding - * @param logicalValue the Connect data to convert, which may be a value for - * a logical type - * @param requireContainer if true, wrap primitives, maps, and arrays in a - * NonRecordContainer before returning them - * @param requireSchemalessContainerNull if true, use a container representation of null because - * this is part of struct/array/map and we cannot represent - * nulls as true null because Anything cannot be a union - * type; otherwise, this is a top-level value and can return - * null + * @param schema the Connect schema + * @param avroSchema the corresponding + * @param logicalValue the Connect data to convert, which may be a value for a logical type + * @param requireContainer if true, wrap primitives, maps, and arrays in a NonRecordContainer before + * returning them + * @param requireSchemalessContainerNull if true, use a container representation of null because this is + * part of struct/array/map and we cannot represent nulls as true null because Anything cannot + * be a union type; otherwise, this is a top-level value and can return null * @return the converted data */ - private Object fromConnectData( - Schema schema, - org.apache.avro.Schema avroSchema, - Object logicalValue, - boolean requireContainer, - boolean requireSchemalessContainerNull - ) { - Schema.Type schemaType = schema != null - ? 
schema.type() - : schemaTypeForSchemalessJavaType(logicalValue); + private Object fromConnectData(Schema schema, org.apache.avro.Schema avroSchema, Object logicalValue, + boolean requireContainer, boolean requireSchemalessContainerNull) { + Schema.Type schemaType = schema != null ? schema.type() + : schemaTypeForSchemalessJavaType(logicalValue); if (schemaType == null) { // Schemaless null data since schema is null and we got a null schema type from the value if (requireSchemalessContainerNull) { @@ -438,50 +411,41 @@ private Object fromConnectData( case INT8: { Byte byteValue = (Byte) value; // Check for correct type Integer convertedByteValue = byteValue == null ? null : byteValue.intValue(); - return maybeAddContainer( - avroSchema, - maybeWrapSchemaless(schema, convertedByteValue, ANYTHING_SCHEMA_INT_FIELD), - requireContainer); + return maybeAddContainer(avroSchema, + maybeWrapSchemaless(schema, convertedByteValue, ANYTHING_SCHEMA_INT_FIELD), + requireContainer); } case INT16: { Short shortValue = (Short) value; // Check for correct type Integer convertedShortValue = shortValue == null ? 
null : shortValue.intValue(); - return maybeAddContainer( - avroSchema, - maybeWrapSchemaless(schema, convertedShortValue, ANYTHING_SCHEMA_INT_FIELD), - requireContainer); + return maybeAddContainer(avroSchema, + maybeWrapSchemaless(schema, convertedShortValue, ANYTHING_SCHEMA_INT_FIELD), + requireContainer); } case INT32: Integer intValue = (Integer) value; // Check for correct type - return maybeAddContainer( - avroSchema, - maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_INT_FIELD), - requireContainer); + return maybeAddContainer(avroSchema, + maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_INT_FIELD), requireContainer); case INT64: Long longValue = (Long) value; // Check for correct type - return maybeAddContainer( - avroSchema, - maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_LONG_FIELD), - requireContainer); + return maybeAddContainer(avroSchema, + maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_LONG_FIELD), requireContainer); case FLOAT32: Float floatValue = (Float) value; // Check for correct type - return maybeAddContainer( - avroSchema, - maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_FLOAT_FIELD), - requireContainer); + return maybeAddContainer(avroSchema, + maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_FLOAT_FIELD), + requireContainer); case FLOAT64: Double doubleValue = (Double) value; // Check for correct type - return maybeAddContainer( - avroSchema, - maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_DOUBLE_FIELD), - requireContainer); + return maybeAddContainer(avroSchema, + maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_DOUBLE_FIELD), + requireContainer); case BOOLEAN: Boolean boolValue = (Boolean) value; // Check for correct type - return maybeAddContainer( - avroSchema, - maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_BOOLEAN_FIELD), - requireContainer); + return maybeAddContainer(avroSchema, + maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_BOOLEAN_FIELD), + requireContainer); case STRING: if 
(generalizedSumTypeSupport && ConnectEnum.isEnum(schema)) { String enumSchemaName = schema.parameters().get(GENERALIZED_TYPE_ENUM); @@ -493,14 +457,12 @@ private Object fromConnectData( } else { String stringValue = (String) value; // Check for correct type } - return maybeAddContainer( - avroSchema, + return maybeAddContainer(avroSchema, maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_STRING_FIELD), requireContainer); case BYTES: { - value = value instanceof byte[] ? ByteBuffer.wrap((byte[]) value) : - (ByteBuffer) value; + value = value instanceof byte[] ? ByteBuffer.wrap((byte[]) value) : (ByteBuffer) value; if (schema != null && isFixedSchema(schema)) { int size = Integer.parseInt(schema.parameters().get(CONNECT_AVRO_FIXED_SIZE_PROP)); org.apache.avro.Schema fixedSchema = null; @@ -510,7 +472,7 @@ private Object fromConnectData( if (memberSchema.getType() == org.apache.avro.Schema.Type.FIXED && memberSchema.getFixedSize() == size && unionMemberFieldName(memberSchema, index) - .equals(unionMemberFieldName(schema, index))) { + .equals(unionMemberFieldName(schema, index))) { fixedSchema = memberSchema; } index++; @@ -521,10 +483,9 @@ && unionMemberFieldName(memberSchema, index) } else { fixedSchema = avroSchema; } - value = new GenericData.Fixed(fixedSchema, ((ByteBuffer)value).array()); + value = new GenericData.Fixed(fixedSchema, ((ByteBuffer) value).array()); } - return maybeAddContainer( - avroSchema, + return maybeAddContainer(avroSchema, maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_BYTES_FIELD), requireContainer); } @@ -536,23 +497,14 @@ && unionMemberFieldName(memberSchema, index) Schema elementSchema = schema != null ? schema.valueSchema() : null; org.apache.avro.Schema underlyingAvroSchema = avroSchemaForUnderlyingTypeIfOptional( schema, avroSchema, scrubInvalidNames); - org.apache.avro.Schema elementAvroSchema = - schema != null ? 
underlyingAvroSchema.getElementType() : ANYTHING_SCHEMA; + org.apache.avro.Schema elementAvroSchema = schema != null + ? underlyingAvroSchema.getElementType() : ANYTHING_SCHEMA; for (Object val : list) { - converted.add( - fromConnectData( - elementSchema, - elementAvroSchema, - val, - false, - true - ) - ); + converted.add(fromConnectData(elementSchema, elementAvroSchema, val, false, true)); } - return maybeAddContainer( - avroSchema, - maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_ARRAY_FIELD), - requireContainer); + return maybeAddContainer(avroSchema, + maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_ARRAY_FIELD), + requireContainer); } case MAP: { @@ -562,42 +514,34 @@ && unionMemberFieldName(memberSchema, index) && (!schema.keySchema().isOptional() || allowOptionalMapKey)) { // TODO most types don't need a new converted object since types pass through - underlyingAvroSchema = avroSchemaForUnderlyingTypeIfOptional( - schema, avroSchema, scrubInvalidNames); + underlyingAvroSchema = avroSchemaForUnderlyingTypeIfOptional(schema, avroSchema, + scrubInvalidNames); Map converted = new HashMap<>(); for (Map.Entry entry : map.entrySet()) { // Key is a String, no conversion needed Object convertedValue = fromConnectData(schema.valueSchema(), - underlyingAvroSchema.getValueType(), - entry.getValue(), false, true - ); + underlyingAvroSchema.getValueType(), entry.getValue(), false, true); converted.put((String) entry.getKey(), convertedValue); } return maybeAddContainer(avroSchema, converted, requireContainer); } else { List converted = new ArrayList<>(map.size()); underlyingAvroSchema = avroSchemaForUnderlyingMapEntryType(schema, avroSchema); - org.apache.avro.Schema elementSchema = - schema != null - ? underlyingAvroSchema.getElementType() - : ANYTHING_SCHEMA_MAP_ELEMENT; + org.apache.avro.Schema elementSchema = schema != null + ? 
underlyingAvroSchema.getElementType() : ANYTHING_SCHEMA_MAP_ELEMENT; org.apache.avro.Schema avroKeySchema = elementSchema.getField(KEY_FIELD).schema(); org.apache.avro.Schema avroValueSchema = elementSchema.getField(VALUE_FIELD).schema(); for (Map.Entry entry : map.entrySet()) { Object keyConverted = fromConnectData(schema != null ? schema.keySchema() : null, avroKeySchema, entry.getKey(), false, true); - Object valueConverted = fromConnectData(schema != null ? schema.valueSchema() : null, - avroValueSchema, entry.getValue(), false, - true); - converted.add( - new GenericRecordBuilder(elementSchema) - .set(KEY_FIELD, keyConverted) - .set(VALUE_FIELD, valueConverted) - .build() - ); + Object valueConverted = fromConnectData( + schema != null ? schema.valueSchema() : null, avroValueSchema, + entry.getValue(), false, true); + converted.add(new GenericRecordBuilder(elementSchema).set(KEY_FIELD, keyConverted) + .set(VALUE_FIELD, valueConverted).build()); } - return maybeAddContainer( - avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), + return maybeAddContainer(avroSchema, + maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), requireContainer); } } @@ -607,37 +551,30 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), if (!struct.schema().equals(schema)) { throw new DataException("Mismatching struct schema"); } - //This handles the inverting of a union which is held as a struct, where each field is + // This handles the inverting of a union which is held as a struct, where each field is // one of the union types. if (isUnionSchema(schema)) { for (Field field : schema.fields()) { - Object object = ignoreDefaultForNullables - ? struct.getWithoutDefault(field.name()) : struct.get(field); + Object object = ignoreDefaultForNullables ? 
struct.getWithoutDefault(field.name()) + : struct.get(field); if (object != null) { - return fromConnectData( - field.schema(), - avroSchema, - object, - false, - true - ); + return fromConnectData(field.schema(), avroSchema, object, false, true); } } return fromConnectData(schema, avroSchema, null, false, true); } else { org.apache.avro.Schema underlyingAvroSchema = avroSchemaForUnderlyingTypeIfOptional( schema, avroSchema, scrubInvalidNames); - GenericRecordBuilder convertedBuilder = new GenericRecordBuilder(underlyingAvroSchema); + GenericRecordBuilder convertedBuilder = new GenericRecordBuilder( + underlyingAvroSchema); for (Field field : schema.fields()) { String fieldName = scrubName(field.name(), scrubInvalidNames); org.apache.avro.Schema.Field theField = underlyingAvroSchema.getField(fieldName); org.apache.avro.Schema fieldAvroSchema = theField.schema(); Object fieldValue = ignoreDefaultForNullables - ? struct.getWithoutDefault(field.name()) : struct.get(field); - convertedBuilder.set( - fieldName, - fromConnectData(field.schema(), fieldAvroSchema, fieldValue, false, true) - ); + ? struct.getWithoutDefault(field.name()) : struct.get(field); + convertedBuilder.set(fieldName, fromConnectData(field.schema(), fieldAvroSchema, + fieldValue, false, true)); } return convertedBuilder.build(); } @@ -651,8 +588,8 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), } } - private GenericData.EnumSymbol enumSymbol( - org.apache.avro.Schema avroSchema, Object value, String enumSchemaName) { + private GenericData.EnumSymbol enumSymbol(org.apache.avro.Schema avroSchema, Object value, + String enumSchemaName) { org.apache.avro.Schema enumSchema; if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) { int enumIndex = avroSchema.getIndexNamed(enumSchemaName); @@ -664,33 +601,30 @@ private GenericData.EnumSymbol enumSymbol( } /** - * MapEntry types in connect Schemas are represented as Arrays of record. 
- * Return the array type from the union instead of the union itself. + * MapEntry types in connect Schemas are represented as Arrays of record. Return the array type from the + * union instead of the union itself. */ - private static org.apache.avro.Schema avroSchemaForUnderlyingMapEntryType( - Schema schema, - org.apache.avro.Schema avroSchema) { + private static org.apache.avro.Schema avroSchemaForUnderlyingMapEntryType(Schema schema, + org.apache.avro.Schema avroSchema) { if (schema != null && schema.isOptional()) { if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) { for (org.apache.avro.Schema typeSchema : avroSchema.getTypes()) { if (!typeSchema.getType().equals(org.apache.avro.Schema.Type.NULL) - && Schema.Type.ARRAY.getName().equals(typeSchema.getType().getName())) { + && Schema.Type.ARRAY.getName().equals(typeSchema.getType().getName())) { return typeSchema; } } } else { throw new DataException( - "An optional schema should have an Avro Union type, not " - + schema.type()); + "An optional schema should have an Avro Union type, not " + schema.type()); } } return avroSchema; } private static boolean crossReferenceSchemaNames(final Schema schema, - final org.apache.avro.Schema avroSchema, - final boolean scrubInvalidNames) { + final org.apache.avro.Schema avroSchema, final boolean scrubInvalidNames) { String fullName = scrubFullName(schema.name(), scrubInvalidNames); return Objects.equals(avroSchema.getFullName(), fullName) || Objects.equals(avroSchema.getType().getName(), schema.type().getName()) @@ -698,16 +632,15 @@ private static boolean crossReferenceSchemaNames(final Schema schema, } /** - * Connect optional fields are represented as a unions (null & type) in Avro - * Return the Avro schema of the actual type in the Union (instead of the union itself) + * Connect optional fields are represented as a unions (null & type) in Avro Return the Avro schema of the + * actual type in the Union (instead of the union itself) */ - private static 
org.apache.avro.Schema avroSchemaForUnderlyingTypeIfOptional( - Schema schema, org.apache.avro.Schema avroSchema, boolean scrubInvalidNames) { + private static org.apache.avro.Schema avroSchemaForUnderlyingTypeIfOptional(Schema schema, + org.apache.avro.Schema avroSchema, boolean scrubInvalidNames) { if (schema != null && schema.isOptional()) { if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) { - for (org.apache.avro.Schema typeSchema : avroSchema - .getTypes()) { + for (org.apache.avro.Schema typeSchema : avroSchema.getTypes()) { if (!typeSchema.getType().equals(org.apache.avro.Schema.Type.NULL) && crossReferenceSchemaNames(schema, typeSchema, scrubInvalidNames)) { return typeSchema; @@ -715,8 +648,7 @@ && crossReferenceSchemaNames(schema, typeSchema, scrubInvalidNames)) { } } else { throw new DataException( - "An optinal schema should have an Avro Union type, not " - + schema.type()); + "An optinal schema should have an Avro Union type, not " + schema.type()); } } return avroSchema; @@ -771,7 +703,7 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema) { } public org.apache.avro.Schema fromConnectSchema(Schema schema, - Map schemaMap) { + Map schemaMap) { if (schema == null) { return ANYTHING_SCHEMA; } @@ -788,20 +720,18 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, } /** - * SchemaMap is a map of already resolved internal schemas, this avoids type re-declaration if a - * type is reused, this actually blows up if you don't do this and have a type used in multiple - * places. - * - *

Also it only holds reference the non-optional schemas as technically an optional is - * actually a union of null and the non-opitonal, which if used in multiple places some optional - * some non-optional will cause error as you redefine type. - * - *

This is different to the global schema cache which is used to hold/cache fully resolved - * schemas used to avoid re-resolving when presented with the same source schema. + * SchemaMap is a map of already resolved internal schemas, this avoids type re-declaration if a type is + * reused, this actually blows up if you don't do this and have a type used in multiple places. + *

+ * Also it only holds reference the non-optional schemas as technically an optional is actually a union of + * null and the non-opitonal, which if used in multiple places some optional some non-optional will cause + * error as you redefine type. + *

+ * This is different to the global schema cache which is used to hold/cache fully resolved schemas used to + * avoid re-resolving when presented with the same source schema. */ - public org.apache.avro.Schema fromConnectSchema(Schema schema, - FromConnectContext fromConnectContext, - boolean ignoreOptional) { + public org.apache.avro.Schema fromConnectSchema(Schema schema, FromConnectContext fromConnectContext, + boolean ignoreOptional) { if (schema == null) { return ANYTHING_SCHEMA; } @@ -842,10 +772,9 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, baseSchema = org.apache.avro.SchemaBuilder.builder().booleanType(); break; case STRING: - if ((generalizedSumTypeSupport || enhancedSchemaSupport) - && schema.parameters() != null + if ((generalizedSumTypeSupport || enhancedSchemaSupport) && schema.parameters() != null && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) - || schema.parameters().containsKey(AVRO_TYPE_ENUM))) { + || schema.parameters().containsKey(AVRO_TYPE_ENUM))) { String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : AVRO_TYPE_ENUM; List symbols = new ArrayList<>(); for (Map.Entry entry : schema.parameters().entrySet()) { @@ -860,13 +789,12 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, String enumDoc = schema.parameters().get(AVRO_ENUM_DOC_PREFIX_PROP + name); String enumDefault = schema.parameters().get(AVRO_ENUM_DEFAULT_PREFIX_PROP + name); baseSchema = discardTypeDocDefault - ? org.apache.avro.SchemaBuilder.builder().enumeration(enumName) - .doc(schema.parameters().get(CONNECT_ENUM_DOC_PROP)) - .symbols(symbols.toArray(new String[symbols.size()])) - : org.apache.avro.SchemaBuilder.builder().enumeration(enumName) - .doc(enumDoc) - .defaultSymbol(enumDefault) - .symbols(symbols.toArray(new String[symbols.size()])); + ? 
org.apache.avro.SchemaBuilder.builder().enumeration(enumName) + .doc(schema.parameters().get(CONNECT_ENUM_DOC_PROP)) + .symbols(symbols.toArray(new String[symbols.size()])) + : org.apache.avro.SchemaBuilder.builder().enumeration(enumName).doc(enumDoc) + .defaultSymbol(enumDefault) + .symbols(symbols.toArray(new String[symbols.size()])); } else { baseSchema = org.apache.avro.SchemaBuilder.builder().stringType(); } @@ -876,9 +804,7 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, Pair names = getNameOrDefault(fromConnectContext, schema.name()); String namespace = names.getKey(); String name = names.getValue(); - baseSchema = org.apache.avro.SchemaBuilder.builder() - .fixed(name) - .namespace(namespace) + baseSchema = org.apache.avro.SchemaBuilder.builder().fixed(name).namespace(namespace) .size(Integer.parseInt(schema.parameters().get(CONNECT_AVRO_FIXED_SIZE_PROP))); } else { baseSchema = org.apache.avro.SchemaBuilder.builder().bytesType(); @@ -891,15 +817,14 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, int precision = Integer.parseInt(precisionValue); baseSchema.addProp(AVRO_LOGICAL_DECIMAL_PRECISION_PROP, new IntNode(precision)); } else { - baseSchema - .addProp(AVRO_LOGICAL_DECIMAL_PRECISION_PROP, - new IntNode(CONNECT_AVRO_DECIMAL_PRECISION_DEFAULT)); + baseSchema.addProp(AVRO_LOGICAL_DECIMAL_PRECISION_PROP, + new IntNode(CONNECT_AVRO_DECIMAL_PRECISION_DEFAULT)); } } break; case ARRAY: baseSchema = org.apache.avro.SchemaBuilder.builder().array() - .items(fromConnectSchemaWithCycle(schema.valueSchema(), fromConnectContext, false)); + .items(fromConnectSchemaWithCycle(schema.valueSchema(), fromConnectContext, false)); break; case MAP: // Avro only supports string keys, so we match the representation when possible, but @@ -913,12 +838,8 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, List fields = new ArrayList<>(); final org.apache.avro.Schema mapSchema; if (schema.name() == null) { - mapSchema = 
org.apache.avro.Schema.createRecord( - MAP_ENTRY_TYPE_NAME, - null, - NAMESPACE, - false - ); + mapSchema = org.apache.avro.Schema.createRecord(MAP_ENTRY_TYPE_NAME, null, NAMESPACE, + false); } else { Pair names = getNameOrDefault(fromConnectContext, schema.name()); String namespace = names.getKey(); @@ -926,18 +847,8 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, mapSchema = org.apache.avro.Schema.createRecord(name, null, namespace, false); mapSchema.addProp(CONNECT_INTERNAL_TYPE_NAME, MAP_ENTRY_TYPE_NAME); } - addAvroRecordField( - fields, - KEY_FIELD, - schema.keySchema(), - null, - fromConnectContext); - addAvroRecordField( - fields, - VALUE_FIELD, - schema.valueSchema(), - null, - fromConnectContext); + addAvroRecordField(fields, KEY_FIELD, schema.keySchema(), null, fromConnectContext); + addAvroRecordField(fields, VALUE_FIELD, schema.valueSchema(), null, fromConnectContext); mapSchema.setFields(fields); baseSchema = org.apache.avro.Schema.createArray(mapSchema); } @@ -949,24 +860,24 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, unionSchemas.add(org.apache.avro.SchemaBuilder.builder().nullType()); } for (Field field : schema.fields()) { - unionSchemas.add( - fromConnectSchemaWithCycle(nonOptional(field.schema()), fromConnectContext, true)); + unionSchemas.add(fromConnectSchemaWithCycle(nonOptional(field.schema()), + fromConnectContext, true)); } baseSchema = org.apache.avro.Schema.createUnion(unionSchemas); } else if (schema.isOptional()) { List unionSchemas = new ArrayList<>(); unionSchemas.add(org.apache.avro.SchemaBuilder.builder().nullType()); - unionSchemas.add( - fromConnectSchemaWithCycle(nonOptional(schema), fromConnectContext, false)); + unionSchemas + .add(fromConnectSchemaWithCycle(nonOptional(schema), fromConnectContext, false)); baseSchema = org.apache.avro.Schema.createUnion(unionSchemas); } else { Pair names = getNameOrDefault(fromConnectContext, schema.name()); String namespace = names.getKey(); String 
name = names.getValue(); String doc = schema.parameters() != null - ? schema.parameters() - .get(discardTypeDocDefault ? CONNECT_RECORD_DOC_PROP : AVRO_RECORD_DOC_PROP) - : null; + ? schema.parameters() + .get(discardTypeDocDefault ? CONNECT_RECORD_DOC_PROP : AVRO_RECORD_DOC_PROP) + : null; baseSchema = org.apache.avro.Schema.createRecord(name, doc, namespace, false); if (schema.name() != null) { fromConnectContext.cycleReferences.put(schema.name(), baseSchema); @@ -1026,18 +937,17 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, if (Decimal.LOGICAL_NAME.equalsIgnoreCase(schema.name())) { String precisionString = schema.parameters().get(CONNECT_AVRO_DECIMAL_PRECISION_PROP); String scaleString = schema.parameters().get(Decimal.SCALE_FIELD); - int precision = precisionString == null ? CONNECT_AVRO_DECIMAL_PRECISION_DEFAULT : - Integer.parseInt(precisionString); + int precision = precisionString == null ? CONNECT_AVRO_DECIMAL_PRECISION_DEFAULT + : Integer.parseInt(precisionString); int scale = scaleString == null ? 0 : Integer.parseInt(scaleString); if (scale < 0 || scale > precision) { log.trace( "Scale and precision of {} and {} cannot be serialized as native Avro logical " + "decimal type; reverting to legacy serialization method", - scale, - precision - ); + scale, precision); // We cannot use the Avro Java library's support for the decimal logical type when the - // scale is either negative or greater than the precision as this violates the Avro spec + // scale is either negative or greater than the precision as this violates the Avro + // spec // and causes the Avro library to throw an exception, so we fall back in this case to // using the legacy method for encoding decimal logical type information. 
// Can't add a key/value pair with the CONNECT_AVRO_DECIMAL_PRECISION_PROP key to the @@ -1059,7 +969,7 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, // Initially, to add support for logical types a new property was added // with key `logicalType`. This enabled logical types for avro schemas but not others, // such as parquet. The use of 'addToSchema` above supersedes this method here, - // which should eventually be removed. + // which should eventually be removed. // Keeping for backwards compatibility until a major version upgrade happens. // Below follows the older method of supporting logical types via properties. @@ -1072,7 +982,7 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, if (schema.name() != null) { if (Decimal.LOGICAL_NAME.equalsIgnoreCase(schema.name()) && (schema.parameters().containsKey(CONNECT_AVRO_DECIMAL_PRECISION_PROP) - || forceLegacyDecimal)) { + || forceLegacyDecimal)) { baseSchema.addProp(AVRO_LOGICAL_TYPE_PROP, AVRO_LOGICAL_DECIMAL); } else if (Time.LOGICAL_NAME.equalsIgnoreCase(schema.name())) { baseSchema.addProp(AVRO_LOGICAL_TYPE_PROP, AVRO_LOGICAL_TIME_MILLIS); @@ -1115,20 +1025,15 @@ private Pair getNameOrDefault(FromConnectContext ctx, String nam } } - private org.apache.avro.Schema maybeMakeOptional( - Schema schema, org.apache.avro.Schema baseSchema) { + private org.apache.avro.Schema maybeMakeOptional(Schema schema, org.apache.avro.Schema baseSchema) { if (!schema.isOptional()) { return baseSchema; } if (schema.defaultValue() != null) { - return org.apache.avro.SchemaBuilder.builder().unionOf() - .type(baseSchema).and() - .nullType() + return org.apache.avro.SchemaBuilder.builder().unionOf().type(baseSchema).and().nullType() .endUnion(); } else { - return org.apache.avro.SchemaBuilder.builder().unionOf() - .nullType().and() - .type(baseSchema) + return org.apache.avro.SchemaBuilder.builder().unionOf().nullType().and().type(baseSchema) .endUnion(); } } @@ -1161,7 +1066,7 @@ protected static 
String doScrubName(String name) { } String encoded = URLEncoder.encode(name, "UTF-8"); if (!NAME_START_CHAR.matcher(encoded).lookingAt()) { - encoded = "x" + encoded; // use an arbitrary valid prefix + encoded = "x" + encoded; // use an arbitrary valid prefix } encoded = NAME_INVALID_CHARS.matcher(encoded).replaceAll("_"); return encoded; @@ -1170,8 +1075,7 @@ protected static String doScrubName(String name) { } } - public org.apache.avro.Schema fromConnectSchemaWithCycle( - Schema schema, + public org.apache.avro.Schema fromConnectSchemaWithCycle(Schema schema, FromConnectContext fromConnectContext, boolean ignoreOptional) { org.apache.avro.Schema resolvedSchema; if (fromConnectContext.cycleReferences.containsKey(schema.name())) { @@ -1185,38 +1089,29 @@ public org.apache.avro.Schema fromConnectSchemaWithCycle( return resolvedSchema; } - private void addAvroRecordField( - List fields, - String fieldName, Schema fieldSchema, - String fieldDoc, - FromConnectContext fromConnectContext) { + private void addAvroRecordField(List fields, String fieldName, + Schema fieldSchema, String fieldDoc, FromConnectContext fromConnectContext) { Object defaultVal = null; if (fieldSchema.defaultValue() != null) { - defaultVal = JacksonUtils.toObject( - defaultValueFromConnect(fieldSchema, fieldSchema.defaultValue())); + defaultVal = JacksonUtils + .toObject(defaultValueFromConnect(fieldSchema, fieldSchema.defaultValue())); } else if (fieldSchema.isOptional()) { defaultVal = JsonProperties.NULL_VALUE; } org.apache.avro.Schema.Field field; org.apache.avro.Schema schema = fromConnectSchema(fieldSchema, fromConnectContext, false); try { - field = new org.apache.avro.Schema.Field( - fieldName, - schema, - discardTypeDocDefault ? fieldSchema.doc() : fieldDoc, - defaultVal); + field = new org.apache.avro.Schema.Field(fieldName, schema, + discardTypeDocDefault ? 
fieldSchema.doc() : fieldDoc, defaultVal); } catch (AvroTypeException e) { - field = new org.apache.avro.Schema.Field( - fieldName, - schema, + field = new org.apache.avro.Schema.Field(fieldName, schema, discardTypeDocDefault ? fieldSchema.doc() : fieldDoc); log.warn("Ignoring invalid default for field " + fieldName, e); } fields.add(field); } - private static Object toAvroLogical(Schema schema, Object value) { if (schema != null && schema.name() != null) { LogicalTypeConverter logicalConverter = TO_AVRO_LOGICAL_CONVERTERS.get(schema.name()); @@ -1271,11 +1166,11 @@ private JsonNode defaultValueFromConnect(Schema schema, Object value) { return JsonNodeFactory.instance.textNode((String) defaultVal); case BYTES: if (defaultVal instanceof byte[]) { - return JsonNodeFactory.instance.textNode(new String((byte[]) defaultVal, - StandardCharsets.ISO_8859_1)); + return JsonNodeFactory.instance + .textNode(new String((byte[]) defaultVal, StandardCharsets.ISO_8859_1)); } else { - return JsonNodeFactory.instance.textNode(new String(((ByteBuffer) defaultVal).array(), - StandardCharsets.ISO_8859_1)); + return JsonNodeFactory.instance.textNode( + new String(((ByteBuffer) defaultVal).array(), StandardCharsets.ISO_8859_1)); } case ARRAY: { ArrayNode array = JsonNodeFactory.instance.arrayNode(); @@ -1288,16 +1183,20 @@ private JsonNode defaultValueFromConnect(Schema schema, Object value) { if (schema.keySchema().type() == Schema.Type.STRING && (!schema.keySchema().isOptional() || allowOptionalMapKey)) { ObjectNode node = JsonNodeFactory.instance.objectNode(); - for (Map.Entry entry : ((Map) defaultVal).entrySet()) { - JsonNode entryDef = defaultValueFromConnect(schema.valueSchema(), entry.getValue()); + for (Map.Entry entry : ((Map) defaultVal) + .entrySet()) { + JsonNode entryDef = defaultValueFromConnect(schema.valueSchema(), + entry.getValue()); node.put(entry.getKey(), entryDef); } return node; } else { ArrayNode array = JsonNodeFactory.instance.arrayNode(); - for (Map.Entry 
entry : ((Map) defaultVal).entrySet()) { + for (Map.Entry entry : ((Map) defaultVal) + .entrySet()) { JsonNode keyDefault = defaultValueFromConnect(schema.keySchema(), entry.getKey()); - JsonNode valDefault = defaultValueFromConnect(schema.valueSchema(), entry.getValue()); + JsonNode valDefault = defaultValueFromConnect(schema.valueSchema(), + entry.getValue()); ArrayNode jsonEntry = JsonNodeFactory.instance.arrayNode(); jsonEntry.add(keyDefault); jsonEntry.add(valDefault); @@ -1323,14 +1222,11 @@ private JsonNode defaultValueFromConnect(Schema schema, Object value) { throw new DataException("Unknown schema type:" + schema.type()); } } catch (ClassCastException e) { - throw new DataException("Invalid type used for default value of " - + schema.type() - + " field: " - + schema.defaultValue().getClass()); + throw new DataException("Invalid type used for default value of " + schema.type() + " field: " + + schema.defaultValue().getClass()); } } - private JsonNode parametersFromConnect(Map params) { ObjectNode result = JsonNodeFactory.instance.objectNode(); for (Map.Entry entry : params.entrySet()) { @@ -1351,8 +1247,7 @@ private boolean isMapEntry(final org.apache.avro.Schema elemSchema) { if (!elemSchema.getType().equals(org.apache.avro.Schema.Type.RECORD)) { return false; } - if (NAMESPACE.equals(elemSchema.getNamespace()) - && MAP_ENTRY_TYPE_NAME.equals(elemSchema.getName())) { + if (NAMESPACE.equals(elemSchema.getNamespace()) && MAP_ENTRY_TYPE_NAME.equals(elemSchema.getName())) { return true; } if (Objects.equals(elemSchema.getProp(CONNECT_INTERNAL_TYPE_NAME), MAP_ENTRY_TYPE_NAME)) { @@ -1365,7 +1260,7 @@ private boolean isMapEntry(final org.apache.avro.Schema elemSchema) { * Convert the given object, in Avro format, into a Connect data object. 
* * @param avroSchema the Avro schema - * @param value the value to convert into a Connect data object + * @param value the value to convert into a Connect data object * @return the Connect schema and value */ public SchemaAndValue toConnectData(org.apache.avro.Schema avroSchema, Object value) { @@ -1376,20 +1271,18 @@ public SchemaAndValue toConnectData(org.apache.avro.Schema avroSchema, Object va * Convert the given object, in Avro format, into a Connect data object. * * @param avroSchema the Avro schema - * @param value the value to convert into a Connect data object - * @param version the version to set on the Connect schema if the avroSchema does not have a - * property named "connect.version", may be null + * @param value the value to convert into a Connect data object + * @param version the version to set on the Connect schema if the avroSchema does not have a property + * named "connect.version", may be null * @return the Connect schema and value */ - public SchemaAndValue toConnectData(org.apache.avro.Schema avroSchema, Object value, - Integer version) { + public SchemaAndValue toConnectData(org.apache.avro.Schema avroSchema, Object value, Integer version) { if (value == null) { return null; } ToConnectContext toConnectContext = new ToConnectContext(); - Schema schema = (avroSchema.equals(ANYTHING_SCHEMA)) - ? null - : toConnectSchema(avroSchema, version, toConnectContext); + Schema schema = (avroSchema.equals(ANYTHING_SCHEMA)) ? 
null + : toConnectSchema(avroSchema, version, toConnectContext); return new SchemaAndValue(schema, toConnectData(schema, value, toConnectContext)); } @@ -1398,7 +1291,7 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon } private Object toConnectData(Schema schema, Object value, ToConnectContext toConnectContext, - boolean doLogicalConversion) { + boolean doLogicalConversion) { validateSchemaValue(schema, value); if (value == null || value == JsonProperties.NULL_VALUE) { return null; @@ -1411,30 +1304,24 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon } IndexedRecord recordValue = (IndexedRecord) value; - Object - boolVal = - recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_BOOLEAN_FIELD).pos()); + Object boolVal = recordValue + .get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_BOOLEAN_FIELD).pos()); if (boolVal != null) { return toConnectData(Schema.BOOLEAN_SCHEMA, boolVal, toConnectContext); } - Object - bytesVal = - recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_BYTES_FIELD).pos()); + Object bytesVal = recordValue + .get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_BYTES_FIELD).pos()); if (bytesVal != null) { return toConnectData(Schema.BYTES_SCHEMA, bytesVal, toConnectContext); } - Object - dblVal = - recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_DOUBLE_FIELD).pos()); + Object dblVal = recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_DOUBLE_FIELD).pos()); if (dblVal != null) { return toConnectData(Schema.FLOAT64_SCHEMA, dblVal, toConnectContext); } - Object - fltVal = - recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_FLOAT_FIELD).pos()); + Object fltVal = recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_FLOAT_FIELD).pos()); if (fltVal != null) { return toConnectData(Schema.FLOAT32_SCHEMA, fltVal, toConnectContext); } @@ -1444,31 +1331,26 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon return 
toConnectData(Schema.INT32_SCHEMA, intVal, toConnectContext); } - Object - longVal = - recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_LONG_FIELD).pos()); + Object longVal = recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_LONG_FIELD).pos()); if (longVal != null) { return toConnectData(Schema.INT64_SCHEMA, longVal, toConnectContext); } - Object - stringVal = - recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_STRING_FIELD).pos()); + Object stringVal = recordValue + .get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_STRING_FIELD).pos()); if (stringVal != null) { return toConnectData(Schema.STRING_SCHEMA, stringVal, toConnectContext); } - Object - arrayVal = - recordValue.get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_ARRAY_FIELD).pos()); + Object arrayVal = recordValue + .get(ANYTHING_SCHEMA.getField(ANYTHING_SCHEMA_ARRAY_FIELD).pos()); if (arrayVal != null) { // We cannot reuse the logic like we do in other cases because it is not possible to // construct an array schema with a null item schema, but the items have no schema. if (!(arrayVal instanceof Collection)) { throw new DataException( "Expected a Collection for schemaless array field but found a " - + arrayVal.getClass().getName() - ); + + arrayVal.getClass().getName()); } Collection original = (Collection) arrayVal; List result = new ArrayList<>(original.size()); @@ -1483,20 +1365,18 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon // We cannot reuse the logic like we do in other cases because it is not possible to // construct a map schema with a null item schema, but the items have no schema. 
if (!(mapVal instanceof Collection)) { - throw new DataException( - "Expected a List for schemaless map field but found a " - + mapVal.getClass().getName() - ); + throw new DataException("Expected a List for schemaless map field but found a " + + mapVal.getClass().getName()); } Collection original = (Collection) mapVal; Map result = new HashMap<>(original.size()); for (IndexedRecord entry : original) { int avroKeyFieldIndex = entry.getSchema().getField(KEY_FIELD).pos(); int avroValueFieldIndex = entry.getSchema().getField(VALUE_FIELD).pos(); - Object convertedKey = toConnectData( - null, entry.get(avroKeyFieldIndex), toConnectContext); - Object convertedValue = toConnectData( - null, entry.get(avroValueFieldIndex), toConnectContext); + Object convertedKey = toConnectData(null, entry.get(avroKeyFieldIndex), + toConnectContext); + Object convertedValue = toConnectData(null, entry.get(avroValueFieldIndex), + toConnectContext); result.put(convertedKey, convertedValue); } return result; @@ -1547,13 +1427,12 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon case STRING: if (value instanceof String) { converted = value; - } else if (value instanceof CharSequence - || value instanceof GenericEnumSymbol - || value instanceof Enum) { + } else if (value instanceof CharSequence || value instanceof GenericEnumSymbol + || value instanceof Enum) { converted = value.toString(); } else { throw new DataException("Invalid class for string type, expecting String or " - + "CharSequence but found " + value.getClass()); + + "CharSequence but found " + value.getClass()); } break; @@ -1565,8 +1444,9 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon } else if (value instanceof GenericFixed) { converted = ByteBuffer.wrap(((GenericFixed) value).bytes()); } else { - throw new DataException("Invalid class for bytes type, expecting byte[] or ByteBuffer " - + "but found " + value.getClass()); + throw new DataException( + "Invalid 
class for bytes type, expecting byte[] or ByteBuffer " + "but found " + + value.getClass()); } break; @@ -1584,14 +1464,14 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon case MAP: { Schema keySchema = schema.keySchema(); Schema valueSchema = schema.valueSchema(); - if (keySchema != null && keySchema.type() == Schema.Type.STRING && !keySchema - .isOptional()) { + if (keySchema != null && keySchema.type() == Schema.Type.STRING + && !keySchema.isOptional()) { // Non-optional string keys Map original = (Map) value; Map result = new HashMap<>(original.size()); for (Map.Entry entry : original.entrySet()) { result.put(entry.getKey().toString(), - toConnectData(valueSchema, entry.getValue(), toConnectContext)); + toConnectData(valueSchema, entry.getValue(), toConnectContext)); } converted = result; } else { @@ -1601,10 +1481,10 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon for (IndexedRecord entry : original) { int avroKeyFieldIndex = entry.getSchema().getField(KEY_FIELD).pos(); int avroValueFieldIndex = entry.getSchema().getField(VALUE_FIELD).pos(); - Object convertedKey = toConnectData( - keySchema, entry.get(avroKeyFieldIndex), toConnectContext); - Object convertedValue = toConnectData( - valueSchema, entry.get(avroValueFieldIndex), toConnectContext); + Object convertedKey = toConnectData(keySchema, entry.get(avroKeyFieldIndex), + toConnectContext); + Object convertedValue = toConnectData(valueSchema, entry.get(avroValueFieldIndex), + toConnectContext); result.put(convertedKey, convertedValue); } converted = result; @@ -1618,16 +1498,16 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon Schema valueRecordSchema = null; if (value instanceof IndexedRecord) { IndexedRecord valueRecord = ((IndexedRecord) value); - valueRecordSchema = toConnectSchemaWithCycles( - valueRecord.getSchema(), true, null, null, toConnectContext); + valueRecordSchema = 
toConnectSchemaWithCycles(valueRecord.getSchema(), true, null, + null, toConnectContext); } int index = 0; for (Field field : schema.fields()) { Schema fieldSchema = field.schema(); if (isInstanceOfAvroSchemaTypeForSimpleSchema(fieldSchema, value, index) - || (valueRecordSchema != null && schemaEquals(valueRecordSchema, fieldSchema))) { - converted = new Struct(schema).put( - unionMemberFieldName(fieldSchema, index), + || (valueRecordSchema != null + && schemaEquals(valueRecordSchema, fieldSchema))) { + converted = new Struct(schema).put(unionMemberFieldName(fieldSchema, index), toConnectData(fieldSchema, value, toConnectContext)); break; } @@ -1654,8 +1534,8 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon for (Field field : schema.fields()) { String fieldName = scrubName(field.name()); int avroFieldIndex = original.getSchema().getField(fieldName).pos(); - Object convertedFieldValue = - toConnectData(field.schema(), original.get(avroFieldIndex), toConnectContext); + Object convertedFieldValue = toConnectData(field.schema(), + original.get(avroFieldIndex), toConnectContext); result.put(field, convertedFieldValue); } converted = result; @@ -1688,10 +1568,8 @@ public Schema toConnectSchema(org.apache.avro.Schema schema) { return toConnectSchema(schema, null, new ToConnectContext()); } - - private Schema toConnectSchema(org.apache.avro.Schema schema, - Integer version, - ToConnectContext toConnectContext) { + private Schema toConnectSchema(org.apache.avro.Schema schema, Integer version, + ToConnectContext toConnectContext) { // We perform caching only at this top level. 
While it might be helpful to cache some more of // the internal conversions, this is the safest place to add caching since some of the internal @@ -1709,41 +1587,32 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, return cachedSchema; } - Schema resultSchema = toConnectSchema(schema, getForceOptionalDefault(), null, - null, version, toConnectContext); + Schema resultSchema = toConnectSchema(schema, getForceOptionalDefault(), null, null, version, + toConnectContext); toConnectSchemaCache.put(schemaAndVersion, resultSchema); return resultSchema; } /** - * @param schema schema to convert - * @param forceOptional make the resulting schema optional, for converting Avro unions to a - * record format and simple Avro unions of null + type to optional schemas - * @param fieldDefaultVal if non-null, override any connect-annotated default values with this - * one; used when converting Avro record fields since they define default - * values with the field spec, but Connect specifies them with the field's - * schema - * @param docDefaultVal if non-null, override any connect-annotated documentation with this - * one; - * used when converting Avro record fields since they define doc values + * @param schema schema to convert + * @param forceOptional make the resulting schema optional, for converting Avro unions to a record format + * and simple Avro unions of null + type to optional schemas + * @param fieldDefaultVal if non-null, override any connect-annotated default values with this one; used + * when converting Avro record fields since they define default values with the field spec, but + * Connect specifies them with the field's schema + * @param docDefaultVal if non-null, override any connect-annotated documentation with this one; used when + * converting Avro record fields since they define doc values * @param toConnectContext context object that holds state while doing the conversion */ - private Schema toConnectSchema(org.apache.avro.Schema schema, - 
boolean forceOptional, - Object fieldDefaultVal, - String docDefaultVal, - ToConnectContext toConnectContext) { - return toConnectSchema( - schema, forceOptional, fieldDefaultVal, docDefaultVal, null, toConnectContext); + private Schema toConnectSchema(org.apache.avro.Schema schema, boolean forceOptional, + Object fieldDefaultVal, String docDefaultVal, ToConnectContext toConnectContext) { + return toConnectSchema(schema, forceOptional, fieldDefaultVal, docDefaultVal, null, toConnectContext); } - private Schema toConnectSchema(org.apache.avro.Schema schema, - boolean forceOptional, - Object fieldDefaultVal, - String docDefaultVal, - Integer version, - ToConnectContext toConnectContext) { + private Schema toConnectSchema(org.apache.avro.Schema schema, boolean forceOptional, + Object fieldDefaultVal, String docDefaultVal, Integer version, + ToConnectContext toConnectContext) { String type = schema.getProp(CONNECT_TYPE_PROP); String logicalType = schema.getProp(AVRO_LOGICAL_TYPE_PROP); @@ -1765,9 +1634,9 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, Object precisionNode = schema.getObjectProp(AVRO_LOGICAL_DECIMAL_PRECISION_PROP); if (null != precisionNode) { if (!(precisionNode instanceof Number)) { - throw new DataException(AVRO_LOGICAL_DECIMAL_PRECISION_PROP - + " property must be a JSON Integer." - + " https://avro.apache.org/docs/1.9.1/spec.html#Decimal"); + throw new DataException( + AVRO_LOGICAL_DECIMAL_PRECISION_PROP + " property must be a JSON Integer." 
+ + " https://avro.apache.org/docs/1.9.1/spec.html#Decimal"); } // Capture the precision as a parameter only if it is not the default int precision = ((Number) precisionNode).intValue(); @@ -1823,30 +1692,23 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, org.apache.avro.Schema elemSchema = schema.getElementType(); // Special case for custom encoding of non-string maps as list of key-value records if (isMapEntry(elemSchema)) { - if (elemSchema.getFields().size() != 2 - || elemSchema.getField(KEY_FIELD) == null - || elemSchema.getField(VALUE_FIELD) == null) { + if (elemSchema.getFields().size() != 2 || elemSchema.getField(KEY_FIELD) == null + || elemSchema.getField(VALUE_FIELD) == null) { throw new DataException("Found map encoded as array of key-value pairs, but array " - + "elements do not match the expected format."); + + "elements do not match the expected format."); } - builder = SchemaBuilder.map( - toConnectSchema(elemSchema.getField(KEY_FIELD).schema()), - toConnectSchema(elemSchema.getField(VALUE_FIELD).schema()) - ); + builder = SchemaBuilder.map(toConnectSchema(elemSchema.getField(KEY_FIELD).schema()), + toConnectSchema(elemSchema.getField(VALUE_FIELD).schema())); } else { - Schema arraySchema = toConnectSchemaWithCycles( - schema.getElementType(), getForceOptionalDefault(), - null, null, toConnectContext); + Schema arraySchema = toConnectSchemaWithCycles(schema.getElementType(), + getForceOptionalDefault(), null, null, toConnectContext); builder = SchemaBuilder.array(arraySchema); } break; case MAP: - builder = SchemaBuilder.map( - Schema.STRING_SCHEMA, - toConnectSchemaWithCycles(schema.getValueType(), getForceOptionalDefault(), - null, null, toConnectContext) - ); + builder = SchemaBuilder.map(Schema.STRING_SCHEMA, toConnectSchemaWithCycles( + schema.getValueType(), getForceOptionalDefault(), null, null, toConnectContext)); break; case RECORD: { @@ -1877,10 +1739,8 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, 
builder = SchemaBuilder.string(); if (connectMetaData) { if (schema.getDoc() != null) { - builder.parameter(discardTypeDocDefault - ? CONNECT_ENUM_DOC_PROP - : AVRO_ENUM_DOC_PREFIX_PROP + schema.getName(), - schema.getDoc()); + builder.parameter(discardTypeDocDefault ? CONNECT_ENUM_DOC_PROP + : AVRO_ENUM_DOC_PREFIX_PROP + schema.getName(), schema.getDoc()); } if (!discardTypeDocDefault && schema.getEnumDefault() != null) { builder.parameter(AVRO_ENUM_DEFAULT_PREFIX_PROP + schema.getName(), @@ -1905,15 +1765,14 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, if (schema.getTypes().contains(NULL_AVRO_SCHEMA)) { for (org.apache.avro.Schema memberSchema : schema.getTypes()) { if (!memberSchema.equals(NULL_AVRO_SCHEMA)) { - return toConnectSchemaWithCycles( - memberSchema, true, null, docDefaultVal, toConnectContext); + return toConnectSchemaWithCycles(memberSchema, true, null, docDefaultVal, + toConnectContext); } } } } - String unionName = generalizedSumTypeSupport - ? GENERALIZED_TYPE_UNION_PREFIX + (unionIndex++) - : AVRO_TYPE_UNION; + String unionName = generalizedSumTypeSupport ? 
GENERALIZED_TYPE_UNION_PREFIX + (unionIndex++) + : AVRO_TYPE_UNION; builder = SchemaBuilder.struct().name(unionName); if (generalizedSumTypeSupport) { builder.parameter(GENERALIZED_TYPE_UNION, unionName); @@ -1926,13 +1785,12 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, } else { String fieldName = unionMemberFieldName(memberSchema, fieldIndex); if (fieldNames.contains(fieldName)) { - throw new DataException("Multiple union schemas map to the Connect union field name"); + throw new DataException( + "Multiple union schemas map to the Connect union field name"); } fieldNames.add(fieldName); - builder.field( - fieldName, - toConnectSchemaWithCycles(memberSchema, true, null, null, toConnectContext) - ); + builder.field(fieldName, + toConnectSchemaWithCycles(memberSchema, true, null, null, toConnectContext)); } fieldIndex++; } @@ -1946,13 +1804,13 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, throw new DataException("Standalone null schemas are not supported by this converter"); default: - throw new DataException("Couldn't translate unsupported schema type " - + schema.getType().getName() + "."); + throw new DataException( + "Couldn't translate unsupported schema type " + schema.getType().getName() + "."); } if (discardTypeDocDefault) { - String docVal = docDefaultVal != null ? docDefaultVal : - (schema.getDoc() != null ? schema.getDoc() : schema.getProp(CONNECT_DOC_PROP)); + String docVal = docDefaultVal != null ? docDefaultVal + : (schema.getDoc() != null ? 
schema.getDoc() : schema.getProp(CONNECT_DOC_PROP)); if (docVal != null) { builder.doc(docVal); } @@ -1968,7 +1826,7 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, } // Included Kafka Connect version takes priority, fall back to schema registry version - int versionInt = -1; // A valid version must be a positive integer (assumed throughout SR) + int versionInt = -1; // A valid version must be a positive integer (assumed throughout SR) Object versionNode = schema.getObjectProp(CONNECT_VERSION_PROP); if (versionNode != null) { if (!(versionNode instanceof Number)) { @@ -1982,11 +1840,8 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, if (builder.version() != null) { if (versionInt != builder.version()) { throw new DataException("Mismatched versions: version already added to SchemaBuilder " - + "(" - + builder.version() - + ") differs from version in source schema (" - + versionInt - + ")"); + + "(" + builder.version() + ") differs from version in source schema (" + + versionInt + ")"); } } else { builder.version(versionInt); @@ -1996,17 +1851,17 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, Object parameters = schema.getObjectProp(CONNECT_PARAMETERS_PROP); if (connectMetaData && parameters != null) { if (!(parameters instanceof Map)) { - throw new DataException("Expected JSON object for schema parameters but found: " - + parameters); + throw new DataException( + "Expected JSON object for schema parameters but found: " + parameters); } - Iterator> paramIt = - ((Map) parameters).entrySet().iterator(); + Iterator> paramIt = ((Map) parameters).entrySet() + .iterator(); while (paramIt.hasNext()) { Map.Entry field = paramIt.next(); Object jsonValue = field.getValue(); if (!(jsonValue instanceof String)) { - throw new DataException("Expected schema parameter values to be strings but found: " - + jsonValue); + throw new DataException( + "Expected schema parameter values to be strings but found: " + jsonValue); } 
builder.parameter(field.getKey(), (String) jsonValue); } @@ -2050,9 +1905,7 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, if (builder.name() != null) { if (!name.equals(builder.name())) { throw new DataException("Mismatched names: name already added to SchemaBuilder (" - + builder.name() - + ") differs from name in source schema (" - + name + ")"); + + builder.name() + ") differs from name in source schema (" + name + ")"); } } else { builder.name(name); @@ -2064,49 +1917,41 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, } if (!toConnectContext.detectedCycles.contains(schema) - && toConnectContext.cycleReferences.containsKey(schema)) { + && toConnectContext.cycleReferences.containsKey(schema)) { toConnectContext.cycleReferences.remove(schema); } return builder.build(); } - private Schema toConnectSchemaWithCycles(org.apache.avro.Schema schema, - boolean forceOptional, - Object fieldDefaultVal, - String docDefaultVal, - ToConnectContext toConnectContext) { + private Schema toConnectSchemaWithCycles(org.apache.avro.Schema schema, boolean forceOptional, + Object fieldDefaultVal, String docDefaultVal, ToConnectContext toConnectContext) { Schema resolvedSchema; if (toConnectContext.cycleReferences.containsKey(schema)) { toConnectContext.detectedCycles.add(schema); resolvedSchema = cyclicSchemaWrapper(toConnectContext.cycleReferences, schema, forceOptional); } else { - resolvedSchema = toConnectSchema( - schema, forceOptional, fieldDefaultVal, docDefaultVal, toConnectContext); + resolvedSchema = toConnectSchema(schema, forceOptional, fieldDefaultVal, docDefaultVal, + toConnectContext); } return resolvedSchema; } private CyclicSchemaWrapper cyclicSchemaWrapper( - Map toConnectCycles, - org.apache.avro.Schema memberSchema, - boolean optional) { + Map toConnectCycles, + org.apache.avro.Schema memberSchema, boolean optional) { return new CyclicSchemaWrapper(toConnectCycles.get(memberSchema).schema(), optional); } - private Object 
defaultValueFromAvro(Schema schema, - org.apache.avro.Schema avroSchema, - Object value, - ToConnectContext toConnectContext) { + private Object defaultValueFromAvro(Schema schema, org.apache.avro.Schema avroSchema, Object value, + ToConnectContext toConnectContext) { Object result = defaultValueFromAvroWithoutLogical(schema, avroSchema, value, toConnectContext); // If the schema is a logical type, convert the primitive Avro default into the logical form return toConnectLogical(schema, result); } - private Object defaultValueFromAvroWithoutLogical(Schema schema, - org.apache.avro.Schema avroSchema, - Object value, - ToConnectContext toConnectContext) { + private Object defaultValueFromAvroWithoutLogical(Schema schema, org.apache.avro.Schema avroSchema, + Object value, ToConnectContext toConnectContext) { if (value == null || value == JsonProperties.NULL_VALUE) { return null; } @@ -2171,8 +2016,8 @@ private Object defaultValueFromAvroWithoutLogical(Schema schema, } List result = new ArrayList<>(jsonValue.size()); for (JsonNode elem : jsonValue) { - Object converted = defaultValueFromAvro( - schema.valueSchema(), avroSchema.getElementType(), elem, toConnectContext); + Object converted = defaultValueFromAvro(schema.valueSchema(), avroSchema.getElementType(), + elem, toConnectContext); result.add(converted); } return result; @@ -2186,8 +2031,8 @@ private Object defaultValueFromAvroWithoutLogical(Schema schema, Iterator> fieldIt = jsonValue.fields(); while (fieldIt.hasNext()) { Map.Entry field = fieldIt.next(); - Object converted = defaultValueFromAvro( - schema.valueSchema(), avroSchema.getValueType(), field.getValue(), toConnectContext); + Object converted = defaultValueFromAvro(schema.valueSchema(), avroSchema.getValueType(), + field.getValue(), toConnectContext); result.put(field.getKey(), converted); } return result; @@ -2202,8 +2047,8 @@ private Object defaultValueFromAvroWithoutLogical(Schema schema, for (org.apache.avro.Schema.Field avroField : 
avroSchema.getFields()) { Field field = schema.field(avroField.name()); JsonNode fieldJson = ((JsonNode) value).get(field.name()); - Object converted = defaultValueFromAvro( - field.schema(), avroField.schema(), fieldJson, toConnectContext); + Object converted = defaultValueFromAvro(field.schema(), avroField.schema(), fieldJson, + toConnectContext); result.put(avroField.name(), converted); } return result; @@ -2217,9 +2062,7 @@ private Object defaultValueFromAvroWithoutLogical(Schema schema, } else { return defaultValueFromAvro( schema.field(unionMemberFieldName(memberAvroSchema, 0)).schema(), - memberAvroSchema, - value, - toConnectContext); + memberAvroSchema, value, toConnectContext); } } default: { @@ -2229,7 +2072,6 @@ private Object defaultValueFromAvroWithoutLogical(Schema schema, return null; } - private String unionMemberFieldName(org.apache.avro.Schema schema, int index) { if (generalizedSumTypeSupport) { return GENERALIZED_TYPE_UNION_FIELD_PREFIX + index; @@ -2265,22 +2107,17 @@ private static boolean isUnionSchema(Schema schema) { } private static boolean isEnumSchema(Schema schema) { - return schema.type() == Schema.Type.STRING - && schema.parameters() != null + return schema.type() == Schema.Type.STRING && schema.parameters() != null && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) - || schema.parameters().containsKey(AVRO_TYPE_ENUM)); + || schema.parameters().containsKey(AVRO_TYPE_ENUM)); } private static boolean isFixedSchema(Schema schema) { - return schema.type() == Schema.Type.BYTES - && schema.name() != null - && schema.parameters() != null + return schema.type() == Schema.Type.BYTES && schema.name() != null && schema.parameters() != null && schema.parameters().containsKey(CONNECT_AVRO_FIXED_SIZE_PROP); } - private boolean isInstanceOfAvroSchemaTypeForSimpleSchema(Schema fieldSchema, - Object value, - int index) { + private boolean isInstanceOfAvroSchemaTypeForSimpleSchema(Schema fieldSchema, Object value, int index) { if 
(isEnumSchema(fieldSchema)) { String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : AVRO_TYPE_ENUM; String enumSchemaName = fieldSchema.parameters().get(paramName); @@ -2313,14 +2150,11 @@ private boolean isInstanceOfAvroSchemaTypeForSimpleSchema(Schema fieldSchema, /** * Returns true if the fixed value size of the value matches the expected size */ - private boolean fixedValueSizeMatch(Schema fieldSchema, - Object value, - int size, - int index) { + private boolean fixedValueSizeMatch(Schema fieldSchema, Object value, int size, int index) { if (value instanceof byte[]) { return ((byte[]) value).length == size; } else if (value instanceof ByteBuffer) { - return ((ByteBuffer)value).remaining() == size; + return ((ByteBuffer) value).remaining() == size; } else if (value instanceof GenericFixed) { return unionMemberFieldName(((GenericFixed) value).getSchema(), index) .equals(unionMemberFieldName(fieldSchema, index)); @@ -2357,12 +2191,8 @@ private interface LogicalTypeConverter { } public static Schema nonOptional(Schema schema) { - return new ConnectSchema(schema.type(), false, schema.defaultValue(), schema.name(), - schema.version(), schema.doc(), - schema.parameters(), - fields(schema), - keySchema(schema), - valueSchema(schema)); + return new ConnectSchema(schema.type(), false, schema.defaultValue(), schema.name(), schema.version(), + schema.doc(), schema.parameters(), fields(schema), keySchema(schema), valueSchema(schema)); } public static List fields(Schema schema) { @@ -2393,7 +2223,7 @@ public static Schema valueSchema(Schema schema) { } private static boolean fieldListEquals(List one, List two, - Map, Boolean> cache) { + Map, Boolean> cache) { if (one == two) { return true; } else if (one == null || two == null) { @@ -2410,15 +2240,13 @@ private static boolean fieldListEquals(List one, List two, } } - private static boolean fieldEquals( - Field one, Field two, Map, Boolean> cache) { + private static boolean fieldEquals(Field one, Field two, 
Map, Boolean> cache) { if (one == two) { return true; } else if (one == null || two == null) { return false; } else { - return one.getClass() == two.getClass() - && Objects.equals(one.index(), two.index()) + return one.getClass() == two.getClass() && Objects.equals(one.index(), two.index()) && Objects.equals(one.name(), two.name()) && schemaEquals(one.schema(), two.schema(), cache); } @@ -2450,8 +2278,7 @@ public boolean equals(Object o) { return false; } Pair pair = (Pair) o; - return Objects.equals(key, pair.key) - && Objects.equals(value, pair.value); + return Objects.equals(key, pair.key) && Objects.equals(value, pair.value); } @Override @@ -2461,10 +2288,7 @@ public int hashCode() { @Override public String toString() { - return "Pair{" - + "key=" + key - + ", value=" + value - + '}'; + return "Pair{" + "key=" + key + ", value=" + value + '}'; } } @@ -2472,8 +2296,7 @@ private static boolean schemaEquals(Schema src, Schema that) { return schemaEquals(src, that, new HashMap<>()); } - private static boolean schemaEquals( - Schema src, Schema that, Map, Boolean> cache) { + private static boolean schemaEquals(Schema src, Schema that, Map, Boolean> cache) { if (src == that) { return true; } else if (src == null || that == null) { @@ -2490,10 +2313,8 @@ private static boolean schemaEquals( } boolean equals = Objects.equals(src.isOptional(), that.isOptional()) - && Objects.equals(src.version(), that.version()) - && Objects.equals(src.name(), that.name()) - && Objects.equals(src.doc(), that.doc()) - && Objects.equals(src.type(), that.type()) + && Objects.equals(src.version(), that.version()) && Objects.equals(src.name(), that.name()) + && Objects.equals(src.doc(), that.doc()) && Objects.equals(src.type(), that.type()) && Objects.deepEquals(src.defaultValue(), that.defaultValue()) && Objects.equals(src.parameters(), that.parameters()); @@ -2505,8 +2326,7 @@ private static boolean schemaEquals( equals = equals && schemaEquals(src.valueSchema(), that.valueSchema(), 
cache); break; case MAP: - equals = equals - && schemaEquals(src.valueSchema(), that.valueSchema(), cache) + equals = equals && schemaEquals(src.valueSchema(), that.valueSchema(), cache) && schemaEquals(src.keySchema(), that.keySchema(), cache); break; default: @@ -2618,8 +2438,8 @@ private static class ToConnectContext { private final Set detectedCycles; /** - * cycleReferences - map that holds connect Schema references to resolve cycles - * detectedCycles - avro schemas that have been detected to have cycles + * cycleReferences - map that holds connect Schema references to resolve cycles detectedCycles - avro + * schemas that have been detected to have cycles */ private ToConnectContext() { this.cycleReferences = new IdentityHashMap<>(); @@ -2631,9 +2451,9 @@ private ToConnectContext() { * Class that holds the context for performing {@code fromConnectSchema} */ private static class FromConnectContext { - //SchemaMap is used to resolve references that need to mapped as types + // SchemaMap is used to resolve references that need to mapped as types private final Map schemaMap; - //schema name to Schema reference to resolve cycles + // schema name to Schema reference to resolve cycles private final Map cycleReferences; private int defaultSchemaNameIndex = 0; diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/avro/AvroDataConfig.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/avro/AvroDataConfig.java index 70ea32db7f..9910a43dd1 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/avro/AvroDataConfig.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/avro/AvroDataConfig.java @@ -6,37 +6,31 @@ import java.util.HashMap; import java.util.Map; - public class AvroDataConfig extends AbstractConfig { public static final String ENHANCED_AVRO_SCHEMA_SUPPORT_CONFIG = "enhanced.avro.schema.support"; public static final boolean ENHANCED_AVRO_SCHEMA_SUPPORT_DEFAULT = 
false; - public static final String ENHANCED_AVRO_SCHEMA_SUPPORT_DOC = - "Toggle for enabling/disabling enhanced avro schema support: Enum symbol preservation and " - + "Package Name awareness"; + public static final String ENHANCED_AVRO_SCHEMA_SUPPORT_DOC = "Toggle for enabling/disabling enhanced avro schema support: Enum symbol preservation and " + + "Package Name awareness"; public static final String CONNECT_META_DATA_CONFIG = "connect.meta.data"; public static final boolean CONNECT_META_DATA_DEFAULT = true; - public static final String CONNECT_META_DATA_DOC = - "Toggle for enabling/disabling connect converter to add its meta data to the output schema " - + "or not"; + public static final String CONNECT_META_DATA_DOC = "Toggle for enabling/disabling connect converter to add its meta data to the output schema " + + "or not"; public static final String SCHEMAS_CACHE_SIZE_CONFIG = "schemas.cache.config"; public static final int SCHEMAS_CACHE_SIZE_DEFAULT = 1000; - public static final String SCHEMAS_CACHE_SIZE_DOC = - "Size of the converted schemas cache"; + public static final String SCHEMAS_CACHE_SIZE_DOC = "Size of the converted schemas cache"; public static ConfigDef baseConfigDef() { return new ConfigDef() - .define(ENHANCED_AVRO_SCHEMA_SUPPORT_CONFIG, - ConfigDef.Type.BOOLEAN, - ENHANCED_AVRO_SCHEMA_SUPPORT_DEFAULT, - ConfigDef.Importance.MEDIUM, - ENHANCED_AVRO_SCHEMA_SUPPORT_DOC) - .define(CONNECT_META_DATA_CONFIG, ConfigDef.Type.BOOLEAN, CONNECT_META_DATA_DEFAULT, - ConfigDef.Importance.LOW, CONNECT_META_DATA_DOC) - .define(SCHEMAS_CACHE_SIZE_CONFIG, ConfigDef.Type.INT, SCHEMAS_CACHE_SIZE_DEFAULT, - ConfigDef.Importance.LOW, SCHEMAS_CACHE_SIZE_DOC); + .define(ENHANCED_AVRO_SCHEMA_SUPPORT_CONFIG, ConfigDef.Type.BOOLEAN, + ENHANCED_AVRO_SCHEMA_SUPPORT_DEFAULT, ConfigDef.Importance.MEDIUM, + ENHANCED_AVRO_SCHEMA_SUPPORT_DOC) + .define(CONNECT_META_DATA_CONFIG, ConfigDef.Type.BOOLEAN, CONNECT_META_DATA_DEFAULT, + ConfigDef.Importance.LOW, 
CONNECT_META_DATA_DOC) + .define(SCHEMAS_CACHE_SIZE_CONFIG, ConfigDef.Type.INT, SCHEMAS_CACHE_SIZE_DEFAULT, + ConfigDef.Importance.LOW, SCHEMAS_CACHE_SIZE_DOC); } public AvroDataConfig(Map props) { @@ -55,7 +49,6 @@ public int getSchemasCacheSize() { return this.getInt(SCHEMAS_CACHE_SIZE_CONFIG); } - public static class Builder { private Map props = new HashMap<>(); diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/CompactFormatStrategy.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/CompactFormatStrategy.java index 9715bde44c..bd943833c0 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/CompactFormatStrategy.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/CompactFormatStrategy.java @@ -1,13 +1,13 @@ package io.apicurio.registry.utils.converter.json; -import java.nio.ByteBuffer; -import java.util.Objects; - import io.apicurio.registry.resolver.strategy.ArtifactReference; import io.apicurio.registry.serde.AbstractKafkaSerDe; import io.apicurio.registry.serde.DefaultIdHandler; import io.apicurio.registry.serde.IdHandler; +import java.nio.ByteBuffer; +import java.util.Objects; + public class CompactFormatStrategy implements FormatStrategy { private IdHandler idHandler; diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/FormatStrategy.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/FormatStrategy.java index 8abf00df6f..d2c6dec79b 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/FormatStrategy.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/FormatStrategy.java @@ -2,6 +2,7 @@ public interface FormatStrategy { byte[] fromConnectData(long globalId, byte[] payload); + IdPayload toConnectData(byte[] bytes); class IdPayload { diff --git 
a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/JsonConverterMetadata.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/JsonConverterMetadata.java index 733d091271..5fad41011e 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/JsonConverterMetadata.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/JsonConverterMetadata.java @@ -1,16 +1,16 @@ package io.apicurio.registry.utils.converter.json; +import io.apicurio.registry.serde.data.KafkaSerdeMetadata; import org.apache.kafka.common.header.Headers; import org.apache.kafka.connect.data.Schema; -import io.apicurio.registry.serde.data.KafkaSerdeMetadata; - public class JsonConverterMetadata extends KafkaSerdeMetadata { private Schema schema; /** * Constructor. + * * @param topic * @param isKey * @param headers diff --git a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/JsonConverterRecord.java b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/JsonConverterRecord.java index 0c3f49ff13..a8113465b0 100644 --- a/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/JsonConverterRecord.java +++ b/utils/converter/src/main/java/io/apicurio/registry/utils/converter/json/JsonConverterRecord.java @@ -6,6 +6,7 @@ public class JsonConverterRecord extends KafkaSerdeRecord { /** * Constructor. 
+ * * @param metadata * @param payload */ diff --git a/utils/converter/src/test/java/io/apicurio/registry/utils/converter/avro/AvroDataTest.java b/utils/converter/src/test/java/io/apicurio/registry/utils/converter/avro/AvroDataTest.java index a4d71c65d1..8e5e462f24 100644 --- a/utils/converter/src/test/java/io/apicurio/registry/utils/converter/avro/AvroDataTest.java +++ b/utils/converter/src/test/java/io/apicurio/registry/utils/converter/avro/AvroDataTest.java @@ -10,21 +10,11 @@ public class AvroDataTest { @Test public void testIntWithConnectDefault() { - final String s = "{" - + " \"type\": \"record\"," - + " \"name\": \"sample\"," - + " \"namespace\": \"io.apicurio\"," - + " \"fields\": [" - + " {" - + " \"name\": \"prop\"," - + " \"type\": {" - + " \"type\": \"int\"," - + " \"connect.default\": 42," - + " \"connect.version\": 1" - + " }" - + " }" - + " ]" - + "}"; + final String s = "{" + " \"type\": \"record\"," + " \"name\": \"sample\"," + + " \"namespace\": \"io.apicurio\"," + " \"fields\": [" + " {" + + " \"name\": \"prop\"," + " \"type\": {" + " \"type\": \"int\"," + + " \"connect.default\": 42," + " \"connect.version\": 1" + " }" + " }" + + " ]" + "}"; org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(s); @@ -36,21 +26,11 @@ public void testIntWithConnectDefault() { @Test public void testLongWithConnectDefault() { - final String s = "{" - + " \"type\": \"record\"," - + " \"name\": \"sample\"," - + " \"namespace\": \"io.apicurio\"," - + " \"fields\": [" - + " {" - + " \"name\": \"prop\"," - + " \"type\": {" - + " \"type\": \"long\"," - + " \"connect.default\": 42," - + " \"connect.version\": 1" - + " }" - + " }" - + " ]" - + "}"; + final String s = "{" + " \"type\": \"record\"," + " \"name\": \"sample\"," + + " \"namespace\": \"io.apicurio\"," + " \"fields\": [" + " {" + + " \"name\": \"prop\"," + " \"type\": {" + " \"type\": \"long\"," + + " \"connect.default\": 42," + " \"connect.version\": 1" + " }" + " }" + + " ]" + "}"; 
org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(s); @@ -62,16 +42,9 @@ public void testLongWithConnectDefault() { @Test public void testAvroInt64WithInteger() { - final String s = "{" - + " \"type\": \"record\"," - + " \"name\": \"sample\"," - + " \"namespace\": \"io.apicurio\"," - + " \"fields\": [" - + " {" - + " \"name\": \"someprop\"," - + " \"type\": [\"long\",\"null\"]" - + " }" - + " ]" + final String s = "{" + " \"type\": \"record\"," + " \"name\": \"sample\"," + + " \"namespace\": \"io.apicurio\"," + " \"fields\": [" + " {" + + " \"name\": \"someprop\"," + " \"type\": [\"long\",\"null\"]" + " }" + " ]" + "}"; org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(s); @@ -82,34 +55,18 @@ public void testAvroInt64WithInteger() { @Test public void testDecimal() { - final String s = "{" - + " \"type\": \"record\"," - + " \"name\": \"sample\"," - + " \"namespace\": \"io.apicurio\"," - + " \"fields\": [" - + " {" - + " \"name\": \"somedecimal\"," - + " \"type\": [\n" - + " {\n" - + " \"type\": \"bytes\",\n" - + " \"scale\": 4,\n" - + " \"precision\": 4,\n" - + " \"connect.version\": 1,\n" - + " \"connect.parameters\": {\n" - + " \"scale\": \"4\",\n" - + " \"connect.decimal.precision\": \"4\"\n" - + " },\n" + final String s = "{" + " \"type\": \"record\"," + " \"name\": \"sample\"," + + " \"namespace\": \"io.apicurio\"," + " \"fields\": [" + " {" + + " \"name\": \"somedecimal\"," + " \"type\": [\n" + " {\n" + + " \"type\": \"bytes\",\n" + " \"scale\": 4,\n" + + " \"precision\": 4,\n" + " \"connect.version\": 1,\n" + + " \"connect.parameters\": {\n" + " \"scale\": \"4\",\n" + + " \"connect.decimal.precision\": \"4\"\n" + " },\n" + " \"connect.default\": \"AA==\",\n" + " \"connect.name\": \"org.apache.kafka.connect.data.Decimal\",\n" - + " \"logicalType\": \"decimal\"\n" - + " },\n" - + " \"null\"\n" - + " ],\n" - + " \"default\": \"AA==\"" - + " }" - + " ]," - + "\"connect.name\":\"io.apicurio.sample\"\n" - + 
"}"; + + " \"logicalType\": \"decimal\"\n" + " },\n" + " \"null\"\n" + + " ],\n" + " \"default\": \"AA==\"" + " }" + " ]," + + "\"connect.name\":\"io.apicurio.sample\"\n" + "}"; org.apache.avro.Schema bSchema = new org.apache.avro.Schema.Parser().parse(s); AvroData avroData = new AvroData(0); diff --git a/utils/exportConfluent/README.md b/utils/exportConfluent/README.md index e45c72d09e..11b407d536 100644 --- a/utils/exportConfluent/README.md +++ b/utils/exportConfluent/README.md @@ -44,4 +44,3 @@ curl -X POST "http:///apis/registry/v3/admin/import" \ -H "X-Registry-Preserve-ContentId: false" \ --data-binary @confluent-schema-registry-export.zip ``` - diff --git a/utils/exportConfluent/pom.xml b/utils/exportConfluent/pom.xml index c29c106abf..7378b29d60 100644 --- a/utils/exportConfluent/pom.xml +++ b/utils/exportConfluent/pom.xml @@ -1,149 +1,146 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-utils-exportConfluent - jar - apicurio-registry-utils-exportConfluent + apicurio-registry-utils-exportConfluent + jar + apicurio-registry-utils-exportConfluent - + - - io.quarkus - quarkus-arc - - - - io.quarkus - quarkus-junit5 - test - + + io.quarkus + quarkus-arc + - - commons-codec - commons-codec - + + io.quarkus + quarkus-junit5 + test + - - io.apicurio - apicurio-registry-utils-import-export - + + commons-codec + commons-codec + - - - io.confluent - kafka-schema-registry-client - 7.6.1 - - - - - - - org.apache.maven.plugins - maven-checkstyle-plugin - - - com.puppycrawl.tools - checkstyle - ${version.puppycrawl} - - - - - validate - validate - - ../../.checkstyle/simple.xml - ../../.checkstyle/java.header - ../../.checkstyle/suppressions.xml - true - UTF-8 - true - true - - - check - - - - - - - io.smallrye - jandex-maven-plugin - - - make-index - - jandex - - - - + + io.apicurio + apicurio-registry-utils-import-export + - - 
io.quarkus - quarkus-maven-plugin - - - - build - - - - + + + io.confluent + kafka-schema-registry-client + 7.6.1 + + - - org.apache.maven.plugins - maven-antrun-plugin - 3.1.0 - - - package - - - - - - - run - - - - - - - org.apache.maven.plugins - maven-assembly-plugin - - - default - package - - single - - - false - true - - src/main/assembly/exportConfluent.xml - - ${tar.long.file.mode} - - - - - - + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + + com.puppycrawl.tools + checkstyle + ${version.puppycrawl} + + + + + validate + + check + + validate + + ../../.checkstyle/simple.xml + ../../.checkstyle/java.header + ../../.checkstyle/suppressions.xml + true + UTF-8 + true + true + + + + + + + io.smallrye + jandex-maven-plugin + + + make-index + + jandex + + + + + + + io.quarkus + quarkus-maven-plugin + + + + build + + + + + + + org.apache.maven.plugins + maven-antrun-plugin + 3.1.0 + + + + run + + package + + + + + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + + + default + + single + + package + + false + true + + + src/main/assembly/exportConfluent.xml + + ${tar.long.file.mode} + + + + + + diff --git a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/Export.java b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/Export.java index 6148a9abca..411f738339 100644 --- a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/Export.java +++ b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/Export.java @@ -28,9 +28,6 @@ import org.apache.commons.codec.digest.DigestUtils; import org.jboss.logging.Logger; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLSocketFactory; -import javax.net.ssl.TrustManager; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -44,6 +41,10 @@ import java.util.Map; import java.util.zip.ZipOutputStream; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLSocketFactory; +import 
javax.net.ssl.TrustManager; + @QuarkusMain(name = "ConfluentExport") public class Export implements QuarkusApplication { @@ -126,7 +127,6 @@ public int run(String... args) throws Exception { } } - String globalCompatibility = client.getCompatibility(null); GlobalRuleEntity ruleEntity = new GlobalRuleEntity(); @@ -158,7 +158,7 @@ public int run(String... args) throws Exception { public SSLSocketFactory getInsecureSSLSocketFactory() { try { SSLContext sslContext = SSLContext.getInstance("SSL"); - sslContext.init(null, new TrustManager[]{new FakeTrustManager()}, new SecureRandom()); + sslContext.init(null, new TrustManager[] { new FakeTrustManager() }, new SecureRandom()); return sslContext.getSocketFactory(); } catch (Exception ex) { log.error("Could not create Insecure SSL Socket Factory", ex); @@ -184,8 +184,10 @@ public void exportSubject(ExportContext context, String subject) throws RestClie context.getWriter().writeEntity(artifactEntity); } - public void exportSubjectVersionWithRefs(ExportContext context, String subject, Integer version) throws RestClientException, IOException { - if (context.getExportedSubjectVersions().stream().anyMatch(subjectVersionPair -> subjectVersionPair.is(subject, version))) { + public void exportSubjectVersionWithRefs(ExportContext context, String subject, Integer version) + throws RestClientException, IOException { + if (context.getExportedSubjectVersions().stream() + .anyMatch(subjectVersionPair -> subjectVersionPair.is(subject, version))) { return; } context.getExportedSubjectVersions().add(new SubjectVersionPair(subject, version)); @@ -248,6 +250,7 @@ public void exportSubjectVersionWithRefs(ExportContext context, String subject, /** * Serializes the given collection of references to a string + * * @param references */ private String serializeReferences(List references) { diff --git a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/ExportContext.java 
b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/ExportContext.java index b14d369786..c18a7db725 100644 --- a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/ExportContext.java +++ b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/ExportContext.java @@ -16,7 +16,8 @@ public class ExportContext { private final List exportedSubjectVersions = new ArrayList<>(); private final Map contentIndex = new HashMap<>(); - public ExportContext(EntityWriter writer, RestService restService, SchemaRegistryClient schemaRegistryClient) { + public ExportContext(EntityWriter writer, RestService restService, + SchemaRegistryClient schemaRegistryClient) { this.writer = writer; this.restService = restService; this.schemaRegistryClient = schemaRegistryClient; diff --git a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/FakeTrustManager.java b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/FakeTrustManager.java index d5991cea29..80dae5bad1 100644 --- a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/FakeTrustManager.java +++ b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/FakeTrustManager.java @@ -1,8 +1,9 @@ package io.apicurio.registry.utils.export; -import javax.net.ssl.X509TrustManager; import java.security.cert.X509Certificate; +import javax.net.ssl.X509TrustManager; + public class FakeTrustManager implements X509TrustManager { @Override diff --git a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/OptionsParser.java b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/OptionsParser.java index f12b2db721..fe6c5877c7 100644 --- a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/OptionsParser.java +++ b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/OptionsParser.java @@ -23,8 +23,7 @@ public OptionsParser(String[] args) { inSecure = true; } else if 
(arg.equals("--client-props")) { String[] clientconf = Arrays.copyOfRange(args, i + 1, args.length); - clientProps = Arrays.stream(clientconf) - .map(keyvalue -> keyvalue.split("=")) + clientProps = Arrays.stream(clientconf).map(keyvalue -> keyvalue.split("=")) .collect(Collectors.toMap(kv -> kv[0], kv -> kv[1])); System.out.println("Parsed client properties " + clientProps); break; diff --git a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/mappers/ArtifactReferenceMapper.java b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/mappers/ArtifactReferenceMapper.java index fb707c8c37..de61b58a4e 100644 --- a/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/mappers/ArtifactReferenceMapper.java +++ b/utils/exportConfluent/src/main/java/io/apicurio/registry/utils/export/mappers/ArtifactReferenceMapper.java @@ -2,7 +2,6 @@ import io.apicurio.registry.rest.v3.beans.ArtifactReference; import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference; - import jakarta.inject.Singleton; @Singleton diff --git a/utils/importexport/pom.xml b/utils/importexport/pom.xml index 859b211832..9e6853aa22 100644 --- a/utils/importexport/pom.xml +++ b/utils/importexport/pom.xml @@ -1,57 +1,54 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - - - apicurio-registry-utils-import-export - jar - apicurio-registry-utils-import-export - - - - - io.apicurio - apicurio-registry-common - - - - com.fasterxml.jackson.core - jackson-databind - - - - io.quarkus - quarkus-core - provided - - - - - org.junit.jupiter - junit-jupiter - test - - - - - - - - src/test/resources - true - - - src/test/resources-unfiltered - false - - - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + + + apicurio-registry-utils-import-export + jar + apicurio-registry-utils-import-export + + + + + io.apicurio + apicurio-registry-common + + + + com.fasterxml.jackson.core + 
jackson-databind + + + + io.quarkus + quarkus-core + provided + + + + + org.junit.jupiter + junit-jupiter + test + + + + + + + + true + src/test/resources + + + false + src/test/resources-unfiltered + + + diff --git a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ArtifactEntity.java b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ArtifactEntity.java index 0a44879eb2..28952a1daf 100644 --- a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ArtifactEntity.java +++ b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ArtifactEntity.java @@ -16,7 +16,7 @@ @AllArgsConstructor(access = PRIVATE) @ToString @RegisterForReflection -@JsonIgnoreProperties({"isLatest"}) +@JsonIgnoreProperties({ "isLatest" }) public class ArtifactEntity extends Entity { public String groupId; diff --git a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ArtifactVersionEntity.java b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ArtifactVersionEntity.java index 35b1d36913..1c4c373dfd 100644 --- a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ArtifactVersionEntity.java +++ b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ArtifactVersionEntity.java @@ -18,7 +18,7 @@ @AllArgsConstructor(access = PRIVATE) @ToString @RegisterForReflection -@JsonIgnoreProperties({"isLatest"}) +@JsonIgnoreProperties({ "isLatest" }) public class ArtifactVersionEntity extends Entity { public long globalId; @@ -26,7 +26,7 @@ public class ArtifactVersionEntity extends Entity { public String artifactId; public String version; - @JsonAlias({"versionId"}) + @JsonAlias({ "versionId" }) public int versionOrder; public VersionState state; diff --git a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/Entity.java b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/Entity.java index 9e3500c3e9..bb622513f5 100644 --- 
a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/Entity.java +++ b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/Entity.java @@ -1,7 +1,6 @@ package io.apicurio.registry.utils.impexp; import com.fasterxml.jackson.annotation.JsonIgnore; - import io.quarkus.runtime.annotations.RegisterForReflection; @RegisterForReflection diff --git a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/EntityReader.java b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/EntityReader.java index bf9365951c..125faa353b 100644 --- a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/EntityReader.java +++ b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/EntityReader.java @@ -22,6 +22,7 @@ public class EntityReader { /** * Constructor. + * * @param zip */ public EntityReader(ZipInputStream zip) { @@ -123,7 +124,7 @@ private EntityType parseEntityType(String path) { } private T readEntry(ZipEntry entry, Class theClass) throws IOException { - byte [] bytes = IoUtil.toBytes(zip, false); + byte[] bytes = IoUtil.toBytes(zip, false); T entity = mapper.readerFor(theClass).readValue(bytes); return entity; } diff --git a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/EntityWriter.java b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/EntityWriter.java index 224f246d87..0b9c6fb821 100644 --- a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/EntityWriter.java +++ b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/EntityWriter.java @@ -21,6 +21,7 @@ public class EntityWriter { /** * Constructor. + * * @param zip */ public EntityWriter(ZipOutputStream zip) { @@ -29,6 +30,7 @@ public EntityWriter(ZipOutputStream zip) { /** * Writes the given entity to the zip output stream. 
+ * * @param entity * @throws IOException */ @@ -80,7 +82,8 @@ private void writeEntity(ContentEntity entity) throws IOException { } private void writeEntity(ManifestEntity entity) throws IOException { - ZipEntry mdEntry = createZipEntry(EntityType.Manifest, "manifest-" + entity.exportedOn.toInstant().toString(), "json"); + ZipEntry mdEntry = createZipEntry(EntityType.Manifest, + "manifest-" + entity.exportedOn.toInstant().toString(), "json"); write(mdEntry, entity, ManifestEntity.class); } @@ -90,17 +93,20 @@ private void writeEntity(GroupEntity entity) throws IOException { } private void writeEntity(ArtifactEntity entity) throws IOException { - ZipEntry mdEntry = createZipEntry(EntityType.Artifact, entity.groupId, entity.artifactId, "MetaData", "json"); + ZipEntry mdEntry = createZipEntry(EntityType.Artifact, entity.groupId, entity.artifactId, "MetaData", + "json"); write(mdEntry, entity, ArtifactEntity.class); } private void writeEntity(ArtifactVersionEntity entity) throws IOException { - ZipEntry mdEntry = createZipEntry(EntityType.ArtifactVersion, entity.groupId, entity.artifactId, entity.version, "json"); + ZipEntry mdEntry = createZipEntry(EntityType.ArtifactVersion, entity.groupId, entity.artifactId, + entity.version, "json"); write(mdEntry, entity, ArtifactVersionEntity.class); } private void writeEntity(ArtifactRuleEntity entity) throws IOException { - ZipEntry mdEntry = createZipEntry(EntityType.ArtifactRule, entity.groupId, entity.artifactId, entity.type.name(), "json"); + ZipEntry mdEntry = createZipEntry(EntityType.ArtifactRule, entity.groupId, entity.artifactId, + entity.type.name(), "json"); write(mdEntry, entity, ArtifactRuleEntity.class); } @@ -110,12 +116,14 @@ private void writeEntity(GlobalRuleEntity entity) throws IOException { } private void writeEntity(CommentEntity entity) throws IOException { - ZipEntry mdEntry = createZipEntry(EntityType.Comment, entity.globalId + '-' + entity.commentId, "json"); + ZipEntry mdEntry = 
createZipEntry(EntityType.Comment, entity.globalId + '-' + entity.commentId, + "json"); write(mdEntry, entity, CommentEntity.class); } private void writeEntity(BranchEntity entity) throws IOException { - ZipEntry mdEntry = createZipEntry(EntityType.Branch, entity.groupId, entity.artifactId, entity.branchId, "json"); + ZipEntry mdEntry = createZipEntry(EntityType.Branch, entity.groupId, entity.artifactId, + entity.branchId, "json"); write(mdEntry, entity, BranchEntity.class); } @@ -123,21 +131,26 @@ private ZipEntry createZipEntry(EntityType type, String fileName, String fileExt return createZipEntry(type, null, null, fileName, fileExt); } - private ZipEntry createZipEntry(EntityType type, String groupId, String artifactId, String fileName, String fileExt) { + private ZipEntry createZipEntry(EntityType type, String groupId, String artifactId, String fileName, + String fileExt) { // TODO encode groupId, artifactId, and filename as path elements String path = null; switch (type) { case ArtifactRule: - path = String.format("groups/%s/artifacts/%s/rules/%s.%s.%s", groupOrDefault(groupId), artifactId, fileName, type.name(), fileExt); + path = String.format("groups/%s/artifacts/%s/rules/%s.%s.%s", groupOrDefault(groupId), + artifactId, fileName, type.name(), fileExt); break; case Artifact: - path = String.format("groups/%s/artifacts/%s/%s.%s.%s", groupOrDefault(groupId), artifactId, fileName, type.name(), fileExt); + path = String.format("groups/%s/artifacts/%s/%s.%s.%s", groupOrDefault(groupId), artifactId, + fileName, type.name(), fileExt); break; case Branch: - path = String.format("groups/%s/artifacts/%s/branches/%s.%s.%s", groupOrDefault(groupId), artifactId, fileName, type.name(), fileExt); + path = String.format("groups/%s/artifacts/%s/branches/%s.%s.%s", groupOrDefault(groupId), + artifactId, fileName, type.name(), fileExt); break; case ArtifactVersion: - path = String.format("groups/%s/artifacts/%s/versions/%s.%s.%s", groupOrDefault(groupId), artifactId, 
fileName, type.name(), fileExt); + path = String.format("groups/%s/artifacts/%s/versions/%s.%s.%s", groupOrDefault(groupId), + artifactId, fileName, type.name(), fileExt); break; case Content: path = String.format("content/%s.%s.%s", fileName, type.name(), fileExt); diff --git a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ManifestEntity.java b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ManifestEntity.java index 0feac530d2..6886650a3a 100644 --- a/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ManifestEntity.java +++ b/utils/importexport/src/main/java/io/apicurio/registry/utils/impexp/ManifestEntity.java @@ -1,9 +1,9 @@ package io.apicurio.registry.utils.impexp; -import java.util.Date; - import io.quarkus.runtime.annotations.RegisterForReflection; +import java.util.Date; + @RegisterForReflection public class ManifestEntity extends Entity { diff --git a/utils/kafka/pom.xml b/utils/kafka/pom.xml index f6b4cca480..92bcf624e6 100644 --- a/utils/kafka/pom.xml +++ b/utils/kafka/pom.xml @@ -1,74 +1,71 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-utils-kafka - jar - apicurio-registry-utils-kafka + apicurio-registry-utils-kafka + jar + apicurio-registry-utils-kafka - + - - org.eclipse.microprofile.config - microprofile-config-api - + + org.eclipse.microprofile.config + microprofile-config-api + - - org.apache.kafka - kafka-clients - - - org.slf4j - slf4j-api - - - org.jboss.slf4j - slf4j-jboss-logging - ${jboss-slf4j.version} - - - io.quarkus - quarkus-test-common - test - - - org.junit.jupiter - junit-jupiter - test - + + org.apache.kafka + kafka-clients + + + org.slf4j + slf4j-api + + + org.jboss.slf4j + slf4j-jboss-logging + ${jboss-slf4j.version} + + + io.quarkus + quarkus-test-common + test + + + org.junit.jupiter + junit-jupiter + test + - + - - - - 
kr.motd.maven - os-maven-plugin - ${os-maven-plugin.version} - - + - - - org.apache.maven.plugins - maven-jar-plugin - - - - test-jar - - - - - - + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + + + kr.motd.maven + os-maven-plugin + ${os-maven-plugin.version} + + + diff --git a/utils/kafka/src/main/java/io/apicurio/registry/utils/ConcurrentUtil.java b/utils/kafka/src/main/java/io/apicurio/registry/utils/ConcurrentUtil.java index 58ff160f43..ca12741543 100644 --- a/utils/kafka/src/main/java/io/apicurio/registry/utils/ConcurrentUtil.java +++ b/utils/kafka/src/main/java/io/apicurio/registry/utils/ConcurrentUtil.java @@ -24,7 +24,8 @@ public static T get(CompletableFuture cf, long duration, TimeUnit unit) { Throwable t = e.getCause(); if (t instanceof RuntimeException) throw (RuntimeException) t; - if (t instanceof Error) throw (Error) t; + if (t instanceof Error) + throw (Error) t; throw new RuntimeException(e); } } diff --git a/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/AsyncProducer.java b/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/AsyncProducer.java index d4d5cbee16..57dd5a2464 100644 --- a/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/AsyncProducer.java +++ b/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/AsyncProducer.java @@ -18,9 +18,9 @@ import java.util.concurrent.CompletableFuture; /** - * An async wrapper for kafka producer that is resilient in the event of failures - it recreates the underlying - * kafka producer when unrecoverable error occurs. - * This producer is not suitable for transactional use. It is suitable for normal or idempotent use. + * An async wrapper for kafka producer that is resilient in the event of failures - it recreates the + * underlying kafka producer when unrecoverable error occurs. This producer is not suitable for transactional + * use. It is suitable for normal or idempotent use. 
*/ public class AsyncProducer implements ProducerActions { private static final Logger log = LoggerFactory.getLogger(AsyncProducer.class); @@ -74,7 +74,8 @@ private synchronized KafkaProducer getProducer() { private synchronized void closeProducer(KafkaProducer producer, boolean fromCallback) { try { - if (producer == null) producer = this.producer; + if (producer == null) + producer = this.producer; if (producer != null && producer == this.producer) { try { log.info("Closing resilient producer."); @@ -90,7 +91,8 @@ private synchronized void closeProducer(KafkaProducer producer, boolean fr } } } finally { - if (!fromCallback) closed = true; + if (!fromCallback) + closed = true; } } @@ -117,10 +119,8 @@ public void onCompletion(RecordMetadata metadata, Exception exception) { } private boolean isFatalException(Exception e) { - return e instanceof UnsupportedVersionException || - e instanceof AuthorizationException || - e instanceof ProducerFencedException || - e instanceof OutOfOrderSequenceException; + return e instanceof UnsupportedVersionException || e instanceof AuthorizationException + || e instanceof ProducerFencedException || e instanceof OutOfOrderSequenceException; } } } diff --git a/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/KafkaUtil.java b/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/KafkaUtil.java index 329a1b0740..c116ec6116 100644 --- a/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/KafkaUtil.java +++ b/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/KafkaUtil.java @@ -45,11 +45,13 @@ public static CompletionStage toCompletionStage(KafkaFuture kf) { * @param topicNames topics to create, if they don't exist * @param topicConfig the config to use for the new topic */ - public static void createTopics(Properties properties, Set topicNames, Map topicConfig) { + public static void createTopics(Properties properties, Set topicNames, + Map topicConfig) { try (Admin admin = Admin.create(properties)) { 
createTopics(admin, topicNames, topicConfig); } } + public static void createTopics(Properties properties, Set topicNames) { createTopics(properties, topicNames, null); } @@ -57,13 +59,14 @@ public static void createTopics(Properties properties, Set topicNames) { /** * Create topics with sensible defaults. * - * @param admin the Kafka admin to use + * @param admin the Kafka admin to use * @param topicNames topics to create, if they don't exist * @param topicConfig the config to use for the new topic */ public static void createTopics(Admin admin, Set topicNames, Map topicConfig) { ConcurrentUtil.result(createTopicsAsync(admin, topicNames, topicConfig)); } + public static void createTopics(Admin admin, Set topicNames) { createTopics(admin, topicNames, null); } @@ -71,36 +74,32 @@ public static void createTopics(Admin admin, Set topicNames) { /** * Create topics with sensible defaults, async. * - * @param admin the Kafka admin to use + * @param admin the Kafka admin to use * @param topicNames topics to create, if they don't exist */ - public static CompletionStage createTopicsAsync(Admin admin, Set topicNames, Map topicConfig) { + public static CompletionStage createTopicsAsync(Admin admin, Set topicNames, + Map topicConfig) { List> topicsToCreate = new ArrayList<>(); - return toCompletionStage(admin.listTopics().names()) - .thenCompose(topics -> { - for (String topicName : topicNames) { - createTopic(admin, topics, topicsToCreate, topicName, topicConfig); - } - //noinspection SuspiciousToArrayCall - return CompletableFuture.allOf(topicsToCreate.toArray(new CompletableFuture[0])); - }) - .thenCompose(v -> { - if (topicsToCreate.size() > 0) { - return toCompletionStage( - admin.createTopics( - topicsToCreate.stream() - .map(ConcurrentUtil::result) - .collect(Collectors.toList()) - ) - .all() - ); - } else { - return CompletableFuture.completedFuture(null); - } - }); + return toCompletionStage(admin.listTopics().names()).thenCompose(topics -> { + for (String topicName 
: topicNames) { + createTopic(admin, topics, topicsToCreate, topicName, topicConfig); + } + // noinspection SuspiciousToArrayCall + return CompletableFuture.allOf(topicsToCreate.toArray(new CompletableFuture[0])); + }).thenCompose(v -> { + if (topicsToCreate.size() > 0) { + return toCompletionStage(admin.createTopics( + topicsToCreate.stream().map(ConcurrentUtil::result).collect(Collectors.toList())) + .all()); + } else { + return CompletableFuture.completedFuture(null); + } + }); } - private static void createTopic(Admin admin, Set topics, List> topicsToCreate, String topicName, Map topicConfig) { + private static void createTopic(Admin admin, Set topics, + List> topicsToCreate, String topicName, + Map topicConfig) { if (!topics.contains(topicName)) { KafkaFuture newTopicKF = admin.describeCluster().nodes().thenApply(nodes -> { Map configs = new HashMap<>(); @@ -113,7 +112,8 @@ private static void createTopic(Admin admin, Set topics, List log.info("Created new topic: {}", topicName, t)); topicsToCreate.add(toCompletionStage(newTopicKF)); diff --git a/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/ProducerActions.java b/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/ProducerActions.java index d2036f55f5..4d5bf82c53 100644 --- a/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/ProducerActions.java +++ b/utils/kafka/src/main/java/io/apicurio/registry/utils/kafka/ProducerActions.java @@ -6,5 +6,6 @@ import java.util.concurrent.CompletableFuture; import java.util.function.Function; -public interface ProducerActions extends Function, CompletableFuture>, AutoCloseable { +public interface ProducerActions + extends Function, CompletableFuture>, AutoCloseable { } diff --git a/utils/maven-plugin/pom.xml b/utils/maven-plugin/pom.xml index 908f6402e4..432cd89f0e 100644 --- a/utils/maven-plugin/pom.xml +++ b/utils/maven-plugin/pom.xml @@ -1,70 +1,67 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 
4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-maven-plugin - maven-plugin - apicurio-registry-maven-plugin + apicurio-registry-maven-plugin + maven-plugin + apicurio-registry-maven-plugin - - - io.apicurio - apicurio-registry-java-sdk - - - com.fasterxml.jackson.core - jackson-databind - - - org.apache.avro - avro - - - com.google.protobuf - protobuf-java - - - com.google.protobuf - protobuf-java-util - - - org.apache.maven - maven-plugin-api - provided - - - org.apache.maven.plugin-tools - maven-plugin-annotations - provided - - - commons-io - commons-io - - - io.apicurio - apicurio-registry-schema-util-provider - + + + io.apicurio + apicurio-registry-java-sdk + + + com.fasterxml.jackson.core + jackson-databind + + + org.apache.avro + avro + + + com.google.protobuf + protobuf-java + + + com.google.protobuf + protobuf-java-util + + + org.apache.maven + maven-plugin-api + provided + + + org.apache.maven.plugin-tools + maven-plugin-annotations + provided + + + commons-io + commons-io + + + io.apicurio + apicurio-registry-schema-util-provider + - + - - - - org.apache.maven.plugins - maven-plugin-plugin - 3.13.1 - - - + + + + org.apache.maven.plugins + maven-plugin-plugin + 3.13.1 + + + diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AbstractDirectoryParser.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AbstractDirectoryParser.java index 6822100cb2..a212971001 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AbstractDirectoryParser.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AbstractDirectoryParser.java @@ -39,7 +39,9 @@ public AbstractDirectoryParser(RegistryClient client) { public abstract ParsedDirectoryWrapper parse(File rootSchema); - public abstract List handleSchemaReferences(RegisterArtifact rootArtifact, Schema schema, Map fileContents) throws FileNotFoundException, ExecutionException, InterruptedException; + public 
abstract List handleSchemaReferences(RegisterArtifact rootArtifact, + Schema schema, Map fileContents) + throws FileNotFoundException, ExecutionException, InterruptedException; protected ContentHandle readSchemaContent(File schemaFile) { try { @@ -62,10 +64,11 @@ protected RegisterArtifact buildFromRoot(RegisterArtifact rootArtifact, String a return nestedSchema; } - protected ArtifactReference registerNestedSchema(String referenceName, List nestedArtifactReferences, - RegisterArtifact nestedSchema, String artifactContent) throws FileNotFoundException, ExecutionException, InterruptedException - { - CreateArtifactResponse car = registerArtifact(nestedSchema, IoUtil.toStream(artifactContent), nestedArtifactReferences); + protected ArtifactReference registerNestedSchema(String referenceName, + List nestedArtifactReferences, RegisterArtifact nestedSchema, + String artifactContent) throws FileNotFoundException, ExecutionException, InterruptedException { + CreateArtifactResponse car = registerArtifact(nestedSchema, IoUtil.toStream(artifactContent), + nestedArtifactReferences); ArtifactReference referencedArtifact = new ArtifactReference(); referencedArtifact.setName(referenceName); referencedArtifact.setArtifactId(car.getArtifact().getArtifactId()); @@ -74,13 +77,15 @@ protected ArtifactReference registerNestedSchema(String referenceName, List references) throws ExecutionException, InterruptedException { + private CreateArtifactResponse registerArtifact(RegisterArtifact artifact, InputStream artifactContent, + List references) throws ExecutionException, InterruptedException { String groupId = artifact.getGroupId(); String artifactId = artifact.getArtifactId(); String version = artifact.getVersion(); String type = artifact.getArtifactType(); Boolean canonicalize = artifact.getCanonicalize(); - String ct = artifact.getContentType() == null ? ContentTypes.APPLICATION_JSON : artifact.getContentType(); + String ct = artifact.getContentType() == null ? 
ContentTypes.APPLICATION_JSON + : artifact.getContentType(); String data = null; try { if (artifact.getMinify() != null && artifact.getMinify()) { @@ -115,17 +120,15 @@ private CreateArtifactResponse registerArtifact(RegisterArtifact artifact, Input }).collect(Collectors.toList())); createVersion.setContent(content); - var amd = client - .groups() - .byGroupId(groupId) - .artifacts() - .post(createArtifact, config -> { - config.queryParameters.ifExists = IfArtifactExists.forValue(artifact.getIfExists().value()); - config.queryParameters.canonical = canonicalize; - }); - - // client.createArtifact(groupId, artifactId, version, type, ifExists, canonicalize, null, null, ContentTypes.APPLICATION_CREATE_EXTENDED, null, null, artifactContent, references); - log.info(String.format("Successfully registered artifact [%s] / [%s]. GlobalId is [%d]", groupId, artifactId, amd.getVersion().getGlobalId())); + var amd = client.groups().byGroupId(groupId).artifacts().post(createArtifact, config -> { + config.queryParameters.ifExists = IfArtifactExists.forValue(artifact.getIfExists().value()); + config.queryParameters.canonical = canonicalize; + }); + + // client.createArtifact(groupId, artifactId, version, type, ifExists, canonicalize, null, null, + // ContentTypes.APPLICATION_CREATE_EXTENDED, null, null, artifactContent, references); + log.info(String.format("Successfully registered artifact [%s] / [%s]. 
GlobalId is [%d]", groupId, + artifactId, amd.getVersion().getGlobalId())); return amd; } diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AbstractRegistryMojo.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AbstractRegistryMojo.java index 2ef758658d..3d4c77ddb9 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AbstractRegistryMojo.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AbstractRegistryMojo.java @@ -1,16 +1,15 @@ package io.apicurio.registry.maven; +import io.apicurio.registry.client.auth.VertXAuthFactory; +import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.types.ContentTypes; import io.kiota.http.vertx.VertXRequestAdapter; -import io.apicurio.registry.client.auth.VertXAuthFactory; import io.vertx.ext.web.client.WebClient; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.Parameter; -import io.apicurio.registry.rest.client.RegistryClient; - import java.util.Locale; import java.util.concurrent.ExecutionException; @@ -18,15 +17,12 @@ import static io.apicurio.registry.client.auth.VertXAuthFactory.buildSimpleAuthWebClient; /** - * Base class for all Registry Mojo's. - * It handles RegistryService's (aka client) lifecycle. - * + * Base class for all Registry Mojo's. It handles RegistryService's (aka client) lifecycle. */ public abstract class AbstractRegistryMojo extends AbstractMojo { /** - * The registry's url. - * e.g. http://localhost:8080/api/v3 + * The registry's url. e.g. http://localhost:8080/api/v3 */ @Parameter(required = true, property = "apicurio.url") String registryUrl; @@ -89,13 +85,15 @@ private void closeClients() { // TODO: check there are no connection leaks etc... 
} - protected abstract void executeInternal() throws MojoExecutionException, MojoFailureException, ExecutionException, InterruptedException; + protected abstract void executeInternal() + throws MojoExecutionException, MojoFailureException, ExecutionException, InterruptedException; - protected String getContentTypeByExtension(String fileName){ - if(fileName == null) return null; + protected String getContentTypeByExtension(String fileName) { + if (fileName == null) + return null; String[] temp = fileName.split("[.]"); String extension = temp[temp.length - 1]; - switch (extension.toLowerCase(Locale.ROOT)){ + switch (extension.toLowerCase(Locale.ROOT)) { case "avro": case "avsc": case "json": @@ -131,7 +129,9 @@ public void setClientSecret(String clientSecret) { this.clientSecret = clientSecret; } - public void setClientScope(String clientScope) { this.clientScope = clientScope; } + public void setClientScope(String clientScope) { + this.clientScope = clientScope; + } public void setUsername(String username) { this.username = username; diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AvroDirectoryParser.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AvroDirectoryParser.java index c161122d22..f4e2c7b0ab 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AvroDirectoryParser.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/AvroDirectoryParser.java @@ -1,6 +1,5 @@ package io.apicurio.registry.maven; - import io.apicurio.registry.content.ContentHandle; import io.apicurio.registry.content.TypedContent; import io.apicurio.registry.rest.client.RegistryClient; @@ -39,44 +38,57 @@ public ParsedDirectoryWrapper parse(File rootSchemaFile) { } @Override - public List handleSchemaReferences(RegisterArtifact rootArtifact, Schema rootSchema, Map fileContents) throws FileNotFoundException, ExecutionException, InterruptedException { + public List handleSchemaReferences(RegisterArtifact rootArtifact, 
Schema rootSchema, + Map fileContents) + throws FileNotFoundException, ExecutionException, InterruptedException { Set references = new HashSet<>(); - //Iterate through all the fields of the schema + // Iterate through all the fields of the schema for (Schema.Field field : rootSchema.getFields()) { List nestedArtifactReferences = new ArrayList<>(); - if (field.schema().getType() == Schema.Type.RECORD) { //If the field is a sub-schema, recursively check for nested sub-schemas and register all of them + if (field.schema().getType() == Schema.Type.RECORD) { // If the field is a sub-schema, recursively + // check for nested sub-schemas and register + // all of them RegisterArtifact nestedSchema = buildFromRoot(rootArtifact, field.schema().getFullName()); if (field.schema().hasFields()) { - nestedArtifactReferences = handleSchemaReferences(nestedSchema, field.schema(), fileContents); + nestedArtifactReferences = handleSchemaReferences(nestedSchema, field.schema(), + fileContents); } - references.add(registerNestedSchema(field.schema().getFullName(), nestedArtifactReferences, nestedSchema, fileContents.get(field.schema().getFullName()).getContent().content())); - } else if (field.schema().getType() == Schema.Type.ENUM) { //If the nested schema is an enum, just register + references.add(registerNestedSchema(field.schema().getFullName(), nestedArtifactReferences, + nestedSchema, fileContents.get(field.schema().getFullName()).getContent().content())); + } else if (field.schema().getType() == Schema.Type.ENUM) { // If the nested schema is an enum, + // just register RegisterArtifact nestedSchema = buildFromRoot(rootArtifact, field.schema().getFullName()); - references.add(registerNestedSchema(field.schema().getFullName(), nestedArtifactReferences, nestedSchema, fileContents.get(field.schema().getFullName()).getContent().content())); - } else if (isArrayWithSubschemaElement(field)) { //If the nested schema is an array and the element is a sub-schema, handle it + 
references.add(registerNestedSchema(field.schema().getFullName(), nestedArtifactReferences, + nestedSchema, fileContents.get(field.schema().getFullName()).getContent().content())); + } else if (isArrayWithSubschemaElement(field)) { // If the nested schema is an array and the + // element is a sub-schema, handle it Schema elementSchema = field.schema().getElementType(); RegisterArtifact nestedSchema = buildFromRoot(rootArtifact, elementSchema.getFullName()); if (elementSchema.hasFields()) { - nestedArtifactReferences = handleSchemaReferences(nestedSchema, elementSchema, fileContents); + nestedArtifactReferences = handleSchemaReferences(nestedSchema, elementSchema, + fileContents); } - references.add(registerNestedSchema(elementSchema.getFullName(), nestedArtifactReferences, nestedSchema, fileContents.get(elementSchema.getFullName()).getContent().content())); + references.add(registerNestedSchema(elementSchema.getFullName(), nestedArtifactReferences, + nestedSchema, fileContents.get(elementSchema.getFullName()).getContent().content())); } } return new ArrayList<>(references); } private ParsedDirectoryWrapper parseDirectory(File directory, File rootSchema) { - Set typesToAdd = Arrays.stream(Objects.requireNonNull(directory.listFiles((dir, name) -> name.endsWith(AVRO_SCHEMA_EXTENSION)))) + Set typesToAdd = Arrays + .stream(Objects.requireNonNull( + directory.listFiles((dir, name) -> name.endsWith(AVRO_SCHEMA_EXTENSION)))) .filter(file -> !file.getName().equals(rootSchema.getName())).collect(Collectors.toSet()); Map processed = new HashMap<>(); @@ -100,31 +112,37 @@ private ParsedDirectoryWrapper parseDirectory(File directory, File rootS schemaContents.put(schema.getFullName(), typedSchemaContent); fileParsed = true; } catch (SchemaParseException ex) { - log.warn("Error processing Avro schema with name {}. This usually means that the references are not ready yet to parse it", typeToAdd.getName()); + log.warn( + "Error processing Avro schema with name {}. 
This usually means that the references are not ready yet to parse it", + typeToAdd.getName()); } } partialParser = new Schema.Parser(); partialParser.addTypes(processed); - //If no schema has been processed during this iteration, that means there is an error in the configuration, throw exception. + // If no schema has been processed during this iteration, that means there is an error in the + // configuration, throw exception. if (!fileParsed) { - throw new IllegalStateException("Error found in the directory structure. Check that all required files are present."); + throw new IllegalStateException( + "Error found in the directory structure. Check that all required files are present."); } } rootSchemaParser.addTypes(processed); - return new AvroSchemaWrapper(rootSchemaParser.parse(readSchemaContent(rootSchema).content()), schemaContents); + return new AvroSchemaWrapper(rootSchemaParser.parse(readSchemaContent(rootSchema).content()), + schemaContents); } private boolean isArrayWithSubschemaElement(Schema.Field field) { - return field.schema().getType() == Schema.Type.ARRAY && field.schema().getElementType().getType() == Schema.Type.RECORD; + return field.schema().getType() == Schema.Type.ARRAY + && field.schema().getElementType().getType() == Schema.Type.RECORD; } public static class AvroSchemaWrapper implements ParsedDirectoryWrapper { final Schema schema; - final Map fileContents; //Original file contents from the file system. + final Map fileContents; // Original file contents from the file system. 
public AvroSchemaWrapper(Schema schema, Map fileContents) { this.schema = schema; diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/DownloadRegistryMojo.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/DownloadRegistryMojo.java index 51e974baef..74708e9453 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/DownloadRegistryMojo.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/DownloadRegistryMojo.java @@ -1,18 +1,17 @@ package io.apicurio.registry.maven; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; + import java.io.InputStream; import java.nio.file.Files; import java.nio.file.StandardCopyOption; import java.util.List; import java.util.concurrent.ExecutionException; -import org.apache.maven.plugin.MojoExecutionException; -import org.apache.maven.plugins.annotations.Mojo; -import org.apache.maven.plugins.annotations.Parameter; - /** * Download artifacts. - * */ @Mojo(name = "download") public class DownloadRegistryMojo extends AbstractRegistryMojo { @@ -34,20 +33,28 @@ protected void validate() throws MojoExecutionException { int errorCount = 0; for (DownloadArtifact artifact : artifacts) { if (artifact.getGroupId() == null) { - getLog().error(String.format("GroupId is required when downloading an artifact. Missing from artifacts[%d].", idx)); + getLog().error(String.format( + "GroupId is required when downloading an artifact. Missing from artifacts[%d].", + idx)); errorCount++; } if (artifact.getArtifactId() == null) { - getLog().error(String.format("ArtifactId is required when downloading an artifact. Missing from artifacts[%s].", idx)); + getLog().error(String.format( + "ArtifactId is required when downloading an artifact. 
Missing from artifacts[%s].", + idx)); errorCount++; } if (artifact.getFile() == null) { - getLog().error(String.format("File is required when downloading an artifact. Missing from artifacts[%s].", idx)); + getLog().error(String.format( + "File is required when downloading an artifact. Missing from artifacts[%s].", + idx)); errorCount++; } else { if (artifact.getFile().exists()) { if (artifact.getOverwrite() == null || artifact.getOverwrite() == false) { - getLog().error(String.format("File being written already exists. Use true to replace the destination file: %s", artifact.getFile().getPath())); + getLog().error(String.format( + "File being written already exists. Use true to replace the destination file: %s", + artifact.getFile().getPath())); errorCount++; } } @@ -57,7 +64,8 @@ protected void validate() throws MojoExecutionException { } if (errorCount > 0) { - throw new MojoExecutionException("Invalid configuration of the Download Artifact(s) mojo. See the output log for details."); + throw new MojoExecutionException( + "Invalid configuration of the Download Artifact(s) mojo. 
See the output log for details."); } } } @@ -88,9 +96,11 @@ private int downloadArtifact(DownloadArtifact artifact) throws ExecutionExceptio } boolean replaceExisting = artifact.getOverwrite() != null && artifact.getOverwrite(); - getLog().info(String.format("Downloading artifact [%s] / [%s] (version %s).", groupId, artifactId, version)); + getLog().info(String.format("Downloading artifact [%s] / [%s] (version %s).", groupId, artifactId, + version)); - try (InputStream content = getClient().groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().byVersionExpression(version).content().get()) { + try (InputStream content = getClient().groups().byGroupId(groupId).artifacts() + .byArtifactId(artifactId).versions().byVersionExpression(version).content().get()) { if (!artifact.getFile().getParentFile().exists()) { artifact.getFile().getParentFile().mkdirs(); @@ -103,13 +113,16 @@ private int downloadArtifact(DownloadArtifact artifact) throws ExecutionExceptio } } catch (Exception e) { errorCount++; - getLog().error(String.format("Exception while downloading artifact [%s] / [%s]", groupId, artifactId), e); + getLog().error( + String.format("Exception while downloading artifact [%s] / [%s]", groupId, artifactId), + e); } - getLog().info(String.format("Downloaded artifact [%s] / [%s] to %s.", groupId, artifactId, artifact.getFile())); + getLog().info(String.format("Downloaded artifact [%s] / [%s] to %s.", groupId, artifactId, + artifact.getFile())); if (artifact.getArtifactReferences() != null && !artifact.getArtifactReferences().isEmpty()) { - for (DownloadArtifact reference: artifact.getArtifactReferences()) { + for (DownloadArtifact reference : artifact.getArtifactReferences()) { errorCount += downloadArtifact(reference); } } diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/JsonSchemaDirectoryParser.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/JsonSchemaDirectoryParser.java index 3e9bfdf248..7d748e5406 
100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/JsonSchemaDirectoryParser.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/JsonSchemaDirectoryParser.java @@ -43,14 +43,17 @@ public ParsedDirectoryWrapper parse(File rootSchemaFile) { } @Override - public List handleSchemaReferences(RegisterArtifact rootArtifact, org.everit.json.schema.Schema rootSchema, Map fileContents) throws FileNotFoundException, ExecutionException, InterruptedException { + public List handleSchemaReferences(RegisterArtifact rootArtifact, + org.everit.json.schema.Schema rootSchema, Map fileContents) + throws FileNotFoundException, ExecutionException, InterruptedException { if (rootSchema instanceof ObjectSchema) { ObjectSchema objectSchema = (ObjectSchema) rootSchema; Set references = new HashSet<>(); - Map rootSchemaPropertySchemas = objectSchema.getPropertySchemas(); + Map rootSchemaPropertySchemas = objectSchema + .getPropertySchemas(); for (String schemaKey : rootSchemaPropertySchemas.keySet()) { @@ -59,15 +62,18 @@ public List handleSchemaReferences(RegisterArtifact rootArtif if (rootSchemaPropertySchemas.get(schemaKey) instanceof ReferenceSchema) { ReferenceSchema nestedSchema = (ReferenceSchema) rootSchemaPropertySchemas.get(schemaKey); - RegisterArtifact nestedRegisterArtifact = buildFromRoot(rootArtifact, nestedSchema.getSchemaLocation()); + RegisterArtifact nestedRegisterArtifact = buildFromRoot(rootArtifact, + nestedSchema.getSchemaLocation()); if (nestedSchema.getReferredSchema() instanceof ObjectSchema) { ObjectSchema nestedObjectSchema = (ObjectSchema) nestedSchema.getReferredSchema(); - nestedArtifactReferences = handleSchemaReferences(nestedRegisterArtifact, nestedObjectSchema, fileContents); + nestedArtifactReferences = handleSchemaReferences(nestedRegisterArtifact, + nestedObjectSchema, fileContents); } - references.add(registerNestedSchema(nestedSchema.getSchemaLocation(), nestedArtifactReferences, - nestedRegisterArtifact, 
fileContents.get(nestedSchema.getSchemaLocation()).getContent().content())); + references.add(registerNestedSchema(nestedSchema.getSchemaLocation(), + nestedArtifactReferences, nestedRegisterArtifact, + fileContents.get(nestedSchema.getSchemaLocation()).getContent().content())); } else if (rootSchemaPropertySchemas.get(schemaKey) instanceof ArraySchema) { @@ -75,14 +81,17 @@ public List handleSchemaReferences(RegisterArtifact rootArtif if (arraySchema.getAllItemSchema() instanceof ReferenceSchema) { ReferenceSchema arrayElementSchema = (ReferenceSchema) arraySchema.getAllItemSchema(); - RegisterArtifact nestedRegisterArtifact = buildFromRoot(rootArtifact, arrayElementSchema.getSchemaLocation()); + RegisterArtifact nestedRegisterArtifact = buildFromRoot(rootArtifact, + arrayElementSchema.getSchemaLocation()); if (arrayElementSchema.getReferredSchema() instanceof ObjectSchema) { - nestedArtifactReferences = handleSchemaReferences(nestedRegisterArtifact, arrayElementSchema, fileContents); + nestedArtifactReferences = handleSchemaReferences(nestedRegisterArtifact, + arrayElementSchema, fileContents); } - references.add(registerNestedSchema(arrayElementSchema.getSchemaLocation(), nestedArtifactReferences, - nestedRegisterArtifact, fileContents.get(arrayElementSchema.getSchemaLocation()).getContent().content())); + references.add(registerNestedSchema(arrayElementSchema.getSchemaLocation(), + nestedArtifactReferences, nestedRegisterArtifact, fileContents + .get(arrayElementSchema.getSchemaLocation()).getContent().content())); } } } @@ -93,7 +102,9 @@ public List handleSchemaReferences(RegisterArtifact rootArtif } private JsonSchemaWrapper parseDirectory(File directory, File rootSchema) { - Set typesToAdd = Arrays.stream(Objects.requireNonNull(directory.listFiles((dir, name) -> name.endsWith(JSON_SCHEMA_EXTENSION)))) + Set typesToAdd = Arrays + .stream(Objects.requireNonNull( + directory.listFiles((dir, name) -> name.endsWith(JSON_SCHEMA_EXTENSION)))) .filter(file -> 
!file.getName().equals(rootSchema.getName())).collect(Collectors.toSet()); Map processed = new HashMap<>(); @@ -107,24 +118,31 @@ private JsonSchemaWrapper parseDirectory(File directory, File rootSchema) { } try { final ContentHandle schemaContent = readSchemaContent(typeToAdd); - final TypedContent typedSchemaContent = TypedContent.create(schemaContent, ContentTypes.APPLICATION_JSON); + final TypedContent typedSchemaContent = TypedContent.create(schemaContent, + ContentTypes.APPLICATION_JSON); final Schema schema = JsonUtil.readSchema(schemaContent.content(), schemaContents, false); processed.put(schema.getId(), schema); schemaContents.put(schema.getId(), typedSchemaContent); fileParsed = true; } catch (JsonProcessingException ex) { - log.warn("Error processing json schema with name {}. This usually means that the references are not ready yet to parse it", typeToAdd.getName()); + log.warn( + "Error processing json schema with name {}. This usually means that the references are not ready yet to parse it", + typeToAdd.getName()); } } - //If no schema has been processed during this iteration, that means there is an error in the configuration, throw exception. + // If no schema has been processed during this iteration, that means there is an error in the + // configuration, throw exception. if (!fileParsed) { - throw new IllegalStateException("Error found in the directory structure. Check that all required files are present."); + throw new IllegalStateException( + "Error found in the directory structure. 
Check that all required files are present."); } } try { - return new JsonSchemaWrapper(JsonUtil.readSchema(readSchemaContent(rootSchema).content(), schemaContents, false), schemaContents); + return new JsonSchemaWrapper( + JsonUtil.readSchema(readSchemaContent(rootSchema).content(), schemaContents, false), + schemaContents); } catch (JsonProcessingException e) { throw new RuntimeException("Unable to parse main schema", e); } @@ -151,4 +169,3 @@ public Map getSchemaContents() { } } - diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/MergePropertiesMojo.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/MergePropertiesMojo.java index 6751df6af4..899f92d26a 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/MergePropertiesMojo.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/MergePropertiesMojo.java @@ -1,5 +1,11 @@ package io.apicurio.registry.maven; +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugin.MojoFailureException; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; + import java.io.File; import java.io.FileReader; import java.io.FileWriter; @@ -7,21 +13,15 @@ import java.util.List; import java.util.Properties; -import org.apache.maven.plugin.AbstractMojo; -import org.apache.maven.plugin.MojoExecutionException; -import org.apache.maven.plugin.MojoFailureException; -import org.apache.maven.plugins.annotations.Mojo; -import org.apache.maven.plugins.annotations.Parameter; - @Mojo(name = "merge") public class MergePropertiesMojo extends AbstractMojo { @Parameter(required = true) File output; - + @Parameter(required = true) List inputs; - + @Parameter(required = false, defaultValue = "false") Boolean deleteInputs; @@ -36,12 +36,12 @@ public void execute() throws MojoExecutionException, MojoFailureException { if (inputs == null || inputs.isEmpty()) 
{ throw new MojoExecutionException("Invalid 'inputs'. Must be a collection of input files."); } - + Properties mergedProps = new Properties(); // Read all the input properties files getLog().info("Reading " + inputs.size() + " input files."); for (File input : inputs) { - if (!input.isFile() ) { + if (!input.isFile()) { throw new MojoExecutionException("Invalid input file: " + input.getAbsolutePath()); } Properties inputProps = new Properties(); @@ -57,7 +57,7 @@ public void execute() throws MojoExecutionException, MojoFailureException { getLog().info("Deleted input file: " + input.getName()); } } - + // Write out the merged properties to the output file. if (output.isFile()) { output.delete(); @@ -66,7 +66,8 @@ public void execute() throws MojoExecutionException, MojoFailureException { mergedProps.store(writer, "Properties merged by 'apicurio-registry-maven-plugin'"); getLog().info("Merged properties written to: " + output.getName()); } catch (Throwable t) { - throw new MojoExecutionException("Failed to write merged properties to: " + output.getAbsolutePath()); + throw new MojoExecutionException( + "Failed to write merged properties to: " + output.getAbsolutePath()); } } diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/ProtobufDirectoryParser.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/ProtobufDirectoryParser.java index d5270db84a..94e539afb3 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/ProtobufDirectoryParser.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/ProtobufDirectoryParser.java @@ -36,72 +36,93 @@ public ProtobufDirectoryParser(RegistryClient client) { @Override public ParsedDirectoryWrapper parse(File protoFile) { - Set protoFiles = Arrays.stream(Objects.requireNonNull(protoFile.getParentFile().listFiles((dir, name) -> name.endsWith(PROTO_SCHEMA_EXTENSION)))) - .filter(file -> !file.getName().equals(protoFile.getName())) - .collect(Collectors.toSet()); + Set 
protoFiles = Arrays + .stream(Objects.requireNonNull(protoFile.getParentFile() + .listFiles((dir, name) -> name.endsWith(PROTO_SCHEMA_EXTENSION)))) + .filter(file -> !file.getName().equals(protoFile.getName())).collect(Collectors.toSet()); try { final Map requiredSchemaDefs = new HashMap<>(); - final Descriptors.FileDescriptor schemaDescriptor = FileDescriptorUtils.parseProtoFileWithDependencies(protoFile, protoFiles, requiredSchemaDefs); - assert allDependenciesHaveSamePackageName(requiredSchemaDefs, schemaDescriptor.getPackage()) : "All dependencies must have the same package name as the main proto file"; - Map schemaContents = convertSchemaDefs(requiredSchemaDefs, schemaDescriptor.getPackage()); + final Descriptors.FileDescriptor schemaDescriptor = FileDescriptorUtils + .parseProtoFileWithDependencies(protoFile, protoFiles, requiredSchemaDefs); + assert allDependenciesHaveSamePackageName(requiredSchemaDefs, schemaDescriptor.getPackage()) + : "All dependencies must have the same package name as the main proto file"; + Map schemaContents = convertSchemaDefs(requiredSchemaDefs, + schemaDescriptor.getPackage()); return new DescriptorWrapper(schemaDescriptor, schemaContents); } catch (Descriptors.DescriptorValidationException e) { throw new RuntimeException("Failed to read schema file: " + protoFile, e); } catch (FileDescriptorUtils.ReadSchemaException e) { - log.warn("Error processing Avro schema with name {}. This usually means that the references are not ready yet to read it", e.file()); + log.warn( + "Error processing Avro schema with name {}. This usually means that the references are not ready yet to read it", + e.file()); throw new RuntimeException(e.getCause()); } catch (FileDescriptorUtils.ParseSchemaException e) { - log.warn("Error processing Avro schema with name {}. This usually means that the references are not ready yet to parse it", e.fileName()); + log.warn( + "Error processing Avro schema with name {}. 
This usually means that the references are not ready yet to parse it", + e.fileName()); throw new RuntimeException(e.getCause()); } } - private static boolean allDependenciesHaveSamePackageName(Map schemas, String mainProtoPackageName) { - return schemas.keySet().stream().allMatch(fullDepName -> fullDepName.contains(mainProtoPackageName)); + private static boolean allDependenciesHaveSamePackageName(Map schemas, + String mainProtoPackageName) { + return schemas.keySet().stream().allMatch(fullDepName -> fullDepName.contains(mainProtoPackageName)); } /** - * Converts the schema definitions to a map of ContentHandle, stripping any package information from the key, - * which is not needed for the schema registry, given that the dependent schemas are *always* in the same package - * of the main proto file. + * Converts the schema definitions to a map of ContentHandle, stripping any package information from the + * key, which is not needed for the schema registry, given that the dependent schemas are *always* in the + * same package of the main proto file. 
*/ - private Map convertSchemaDefs(Map requiredSchemaDefs, String mainProtoPackageName) { + private Map convertSchemaDefs(Map requiredSchemaDefs, + String mainProtoPackageName) { if (requiredSchemaDefs.isEmpty()) { return Map.of(); } Map schemaDefs = new HashMap<>(requiredSchemaDefs.size()); for (Map.Entry entry : requiredSchemaDefs.entrySet()) { String fileName = FileDescriptorUtils.extractProtoFileName(entry.getKey()); - TypedContent content = TypedContent.create(ContentHandle.create(entry.getValue()), ContentTypes.APPLICATION_PROTOBUF); + TypedContent content = TypedContent.create(ContentHandle.create(entry.getValue()), + ContentTypes.APPLICATION_PROTOBUF); if (schemaDefs.put(fileName, content) != null) { - log.warn("There's a clash of dependency name, likely due to stripping the expected package name ie {}: dependencies: {}", - mainProtoPackageName, Arrays.toString(requiredSchemaDefs.keySet().toArray(new Object[0]))); + log.warn( + "There's a clash of dependency name, likely due to stripping the expected package name ie {}: dependencies: {}", + mainProtoPackageName, + Arrays.toString(requiredSchemaDefs.keySet().toArray(new Object[0]))); } } return schemaDefs; } @Override - public List handleSchemaReferences(RegisterArtifact rootArtifact, Descriptors.FileDescriptor protoSchema, Map fileContents) throws FileNotFoundException, InterruptedException, ExecutionException { + public List handleSchemaReferences(RegisterArtifact rootArtifact, + Descriptors.FileDescriptor protoSchema, Map fileContents) + throws FileNotFoundException, InterruptedException, ExecutionException { Set references = new HashSet<>(); - final Set baseDeps = new HashSet<>(Arrays.asList(FileDescriptorUtils.baseDependencies())); - final ProtoFileElement rootSchemaElement = FileDescriptorUtils.fileDescriptorToProtoFile(protoSchema.toProto()); + final Set baseDeps = new HashSet<>( + Arrays.asList(FileDescriptorUtils.baseDependencies())); + final ProtoFileElement rootSchemaElement = FileDescriptorUtils 
+ .fileDescriptorToProtoFile(protoSchema.toProto()); for (Descriptors.FileDescriptor dependency : protoSchema.getDependencies()) { List nestedArtifactReferences = new ArrayList<>(); - String dependencyFullName = dependency.getPackage() + "/" + dependency.getName(); //FIXME find a better wat to do this - if (!baseDeps.contains(dependency) && rootSchemaElement.getImports().contains(dependencyFullName)) { + String dependencyFullName = dependency.getPackage() + "/" + dependency.getName(); // FIXME find a + // better wat to + // do this + if (!baseDeps.contains(dependency) + && rootSchemaElement.getImports().contains(dependencyFullName)) { RegisterArtifact nestedArtifact = buildFromRoot(rootArtifact, dependencyFullName); if (!dependency.getDependencies().isEmpty()) { - nestedArtifactReferences = handleSchemaReferences(nestedArtifact, dependency, fileContents); + nestedArtifactReferences = handleSchemaReferences(nestedArtifact, dependency, + fileContents); } - references.add(registerNestedSchema(dependencyFullName, nestedArtifactReferences, nestedArtifact, - fileContents.get(dependency.getName()).getContent().content())); + references.add(registerNestedSchema(dependencyFullName, nestedArtifactReferences, + nestedArtifact, fileContents.get(dependency.getName()).getContent().content())); } } @@ -110,9 +131,11 @@ public List handleSchemaReferences(RegisterArtifact rootArtif public static class DescriptorWrapper implements ParsedDirectoryWrapper { final Descriptors.FileDescriptor fileDescriptor; - final Map schemaContents; //used to store the original file content to register the content as-is. + final Map schemaContents; // used to store the original file content to register + // the content as-is. 
- public DescriptorWrapper(Descriptors.FileDescriptor fileDescriptor, Map schemaContents) { + public DescriptorWrapper(Descriptors.FileDescriptor fileDescriptor, + Map schemaContents) { this.fileDescriptor = fileDescriptor; this.schemaContents = schemaContents; } @@ -127,4 +150,3 @@ public Map getSchemaContents() { } } } - diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/RegisterArtifact.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/RegisterArtifact.java index 29fb8079ad..7fffe5482d 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/RegisterArtifact.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/RegisterArtifact.java @@ -149,7 +149,7 @@ public String getContentType() { /** * @param contentType the contentType to set */ - public void setContentType(String contentType){ + public void setContentType(String contentType) { this.contentType = contentType; } @@ -178,7 +178,7 @@ public void setAnalyzeDirectory(Boolean analyzeDirectory) { public Boolean getAutoRefs() { return autoRefs; } - + public void setAutoRefs(Boolean autoRefs) { this.autoRefs = autoRefs; } diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/RegisterRegistryMojo.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/RegisterRegistryMojo.java index 95b3f10ed6..35d6ec1f31 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/RegisterRegistryMojo.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/RegisterRegistryMojo.java @@ -45,7 +45,6 @@ /** * Register artifacts against registry. - * */ @Mojo(name = "register") public class RegisterRegistryMojo extends AbstractRegistryMojo { @@ -88,18 +87,25 @@ protected boolean validate() throws MojoExecutionException { int errorCount = 0; for (RegisterArtifact artifact : artifacts) { if (artifact.getGroupId() == null) { - getLog().error(String.format("GroupId is required when registering an artifact. 
Missing from artifacts[%d].", idx)); + getLog().error(String.format( + "GroupId is required when registering an artifact. Missing from artifacts[%d].", + idx)); errorCount++; } if (artifact.getArtifactId() == null) { - getLog().error(String.format("ArtifactId is required when registering an artifact. Missing from artifacts[%s].", idx)); + getLog().error(String.format( + "ArtifactId is required when registering an artifact. Missing from artifacts[%s].", + idx)); errorCount++; } if (artifact.getFile() == null) { - getLog().error(String.format("File is required when registering an artifact. Missing from artifacts[%s].", idx)); + getLog().error(String.format( + "File is required when registering an artifact. Missing from artifacts[%s].", idx)); errorCount++; } else if (!artifact.getFile().exists()) { - getLog().error(String.format("Artifact file to register is configured but file does not exist: %s", artifact.getFile().getPath())); + getLog().error( + String.format("Artifact file to register is configured but file does not exist: %s", + artifact.getFile().getPath())); errorCount++; } @@ -107,7 +113,8 @@ protected boolean validate() throws MojoExecutionException { } if (errorCount > 0) { - throw new MojoExecutionException("Invalid configuration of the Register Artifact(s) mojo. See the output log for details."); + throw new MojoExecutionException( + "Invalid configuration of the Register Artifact(s) mojo. See the output log for details."); } return true; } @@ -121,19 +128,35 @@ protected void executeInternal() throws MojoExecutionException { String artifactId = artifact.getArtifactId(); try { if (artifact.getAutoRefs() != null && artifact.getAutoRefs()) { - // If we have references, then we'll need to create the local resource index and then process all refs. + // If we have references, then we'll need to create the local resource index and then + // process all refs. 
ReferenceIndex index = createIndex(artifact.getFile()); addExistingReferencesToIndex(index, existingReferences); addExistingReferencesToIndex(index, artifact.getExistingReferences()); Stack registrationStack = new Stack<>(); registerWithAutoRefs(artifact, index, registrationStack); - } else if (artifact.getAnalyzeDirectory() != null && artifact.getAnalyzeDirectory()) { //Auto register selected, we must figure out if the artifact has reference using the directory structure + } else if (artifact.getAnalyzeDirectory() != null && artifact.getAnalyzeDirectory()) { // Auto + // register + // selected, + // we + // must + // figure + // out + // if + // the + // artifact + // has + // reference + // using + // the + // directory + // structure registerDirectory(artifact); } else { List references = new ArrayList<>(); - //First, we check if the artifact being processed has references defined + // First, we check if the artifact being processed has references defined if (hasReferences(artifact)) { references = registerArtifactReferences(artifact.getReferences()); } @@ -141,7 +164,8 @@ protected void executeInternal() throws MojoExecutionException { } } catch (Exception e) { errorCount++; - getLog().error(String.format("Exception while registering artifact [%s] / [%s]", groupId, artifactId), e); + getLog().error(String.format("Exception while registering artifact [%s] / [%s]", groupId, + artifactId), e); } } @@ -152,9 +176,12 @@ protected void executeInternal() throws MojoExecutionException { } } - private CreateArtifactResponse registerWithAutoRefs(RegisterArtifact artifact, ReferenceIndex index, Stack registrationStack) throws IOException, ExecutionException, InterruptedException { + private CreateArtifactResponse registerWithAutoRefs(RegisterArtifact artifact, ReferenceIndex index, + Stack registrationStack) + throws IOException, ExecutionException, InterruptedException { if (loopDetected(artifact, registrationStack)) { - throw new RuntimeException("Artifact reference 
loop detected (not supported): " + printLoop(registrationStack)); + throw new RuntimeException( + "Artifact reference loop detected (not supported): " + printLoop(registrationStack)); } registrationStack.push(artifact); @@ -164,17 +191,21 @@ private CreateArtifactResponse registerWithAutoRefs(RegisterArtifact artifact, R TypedContent typedArtifactContent = TypedContent.create(artifactContent, artifactContentType); // Find all references in the content - ArtifactTypeUtilProvider provider = this.utilProviderFactory.getArtifactTypeProvider(artifact.getArtifactType()); + ArtifactTypeUtilProvider provider = this.utilProviderFactory + .getArtifactTypeProvider(artifact.getArtifactType()); ReferenceFinder referenceFinder = provider.getReferenceFinder(); - Set externalReferences = referenceFinder.findExternalReferences(typedArtifactContent); + Set externalReferences = referenceFinder + .findExternalReferences(typedArtifactContent); // Register all of the references first, then register the artifact. List registeredReferences = externalReferences.stream().map(externalRef -> { - IndexedResource iresource = index.lookup(externalRef.getResource(), Paths.get(artifact.getFile().toURI())); + IndexedResource iresource = index.lookup(externalRef.getResource(), + Paths.get(artifact.getFile().toURI())); // TODO: need a way to resolve references that are not local (already registered in the registry) if (iresource == null) { - throw new RuntimeException("Reference could not be resolved. From: " + artifact.getFile().getName() + " To: " + externalRef.getFullReference()); + throw new RuntimeException("Reference could not be resolved. From: " + + artifact.getFile().getName() + " To: " + externalRef.getFullReference()); } // If the resource isn't already registered, then register it now. 
@@ -209,41 +240,53 @@ private CreateArtifactResponse registerWithAutoRefs(RegisterArtifact artifact, R return registerArtifact(artifact, registeredReferences); } - private void registerDirectory(RegisterArtifact artifact) throws IOException, ExecutionException, InterruptedException { + private void registerDirectory(RegisterArtifact artifact) + throws IOException, ExecutionException, InterruptedException { switch (artifact.getArtifactType()) { case ArtifactType.AVRO: final AvroDirectoryParser avroDirectoryParser = new AvroDirectoryParser(getClient()); final ParsedDirectoryWrapper schema = avroDirectoryParser.parse(artifact.getFile()); - registerArtifact(artifact, avroDirectoryParser.handleSchemaReferences(artifact, schema.getSchema(), schema.getSchemaContents())); + registerArtifact(artifact, avroDirectoryParser.handleSchemaReferences(artifact, + schema.getSchema(), schema.getSchemaContents())); break; case ArtifactType.PROTOBUF: - final ProtobufDirectoryParser protobufDirectoryParser = new ProtobufDirectoryParser(getClient()); - final ParsedDirectoryWrapper protoSchema = protobufDirectoryParser.parse(artifact.getFile()); - registerArtifact(artifact, protobufDirectoryParser.handleSchemaReferences(artifact, protoSchema.getSchema(), protoSchema.getSchemaContents())); + final ProtobufDirectoryParser protobufDirectoryParser = new ProtobufDirectoryParser( + getClient()); + final ParsedDirectoryWrapper protoSchema = protobufDirectoryParser + .parse(artifact.getFile()); + registerArtifact(artifact, protobufDirectoryParser.handleSchemaReferences(artifact, + protoSchema.getSchema(), protoSchema.getSchemaContents())); break; case ArtifactType.JSON: - final JsonSchemaDirectoryParser jsonSchemaDirectoryParser = new JsonSchemaDirectoryParser(getClient()); - final ParsedDirectoryWrapper jsonSchema = jsonSchemaDirectoryParser.parse(artifact.getFile()); - registerArtifact(artifact, jsonSchemaDirectoryParser.handleSchemaReferences(artifact, jsonSchema.getSchema(), 
jsonSchema.getSchemaContents())); + final JsonSchemaDirectoryParser jsonSchemaDirectoryParser = new JsonSchemaDirectoryParser( + getClient()); + final ParsedDirectoryWrapper jsonSchema = jsonSchemaDirectoryParser + .parse(artifact.getFile()); + registerArtifact(artifact, jsonSchemaDirectoryParser.handleSchemaReferences(artifact, + jsonSchema.getSchema(), jsonSchema.getSchemaContents())); break; default: - throw new IllegalArgumentException(String.format("Artifact type not recognized for analyzing a directory structure %s", artifact.getArtifactType())); + throw new IllegalArgumentException( + String.format("Artifact type not recognized for analyzing a directory structure %s", + artifact.getArtifactType())); } } - private CreateArtifactResponse registerArtifact(RegisterArtifact artifact, List references) throws - FileNotFoundException, ExecutionException, InterruptedException { + private CreateArtifactResponse registerArtifact(RegisterArtifact artifact, + List references) + throws FileNotFoundException, ExecutionException, InterruptedException { return registerArtifact(artifact, new FileInputStream(artifact.getFile()), references); } private CreateArtifactResponse registerArtifact(RegisterArtifact artifact, InputStream artifactContent, - List references) throws ExecutionException, InterruptedException { + List references) throws ExecutionException, InterruptedException { String groupId = artifact.getGroupId(); String artifactId = artifact.getArtifactId(); String version = artifact.getVersion(); String type = artifact.getArtifactType(); Boolean canonicalize = artifact.getCanonicalize(); - String ct = artifact.getContentType() == null ? ContentTypes.APPLICATION_JSON : artifact.getContentType(); + String ct = artifact.getContentType() == null ? 
ContentTypes.APPLICATION_JSON + : artifact.getContentType(); String data = null; try { if (artifact.getMinify() != null && artifact.getMinify()) { @@ -278,18 +321,15 @@ private CreateArtifactResponse registerArtifact(RegisterArtifact artifact, Input }).collect(Collectors.toList())); createVersion.setContent(content); - var vmd = getClient() - .groups() - .byGroupId(groupId) - .artifacts() - .post(createArtifact, config -> { - if (artifact.getIfExists() != null) { - config.queryParameters.ifExists = IfArtifactExists.forValue(artifact.getIfExists().value()); - } - config.queryParameters.canonical = canonicalize; - }); + var vmd = getClient().groups().byGroupId(groupId).artifacts().post(createArtifact, config -> { + if (artifact.getIfExists() != null) { + config.queryParameters.ifExists = IfArtifactExists.forValue(artifact.getIfExists().value()); + } + config.queryParameters.canonical = canonicalize; + }); - getLog().info(String.format("Successfully registered artifact [%s] / [%s]. GlobalId is [%d]", groupId, artifactId, vmd.getVersion().getGlobalId())); + getLog().info(String.format("Successfully registered artifact [%s] / [%s]. 
GlobalId is [%d]", + groupId, artifactId, vmd.getVersion().getGlobalId())); return vmd; } @@ -298,12 +338,14 @@ private static boolean hasReferences(RegisterArtifact artifact) { return artifact.getReferences() != null && !artifact.getReferences().isEmpty(); } - private List registerArtifactReferences - (List referencedArtifacts) throws FileNotFoundException, ExecutionException, InterruptedException { + private List registerArtifactReferences( + List referencedArtifacts) + throws FileNotFoundException, ExecutionException, InterruptedException { List references = new ArrayList<>(); for (RegisterArtifactReference artifact : referencedArtifacts) { List nestedReferences = new ArrayList<>(); - //First, we check if the artifact being processed has references defined, and register them if needed + // First, we check if the artifact being processed has references defined, and register them if + // needed if (hasReferences(artifact)) { nestedReferences = registerArtifactReferences(artifact.getReferences()); } @@ -322,7 +364,8 @@ public void setSkip(boolean skip) { this.skip = skip; } - private static ArtifactReference buildReferenceFromMetadata(VersionMetaData metaData, String referenceName) { + private static ArtifactReference buildReferenceFromMetadata(VersionMetaData metaData, + String referenceName) { ArtifactReference reference = new ArtifactReference(); reference.setName(referenceName); reference.setArtifactId(metaData.getArtifactId()); @@ -333,6 +376,7 @@ private static ArtifactReference buildReferenceFromMetadata(VersionMetaData meta /** * Create a local index relative to the given file location. 
+ * * @param file */ private static ReferenceIndex createIndex(File file) { @@ -344,12 +388,15 @@ private static ReferenceIndex createIndex(File file) { return index; } - private void addExistingReferencesToIndex(ReferenceIndex index, List existingReferences) throws ExecutionException, InterruptedException { + private void addExistingReferencesToIndex(ReferenceIndex index, + List existingReferences) throws ExecutionException, InterruptedException { if (existingReferences != null && !existingReferences.isEmpty()) { for (ExistingReference ref : existingReferences) { VersionMetaData vmd; if (ref.getVersion() == null || "LATEST".equalsIgnoreCase(ref.getVersion())) { - vmd = getClient().groups().byGroupId(ref.getGroupId()).artifacts().byArtifactId(ref.getArtifactId()).versions().byVersionExpression("branch=latest").get(); + vmd = getClient().groups().byGroupId(ref.getGroupId()).artifacts() + .byArtifactId(ref.getArtifactId()).versions().byVersionExpression("branch=latest") + .get(); } else { vmd = new VersionMetaData(); vmd.setGroupId(ref.getGroupId()); @@ -357,7 +404,8 @@ private void addExistingReferencesToIndex(ReferenceIndex index, List registrationStack) { + private static boolean loopDetected(RegisterArtifact artifact, + Stack registrationStack) { for (RegisterArtifact stackArtifact : registrationStack) { if (artifact.getFile().equals(stackArtifact.getFile())) { return true; @@ -402,7 +452,8 @@ private static boolean loopDetected(RegisterArtifact artifact, Stack registrationStack) { - return registrationStack.stream().map(artifact -> artifact.getFile().getName()).collect(Collectors.joining(" -> ")); + return registrationStack.stream().map(artifact -> artifact.getFile().getName()) + .collect(Collectors.joining(" -> ")); } } diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/TestArtifact.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/TestArtifact.java index c28f3aa946..3e80f908f1 100644 --- 
a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/TestArtifact.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/TestArtifact.java @@ -67,7 +67,7 @@ public String getContentType() { /** * @param contentType the contentType to set */ - public void setContentType(String contentType){ + public void setContentType(String contentType) { this.contentType = contentType; } diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/TestUpdateRegistryMojo.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/TestUpdateRegistryMojo.java index c63c68a8d1..45ab0a86eb 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/TestUpdateRegistryMojo.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/TestUpdateRegistryMojo.java @@ -12,9 +12,7 @@ import java.util.List; /** - * Test artifact against current artifact rules, - * if an update is possible / valid. - * + * Test artifact against current artifact rules, if an update is possible / valid. */ @Mojo(name = "test-update") public class TestUpdateRegistryMojo extends AbstractRegistryMojo { @@ -36,18 +34,25 @@ protected void validate() throws MojoExecutionException { int errorCount = 0; for (TestArtifact artifact : artifacts) { if (artifact.getGroupId() == null) { - getLog().error(String.format("GroupId is required when testing an artifact. Missing from artifacts[%d].", idx)); + getLog().error(String.format( + "GroupId is required when testing an artifact. Missing from artifacts[%d].", + idx)); errorCount++; } if (artifact.getArtifactId() == null) { - getLog().error(String.format("ArtifactId is required when testing an artifact. Missing from artifacts[%s].", idx)); + getLog().error(String.format( + "ArtifactId is required when testing an artifact. Missing from artifacts[%s].", + idx)); errorCount++; } if (artifact.getFile() == null) { - getLog().error(String.format("File is required when testing an artifact. 
Missing from artifacts[%s].", idx)); + getLog().error(String.format( + "File is required when testing an artifact. Missing from artifacts[%s].", idx)); errorCount++; } else if (!artifact.getFile().isFile()) { - getLog().error(String.format("Artifact file to test is configured but file does not exist or is not a file: %s", artifact.getFile().getPath())); + getLog().error(String.format( + "Artifact file to test is configured but file does not exist or is not a file: %s", + artifact.getFile().getPath())); errorCount++; } @@ -55,7 +60,8 @@ protected void validate() throws MojoExecutionException { } if (errorCount > 0) { - throw new MojoExecutionException("Invalid configuration of the Test Update Artifact(s) mojo. See the output log for details."); + throw new MojoExecutionException( + "Invalid configuration of the Test Update Artifact(s) mojo. See the output log for details."); } } } @@ -76,13 +82,18 @@ protected void executeInternal() throws MojoExecutionException { cv.setContent(new VersionContent()); cv.getContent().setContentType(contentType); cv.getContent().setContent(content); - getClient().groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions().post(cv, config -> { - config.queryParameters.dryRun = true; - }); - getLog().info(String.format("[%s] / [%s] :: Artifact successfully tested (updating is allowed for the given content).", groupId, artifactId)); + getClient().groups().byGroupId(groupId).artifacts().byArtifactId(artifactId).versions() + .post(cv, config -> { + config.queryParameters.dryRun = true; + }); + getLog().info(String.format( + "[%s] / [%s] :: Artifact successfully tested (updating is allowed for the given content).", + groupId, artifactId)); } catch (Exception e) { errorCount++; - getLog().error(String.format("[%s] / [%s] :: Artifact test FAILED (updating is not allowed for the given content).", groupId, artifactId), e); + getLog().error(String.format( + "[%s] / [%s] :: Artifact test FAILED (updating is not allowed for the 
given content).", + groupId, artifactId), e); } } } diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/refs/IndexedResource.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/refs/IndexedResource.java index 4fd8239e46..cef3bb26ad 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/refs/IndexedResource.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/refs/IndexedResource.java @@ -1,12 +1,12 @@ package io.apicurio.registry.maven.refs; -import java.nio.file.Path; -import java.util.Set; - import io.apicurio.registry.content.ContentHandle; import io.apicurio.registry.rest.client.models.VersionMetaData; import io.apicurio.registry.types.ArtifactType; +import java.nio.file.Path; +import java.util.Set; + public class IndexedResource { private final Path path; @@ -17,6 +17,7 @@ public class IndexedResource { /** * Constructor. + * * @param path * @param type * @param resourceName @@ -29,21 +30,21 @@ public IndexedResource(Path path, String type, String resourceName, ContentHandl this.type = type; this.resourceName = resourceName; } - + /** * @return the content */ public ContentHandle getContent() { return content; } - + /** * @return the type */ public String getType() { return type; } - + /** * @return the resourceName */ @@ -78,7 +79,8 @@ public boolean matches(String resourceName, Path relativeToFile, Set schem // Protobuf can resolve relative to the "schema paths" (aka --proto-paths in protoc). 
if (!resolves && ArtifactType.PROTOBUF.equals(this.type)) { - resolves = schemaPaths.parallelStream().anyMatch(path -> this.path.normalize().equals(path.resolve(resourceName).normalize())); + resolves = schemaPaths.parallelStream() + .anyMatch(path -> this.path.normalize().equals(path.resolve(resourceName).normalize())); } return resolves; } diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/refs/ReferenceIndex.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/refs/ReferenceIndex.java index 7091c7de8a..acd906bd2c 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/refs/ReferenceIndex.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/refs/ReferenceIndex.java @@ -1,12 +1,7 @@ package io.apicurio.registry.maven.refs; -import java.nio.file.Path; -import java.util.HashSet; -import java.util.Set; - import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; - import io.apicurio.datamodels.Library; import io.apicurio.datamodels.models.Document; import io.apicurio.datamodels.util.ModelTypeUtil; @@ -15,19 +10,18 @@ import io.apicurio.registry.types.ArtifactType; import io.apicurio.registry.utils.protobuf.schema.ProtobufFile; +import java.nio.file.Path; +import java.util.HashSet; +import java.util.Set; + /** - * An index of the files available when discovering references in an artifact. This index is - * typically populated by getting a list of all files in a directory, or zip file. - * - * The index maps a resource name (this will vary depending on the artifact type) to the - * content of the resource. For example, Avro schemas will have resource names based on the - * qualified name of the type they define. JSON Schemas will have resources names based on - * the name of the file. - * - * The intent of this index is to resolve an external reference found in an artifact to an - * actual piece of content (e.g. file) in the index. 
If it cannot be resolved, that would + * An index of the files available when discovering references in an artifact. This index is typically + * populated by getting a list of all files in a directory, or zip file. The index maps a resource name (this + * will vary depending on the artifact type) to the content of the resource. For example, Avro schemas will + * have resource names based on the qualified name of the type they define. JSON Schemas will have resources + * names based on the name of the file. The intent of this index is to resolve an external reference found in + * an artifact to an actual piece of content (e.g. file) in the index. If it cannot be resolved, that would * typically mean that there is a broken reference in the schema/design. - * */ public class ReferenceIndex { @@ -35,21 +29,22 @@ public class ReferenceIndex { private Set index = new HashSet<>(); private Set schemaPaths = new HashSet<>(); - + /** * Constructor. */ public ReferenceIndex() { } - + /** * Constructor. + * * @param schemaPath */ public ReferenceIndex(Path schemaPath) { this.schemaPaths.add(schemaPath); } - + /** * @param path */ @@ -58,17 +53,19 @@ public void addSchemaPath(Path path) { } /** - * Look up a resource in the index. Returns null if no resource with that - * name is found. + * Look up a resource in the index. Returns null if no resource with that name is found. + * * @param resourceName * @param relativeToFile */ public IndexedResource lookup(String resourceName, Path relativeToFile) { - return index.stream().filter(resource -> resource.matches(resourceName, relativeToFile, schemaPaths)).findFirst().orElse(null); + return index.stream().filter(resource -> resource.matches(resourceName, relativeToFile, schemaPaths)) + .findFirst().orElse(null); } /** * Index an existing (remote) reference using a resource name and remote artifact metadata. 
+ * * @param resourceName * @param vmd */ @@ -79,15 +76,15 @@ public void index(String resourceName, VersionMetaData vmd) { } /** - * Index the given content. Indexing will parse the content and figure out its resource - * name and type. + * Index the given content. Indexing will parse the content and figure out its resource name and type. + * * @param path * @param content */ public void index(Path path, ContentHandle content) { try { JsonNode tree = mapper.readTree(content.content()); - + // OpenAPI if (tree.has("openapi") || tree.has("swagger") || tree.has("asyncapi")) { indexDataModels(path, content); @@ -101,7 +98,7 @@ public void index(Path path, ContentHandle content) { } catch (Exception e) { // Must not be JSON... } - + try { indexProto(path, content); return; @@ -121,7 +118,7 @@ private void indexAvro(Path path, ContentHandle content, JsonNode parsed) { private void indexProto(Path path, ContentHandle content) { ProtobufFile.toProtoFileElement(content.content()); - + IndexedResource resource = new IndexedResource(path, ArtifactType.PROTOBUF, null, content); this.index.add(resource); } @@ -140,12 +137,12 @@ private void indexDataModels(Path path, ContentHandle content) { if (doc == null) { throw new UnsupportedOperationException("Content is not OpenAPI or AsyncAPI."); } - + String type = ArtifactType.OPENAPI; if (ModelTypeUtil.isAsyncApiModel(doc)) { type = ArtifactType.ASYNCAPI; } - + IndexedResource resource = new IndexedResource(path, type, null, content); this.index.add(resource); } diff --git a/utils/maven-plugin/src/test/resources/test-builds/download/pom.xml b/utils/maven-plugin/src/test/resources/test-builds/download/pom.xml index e46ee2aa58..9c4a91ce20 100644 --- a/utils/maven-plugin/src/test/resources/test-builds/download/pom.xml +++ b/utils/maven-plugin/src/test/resources/test-builds/download/pom.xml @@ -1,56 +1,56 @@ - - 4.0.0 + + + 4.0.0 - io.apicurio - apicurio-test-maven-plugin - pom - 2.0.0-SNAPSHOT + io.apicurio + 
apicurio-test-maven-plugin + 2.0.0-SNAPSHOT + pom - - UTF-8 - yyyy-MM-dd HH:mm:ss - ${maven.build.timestamp} + + UTF-8 + yyyy-MM-dd HH:mm:ss + ${maven.build.timestamp} - 1.8 - 1.8 + 1.8 + 1.8 - 2.0.0-SNAPSHOT - + 2.0.0-SNAPSHOT + - - - - io.apicurio - apicurio-registry-maven-plugin - ${apicurio.version} - - - generate-sources - - download - - - http://localhost:8080/api/v3 - - - TestGroup - FullNameRecord - ${project.build.directory}/classes/record.avsc - true - - - TestGroup - ExampleAPI - 1 - ${project.build.directory}/classes/example.graphql - true - - - - - - - - + + + + io.apicurio + apicurio-registry-maven-plugin + ${apicurio.version} + + + + download + + generate-sources + + http://localhost:8080/api/v3 + + + TestGroup + FullNameRecord + ${project.build.directory}/classes/record.avsc + true + + + TestGroup + ExampleAPI + 1 + ${project.build.directory}/classes/example.graphql + true + + + + + + + + diff --git a/utils/maven-plugin/src/test/resources/test-builds/register/pom.xml b/utils/maven-plugin/src/test/resources/test-builds/register/pom.xml index b33d69218a..c928236e94 100644 --- a/utils/maven-plugin/src/test/resources/test-builds/register/pom.xml +++ b/utils/maven-plugin/src/test/resources/test-builds/register/pom.xml @@ -1,57 +1,57 @@ - - 4.0.0 + + + 4.0.0 - io.apicurio - apicurio-test-maven-plugin - pom - 2.0.0-SNAPSHOT + io.apicurio + apicurio-test-maven-plugin + 2.0.0-SNAPSHOT + pom - - UTF-8 - yyyy-MM-dd HH:mm:ss - ${maven.build.timestamp} + + UTF-8 + yyyy-MM-dd HH:mm:ss + ${maven.build.timestamp} - 1.8 - 1.8 + 1.8 + 1.8 - 2.0.0-SNAPSHOT - + 2.0.0-SNAPSHOT + - - - - io.apicurio - apicurio-registry-maven-plugin - ${apicurio.version} - - - generate-sources - - register - - - http://localhost:8080/api/v3 - - - TestGroup - FullNameRecord - ${project.basedir}/src/main/resources/schemas/record.avsc - FAIL - - - TestGroup - ExampleAPI - GRAPHQL - ${project.basedir}/src/main/resources/apis/example.graphql - RETURN_OR_UPDATE - true - - - - - - - - + + + + 
io.apicurio + apicurio-registry-maven-plugin + ${apicurio.version} + + + + register + + generate-sources + + http://localhost:8080/api/v3 + + + TestGroup + FullNameRecord + ${project.basedir}/src/main/resources/schemas/record.avsc + FAIL + + + TestGroup + ExampleAPI + GRAPHQL + ${project.basedir}/src/main/resources/apis/example.graphql + RETURN_OR_UPDATE + true + + + + + + + + diff --git a/utils/protobuf-schema-utilities/pom.xml b/utils/protobuf-schema-utilities/pom.xml index 5deb961ef5..779f2dac66 100644 --- a/utils/protobuf-schema-utilities/pom.xml +++ b/utils/protobuf-schema-utilities/pom.xml @@ -1,179 +1,174 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + - apicurio-registry-protobuf-schema-utilities - jar - apicurio-registry-protobuf-schema-utilities + apicurio-registry-protobuf-schema-utilities + jar + apicurio-registry-protobuf-schema-utilities - - - com.google.protobuf - protobuf-java - + + + com.google.protobuf + protobuf-java + - - - com.google.api.grpc - proto-google-common-protos - + + + com.google.api.grpc + proto-google-common-protos + + + com.squareup.wire + wire-schema + - - com.squareup.wire - wire-schema - + + com.squareup.wire + wire-compiler + + + org.jetbrains.kotlinx + kotlinx-serialization-core + + + + + org.jetbrains.kotlinx + kotlinx-serialization-core + + + com.ibm.icu + icu4j + - - com.squareup.wire - wire-compiler - - - kotlinx-serialization-core - org.jetbrains.kotlinx - - - - - kotlinx-serialization-core - org.jetbrains.kotlinx - - - com.ibm.icu - icu4j - + + com.squareup.okio + okio-jvm + - - com.squareup.okio - okio-jvm - + + com.squareup.okio + okio-fakefilesystem + - - com.squareup.okio - okio-fakefilesystem - + + org.junit.jupiter + junit-jupiter + test + + + + com.google.truth.extensions + truth-proto-extension + test + + - - org.junit.jupiter - junit-jupiter - test - - - - com.google.truth.extensions - 
truth-proto-extension - test - - + - + + + org.apache.maven.plugins + maven-compiler-plugin + + ${maven.compiler.target} + ${maven.compiler.target} + true + false + false + + + + jdk8-support + + compile + + + 8 + 1.8 + false + false + false + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + true + + + + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + detect + + initialize + + + - - - org.apache.maven.plugins - maven-compiler-plugin - - ${maven.compiler.target} - ${maven.compiler.target} - true - false - false - - - - jdk8-support - - compile - - - 8 - 1.8 - false - false - false - - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - true - - - - - - kr.motd.maven - os-maven-plugin - 1.7.1 - - - initialize - - detect - - - - + + org.xolstice.maven.plugins + protobuf-maven-plugin + ${proto-plugin.version} + true + + + gencode + + compile + test-compile + + generate-sources + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-dist + + copy-resources + + prepare-package + + ${project.build.outputDirectory} + + + ${project.basedir}/target/generated-test-sources/protobuf/ + false + + + + + + + + - - org.xolstice.maven.plugins - protobuf-maven-plugin - ${proto-plugin.version} - true - - - gencode - generate-sources - - compile - test-compile - - - - com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} - - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - copy-dist - prepare-package - - copy-resources - - - ${project.build.outputDirectory} - - - ${project.basedir}/target/generated-test-sources/protobuf/ - false - - - - - - - - - - - + diff --git a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/DynamicSchema.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/DynamicSchema.java index 9073f40ad5..7e7661db72 100644 --- 
a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/DynamicSchema.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/DynamicSchema.java @@ -103,7 +103,7 @@ public Descriptors.Descriptor getMessageDescriptor(String msgTypeName) { * Gets the enum value for the given enum type and name * * @param enumTypeName the enum type name - * @param enumName the enum name + * @param enumName the enum name * @return the enum value descriptor (null if not found) */ public Descriptors.EnumValueDescriptor getEnumValue(String enumTypeName, String enumName) { @@ -118,7 +118,7 @@ public Descriptors.EnumValueDescriptor getEnumValue(String enumTypeName, String * Gets the enum value for the given enum type and number * * @param enumTypeName the enum type name - * @param enumNumber the enum number + * @param enumNumber the enum number * @return the enum value descriptor (null if not found) */ public Descriptors.EnumValueDescriptor getEnumValue(String enumTypeName, int enumNumber) { @@ -183,7 +183,8 @@ public String toString() { // --- private --- - private DynamicSchema(DescriptorProtos.FileDescriptorSet fileDescSet) throws Descriptors.DescriptorValidationException { + private DynamicSchema(DescriptorProtos.FileDescriptorSet fileDescSet) + throws Descriptors.DescriptorValidationException { mFileDescSet = fileDescSet; Map fileDescMap = init(fileDescSet); @@ -228,7 +229,7 @@ private Map init(DescriptorProtos.FileDescri // getDependencyList() signature was changed and broke compatibility in 2.6.1; workaround // with reflection - //List dependencyList = fdProto.getDependencyList(); + // List dependencyList = fdProto.getDependencyList(); List dependencyList = null; try { Method m = fdProto.getClass().getMethod("getDependencyList", (Class[]) null); @@ -240,8 +241,8 @@ private Map init(DescriptorProtos.FileDescri List resolvedFdList = new ArrayList(); for (String depName : dependencyList) { if 
(!allFdProtoNames.contains(depName)) { - throw new IllegalArgumentException("cannot resolve import " + depName + " in " + fdProto - .getName()); + throw new IllegalArgumentException( + "cannot resolve import " + depName + " in " + fdProto.getName()); } Descriptors.FileDescriptor fd = resolvedFileDescMap.get(depName); if (fd != null) { @@ -251,7 +252,8 @@ private Map init(DescriptorProtos.FileDescri if (resolvedFdList.size() == dependencyList.size()) { // dependencies resolved Descriptors.FileDescriptor[] fds = new Descriptors.FileDescriptor[resolvedFdList.size()]; - Descriptors.FileDescriptor fd = Descriptors.FileDescriptor.buildFrom(fdProto, resolvedFdList.toArray(fds)); + Descriptors.FileDescriptor fd = Descriptors.FileDescriptor.buildFrom(fdProto, + resolvedFdList.toArray(fds)); resolvedFileDescMap.put(fdProto.getName(), fd); } } @@ -260,12 +262,8 @@ private Map init(DescriptorProtos.FileDescri return resolvedFileDescMap; } - private void addMessageType( - Descriptors.Descriptor msgType, - String scope, - Set msgDupes, - Set enumDupes - ) { + private void addMessageType(Descriptors.Descriptor msgType, String scope, Set msgDupes, + Set enumDupes) { String msgTypeNameFull = msgType.getFullName(); String msgTypeNameShort = (scope == null ? msgType.getName() : scope + "." + msgType.getName()); @@ -289,10 +287,7 @@ private void addMessageType( private void addEnumType(Descriptors.EnumDescriptor enumType, String scope, Set enumDupes) { String enumTypeNameFull = enumType.getFullName(); - String enumTypeNameShort = ( - scope == null - ? enumType.getName() - : scope + "." + enumType.getName()); + String enumTypeNameShort = (scope == null ? enumType.getName() : scope + "." 
+ enumType.getName()); if (mEnumDescriptorMapFull.containsKey(enumTypeNameFull)) { throw new IllegalArgumentException("duplicate name: " + enumTypeNameFull); @@ -308,10 +303,8 @@ private void addEnumType(Descriptors.EnumDescriptor enumType, String scope, Set< private DescriptorProtos.FileDescriptorSet mFileDescSet; private Map mMsgDescriptorMapFull = new HashMap(); private Map mMsgDescriptorMapShort = new HashMap(); - private Map mEnumDescriptorMapFull = new HashMap(); - private Map mEnumDescriptorMapShort = new HashMap(); + private Map mEnumDescriptorMapFull = new HashMap(); + private Map mEnumDescriptorMapShort = new HashMap(); /** * DynamicSchema.Builder @@ -325,7 +318,8 @@ public static class Builder { * @return the schema object */ public DynamicSchema build() throws Descriptors.DescriptorValidationException { - DescriptorProtos.FileDescriptorSet.Builder fileDescSetBuilder = DescriptorProtos.FileDescriptorSet.newBuilder(); + DescriptorProtos.FileDescriptorSet.Builder fileDescSetBuilder = DescriptorProtos.FileDescriptorSet + .newBuilder(); fileDescSetBuilder.addFile(mFileDescProtoBuilder.build()); fileDescSetBuilder.mergeFrom(mFileDescSetBuilder.build()); return new DynamicSchema(fileDescSetBuilder.build()); @@ -377,8 +371,7 @@ public Builder addPublicDependency(String dependency) { // Note: added public Builder setJavaPackage(String javaPackage) { - DescriptorProtos.FileOptions.Builder optionsBuilder = - DescriptorProtos.FileOptions.newBuilder(); + DescriptorProtos.FileOptions.Builder optionsBuilder = DescriptorProtos.FileOptions.newBuilder(); optionsBuilder.setJavaPackage(javaPackage); mFileDescProtoBuilder.mergeOptions(optionsBuilder.build()); return this; @@ -386,8 +379,7 @@ public Builder setJavaPackage(String javaPackage) { // Note: added public Builder setJavaOuterClassname(String javaOuterClassname) { - DescriptorProtos.FileOptions.Builder optionsBuilder = - DescriptorProtos.FileOptions.newBuilder(); + DescriptorProtos.FileOptions.Builder optionsBuilder 
= DescriptorProtos.FileOptions.newBuilder(); optionsBuilder.setJavaOuterClassname(javaOuterClassname); mFileDescProtoBuilder.mergeOptions(optionsBuilder.build()); return this; @@ -395,8 +387,7 @@ public Builder setJavaOuterClassname(String javaOuterClassname) { // Note: added public Builder setJavaMultipleFiles(boolean javaMultipleFiles) { - DescriptorProtos.FileOptions.Builder optionsBuilder = - DescriptorProtos.FileOptions.newBuilder(); + DescriptorProtos.FileOptions.Builder optionsBuilder = DescriptorProtos.FileOptions.newBuilder(); optionsBuilder.setJavaMultipleFiles(javaMultipleFiles); mFileDescProtoBuilder.mergeOptions(optionsBuilder.build()); return this; diff --git a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/EnumDefinition.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/EnumDefinition.java index e99316b5bd..d1532f84e0 100644 --- a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/EnumDefinition.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/EnumDefinition.java @@ -40,7 +40,8 @@ public static class Builder { // --- public --- public Builder addValue(String name, int num) { - DescriptorProtos.EnumValueDescriptorProto.Builder enumValBuilder = DescriptorProtos.EnumValueDescriptorProto.newBuilder(); + DescriptorProtos.EnumValueDescriptorProto.Builder enumValBuilder = DescriptorProtos.EnumValueDescriptorProto + .newBuilder(); enumValBuilder.setName(name).setNumber(num); mEnumTypeBuilder.addValue(enumValBuilder.build()); return this; @@ -56,8 +57,8 @@ private Builder(String enumName, Boolean allowAlias) { mEnumTypeBuilder = DescriptorProtos.EnumDescriptorProto.newBuilder(); mEnumTypeBuilder.setName(enumName); if (allowAlias != null) { - DescriptorProtos.EnumOptions.Builder optionsBuilder = - DescriptorProtos.EnumOptions.newBuilder(); + DescriptorProtos.EnumOptions.Builder 
optionsBuilder = DescriptorProtos.EnumOptions + .newBuilder(); optionsBuilder.setAllowAlias(allowAlias); mEnumTypeBuilder.mergeOptions(optionsBuilder.build()); } diff --git a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtils.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtils.java index a3fe722c98..77daa85f70 100644 --- a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtils.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtils.java @@ -8,11 +8,11 @@ import com.google.protobuf.Descriptors.FileDescriptor; import com.google.type.*; import com.squareup.wire.Syntax; +import com.squareup.wire.schema.*; import com.squareup.wire.schema.Field; import com.squareup.wire.schema.Schema; import com.squareup.wire.schema.Service; import com.squareup.wire.schema.Type; -import com.squareup.wire.schema.*; import com.squareup.wire.schema.internal.parser.*; import kotlin.ranges.IntRange; import metadata.ProtobufSchemaMetadata; @@ -79,46 +79,30 @@ public class FileDescriptorUtils { private static final FileDescriptor[] WELL_KNOWN_DEPENDENCIES; static { - //Support all the Protobuf WellKnownTypes - //and the protos from Google API, https://github.com/googleapis/googleapis - WELL_KNOWN_DEPENDENCIES = new FileDescriptor[]{ - ApiProto.getDescriptor().getFile(), - FieldMaskProto.getDescriptor().getFile(), - SourceContextProto.getDescriptor().getFile(), - StructProto.getDescriptor().getFile(), - TypeProto.getDescriptor().getFile(), - TimestampProto.getDescriptor().getFile(), - WrappersProto.getDescriptor().getFile(), - AnyProto.getDescriptor().getFile(), - EmptyProto.getDescriptor().getFile(), - DurationProto.getDescriptor().getFile(), - TimeOfDayProto.getDescriptor().getFile(), - DateProto.getDescriptor().getFile(), - 
CalendarPeriodProto.getDescriptor().getFile(), - ColorProto.getDescriptor().getFile(), - DayOfWeek.getDescriptor().getFile(), - LatLng.getDescriptor().getFile(), - FractionProto.getDescriptor().getFile(), - MoneyProto.getDescriptor().getFile(), - MonthProto.getDescriptor().getFile(), - PhoneNumberProto.getDescriptor().getFile(), - PostalAddressProto.getDescriptor().getFile(), - CalendarPeriodProto.getDescriptor().getFile(), - LocalizedTextProto.getDescriptor().getFile(), - IntervalProto.getDescriptor().getFile(), - ExprProto.getDescriptor().getFile(), - QuaternionProto.getDescriptor().getFile(), - PostalAddressProto.getDescriptor().getFile(), - ProtobufSchemaMetadata.getDescriptor().getFile(), - Decimals.getDescriptor().getFile() - }; + // Support all the Protobuf WellKnownTypes + // and the protos from Google API, https://github.com/googleapis/googleapis + WELL_KNOWN_DEPENDENCIES = new FileDescriptor[] { ApiProto.getDescriptor().getFile(), + FieldMaskProto.getDescriptor().getFile(), SourceContextProto.getDescriptor().getFile(), + StructProto.getDescriptor().getFile(), TypeProto.getDescriptor().getFile(), + TimestampProto.getDescriptor().getFile(), WrappersProto.getDescriptor().getFile(), + AnyProto.getDescriptor().getFile(), EmptyProto.getDescriptor().getFile(), + DurationProto.getDescriptor().getFile(), TimeOfDayProto.getDescriptor().getFile(), + DateProto.getDescriptor().getFile(), CalendarPeriodProto.getDescriptor().getFile(), + ColorProto.getDescriptor().getFile(), DayOfWeek.getDescriptor().getFile(), + LatLng.getDescriptor().getFile(), FractionProto.getDescriptor().getFile(), + MoneyProto.getDescriptor().getFile(), MonthProto.getDescriptor().getFile(), + PhoneNumberProto.getDescriptor().getFile(), PostalAddressProto.getDescriptor().getFile(), + CalendarPeriodProto.getDescriptor().getFile(), LocalizedTextProto.getDescriptor().getFile(), + IntervalProto.getDescriptor().getFile(), ExprProto.getDescriptor().getFile(), + QuaternionProto.getDescriptor().getFile(), 
PostalAddressProto.getDescriptor().getFile(), + ProtobufSchemaMetadata.getDescriptor().getFile(), Decimals.getDescriptor().getFile() }; } public static FileDescriptor[] baseDependencies() { return WELL_KNOWN_DEPENDENCIES.clone(); } - //Parse a self-contained descriptor proto just with the base dependencies. + // Parse a self-contained descriptor proto just with the base dependencies. public static FileDescriptor protoFileToFileDescriptor(FileDescriptorProto descriptorProto) throws DescriptorValidationException { Objects.requireNonNull(descriptorProto); @@ -128,7 +112,8 @@ public static FileDescriptor protoFileToFileDescriptor(FileDescriptorProto descr private static Map mutableBaseDependenciesByName(int ensureCapacity) { // return a map using WELL_KNOWN_DEPENDENCIES to populate it - final Map deps = new HashMap<>(WELL_KNOWN_DEPENDENCIES.length + ensureCapacity); + final Map deps = new HashMap<>( + WELL_KNOWN_DEPENDENCIES.length + ensureCapacity); for (FileDescriptor fd : WELL_KNOWN_DEPENDENCIES) { deps.put(fd.getName(), fd); } @@ -140,7 +125,8 @@ public static FileDescriptor protoFileToFileDescriptor(ProtoFileElement element) return protoFileToFileDescriptor(element, "default.proto"); } - public static FileDescriptor protoFileToFileDescriptor(ProtoFileElement element, String protoFileName) throws DescriptorValidationException { + public static FileDescriptor protoFileToFileDescriptor(ProtoFileElement element, String protoFileName) + throws DescriptorValidationException { Objects.requireNonNull(element); Objects.requireNonNull(protoFileName); @@ -148,16 +134,18 @@ public static FileDescriptor protoFileToFileDescriptor(ProtoFileElement element, Optional.ofNullable(element.getPackageName())); } - public static FileDescriptor protoFileToFileDescriptor(String schemaDefinition, String protoFileName, Optional optionalPackageName) - throws DescriptorValidationException { + public static FileDescriptor protoFileToFileDescriptor(String schemaDefinition, String protoFileName, 
+ Optional optionalPackageName) throws DescriptorValidationException { Objects.requireNonNull(schemaDefinition); Objects.requireNonNull(protoFileName); - return FileDescriptor.buildFrom(toFileDescriptorProto(schemaDefinition, protoFileName, optionalPackageName, Collections.emptyMap()), baseDependencies()); + return FileDescriptor.buildFrom(toFileDescriptorProto(schemaDefinition, protoFileName, + optionalPackageName, Collections.emptyMap()), baseDependencies()); } - public static FileDescriptor protoFileToFileDescriptor(String schemaDefinition, String protoFileName, Optional optionalPackageName, Map schemaDefs, Map dependencies) - throws DescriptorValidationException { + public static FileDescriptor protoFileToFileDescriptor(String schemaDefinition, String protoFileName, + Optional optionalPackageName, Map schemaDefs, + Map dependencies) throws DescriptorValidationException { Objects.requireNonNull(schemaDefinition); Objects.requireNonNull(protoFileName); @@ -165,9 +153,12 @@ public static FileDescriptor protoFileToFileDescriptor(String schemaDefinition, final Set joinedDependencies = new HashSet<>(baseDependencies); joinedDependencies.addAll(dependencies.values()); - Descriptors.FileDescriptor[] dependenciesArray = new Descriptors.FileDescriptor[joinedDependencies.size()]; + Descriptors.FileDescriptor[] dependenciesArray = new Descriptors.FileDescriptor[joinedDependencies + .size()]; - return FileDescriptor.buildFrom(toFileDescriptorProto(schemaDefinition, protoFileName, optionalPackageName, schemaDefs), joinedDependencies.toArray(dependenciesArray)); + return FileDescriptor.buildFrom( + toFileDescriptorProto(schemaDefinition, protoFileName, optionalPackageName, schemaDefs), + joinedDependencies.toArray(dependenciesArray)); } public static final class ReadSchemaException extends Exception { @@ -184,7 +175,8 @@ public File file() { } /** - * Same as {@link #parseProtoFileWithDependencies(File, Set, Map)}, but with {@code requiredSchemaDeps} set to {@code null}. 
+ * Same as {@link #parseProtoFileWithDependencies(File, Set, Map)}, but with {@code requiredSchemaDeps} + * set to {@code null}. */ public static FileDescriptor parseProtoFileWithDependencies(File mainProtoFile, Set dependencies) throws DescriptorValidationException, ReadSchemaException, ParseSchemaException { @@ -192,27 +184,28 @@ public static FileDescriptor parseProtoFileWithDependencies(File mainProtoFile, } /** - * Same as {@link #parseProtoFileWithDependencies(File, Set, Map, boolean)}, but with {@code failFast} set to {@code true} - * and {@code requiredSchemaDeps} set to {@code null}. + * Same as {@link #parseProtoFileWithDependencies(File, Set, Map, boolean)}, but with {@code failFast} set + * to {@code true} and {@code requiredSchemaDeps} set to {@code null}. */ public static FileDescriptor parseProtoFileWithDependencies(File mainProtoFile, Set dependencies, - Map requiredSchemaDeps) + Map requiredSchemaDeps) throws ReadSchemaException, DescriptorValidationException, ParseSchemaException { return parseProtoFileWithDependencies(mainProtoFile, dependencies, requiredSchemaDeps, true); } /** - * Parse a proto file with its dependencies to produce a {@link FileDescriptor} of it, trying to resolve any - * transitive dependency.
- * During the resolution of dependencies process, depending on {@code failFast}, the process will fail as soon as - * any parsing error happen in the list of provided dependencies, regardless been required or not, or it will proceed - * until a required dependency cannot be resolved.
- * If {@code requiredSchemaDeps} is provided, it will be populated with the required dependencies, which keys are in the - * form of {@code packageName/fileName} and the value is the schema definition of the dependency. + * Parse a proto file with its dependencies to produce a {@link FileDescriptor} of it, trying to resolve + * any transitive dependency.
+ * During the dependency resolution process, depending on {@code failFast}, the process will fail as
+ * soon as any parsing error happens in the list of provided dependencies, regardless of whether they
+ * are required or not, or it will proceed until a required dependency cannot be resolved.<br/>
+ * If {@code requiredSchemaDeps} is provided, it will be populated with the required dependencies, which + * keys are in the form of {@code packageName/fileName} and the value is the schema definition of the + * dependency. */ public static FileDescriptor parseProtoFileWithDependencies(File mainProtoFile, Set dependencies, - Map requiredSchemaDeps, boolean failFast) + Map requiredSchemaDeps, boolean failFast) throws DescriptorValidationException, ReadSchemaException, ParseSchemaException { Objects.requireNonNull(mainProtoFile); Objects.requireNonNull(dependencies); @@ -230,7 +223,8 @@ public static FileDescriptor parseProtoFileWithDependencies(File mainProtoFile, } final ProtoFileElement mainProtoElement; try { - mainProtoElement = ProtoParser.Companion.parse(Location.get(mainProtoFile.getAbsolutePath()), schemaDefinition); + mainProtoElement = ProtoParser.Companion.parse(Location.get(mainProtoFile.getAbsolutePath()), + schemaDefinition); } catch (Throwable t) { throw new ParseSchemaException(mainProtoFile.getName(), t); } @@ -242,7 +236,7 @@ public static FileDescriptor parseProtoFileWithDependencies(File mainProtoFile, } private static void readAndParseSchemas(Collection schemas, Map schemaContents, - Map protoFileElements, boolean failFast) + Map protoFileElements, boolean failFast) throws ReadSchemaException, ParseSchemaException { Objects.requireNonNull(schemas); for (File schema : schemas) { @@ -257,7 +251,8 @@ private static void readAndParseSchemas(Collection schemas, Map schemas, Map schemaContents, - Map protoFileElements, boolean failFast) throws ParseSchemaException { + private static void parseSchemas(Collection schemas, + Map schemaContents, Map protoFileElements, + boolean failFast) throws ParseSchemaException { Objects.requireNonNull(schemas); for (ProtobufSchemaContent schema : schemas) { final ProtoFileElement protoFile; @@ -328,56 +324,55 @@ private static void parseSchemas(Collection schemas, Map< } /** - * Same as {@link 
#parseProtoFileWithDependencies(ProtobufSchemaContent, Collection, Map, boolean)}, - * but with {@code failFast} set to {@code true} and {@code requiredSchemaDeps} set to {@code null}. + * Same as {@link #parseProtoFileWithDependencies(ProtobufSchemaContent, Collection, Map, boolean)}, but + * with {@code failFast} set to {@code true} and {@code requiredSchemaDeps} set to {@code null}. */ public static FileDescriptor parseProtoFileWithDependencies(ProtobufSchemaContent mainProtoFile, - Collection dependencies) + Collection dependencies) throws DescriptorValidationException, ParseSchemaException { return parseProtoFileWithDependencies(mainProtoFile, dependencies, null, true); } /** - * Parse a proto file with its dependencies to produce a {@link FileDescriptor} of it, trying to resolve any - * transitive dependency.
- * Both the dependencies and the main proto file must be provided as {@link ProtobufSchemaContent}, still unparsed, - * and which {@link ProtobufSchemaContent#fileName()} doesn't require to specify the package name, automatically - * later resolved by parsing {@link ProtobufSchemaContent#schemaDefinition()}.
- * During the resolution of dependencies process, depending on {@code failFast}, the process will fail as soon as - * any parsing error happen in the list of provided dependencies, regardless been required or not, or it will proceed - * until a required dependency cannot be resolved.
- * If {@code requiredSchemaDeps} is provided, it will be populated with the required dependencies, which keys are in the - * form of {@code packageName/fileName} and the value is the schema definition of the dependency. + * Parse a proto file with its dependencies to produce a {@link FileDescriptor} of it, trying to resolve + * any transitive dependency.
+ * Both the dependencies and the main proto file must be provided as {@link ProtobufSchemaContent}, still + * unparsed, and which {@link ProtobufSchemaContent#fileName()} doesn't require to specify the package + * name, automatically later resolved by parsing {@link ProtobufSchemaContent#schemaDefinition()}.
+ * During the dependency resolution process, depending on {@code failFast}, the process will fail as
+ * soon as any parsing error happens in the list of provided dependencies, regardless of whether they
+ * are required or not, or it will proceed until a required dependency cannot be resolved.<br/>
+ * If {@code requiredSchemaDeps} is provided, it will be populated with the required dependencies, which + * keys are in the form of {@code packageName/fileName} and the value is the schema definition of the + * dependency. */ public static FileDescriptor parseProtoFileWithDependencies(ProtobufSchemaContent mainProtoFile, - Collection dependencies, - Map requiredSchemaDeps, - boolean failFast) - throws DescriptorValidationException, ParseSchemaException { + Collection dependencies, Map requiredSchemaDeps, + boolean failFast) throws DescriptorValidationException, ParseSchemaException { Objects.requireNonNull(mainProtoFile); Objects.requireNonNull(dependencies); - final Map resolvedDependencies = mutableBaseDependenciesByName(dependencies.size()); + final Map resolvedDependencies = mutableBaseDependenciesByName( + dependencies.size()); final Map schemaDefinitions = new HashMap<>(dependencies.size()); final Map protoFileElements = new HashMap<>(dependencies.size()); parseSchemas(dependencies, schemaDefinitions, protoFileElements, failFast); final ProtoFileElement mainProtoElement; try { - mainProtoElement = ProtoParser.Companion.parse(DEFAULT_LOCATION, mainProtoFile.schemaDefinition()); + mainProtoElement = ProtoParser.Companion.parse(DEFAULT_LOCATION, + mainProtoFile.schemaDefinition()); } catch (Throwable t) { throw new ParseSchemaException(mainProtoFile.fileName(), t); } - return resolveFileDescriptor(mainProtoElement, mainProtoFile.schemaDefinition(), mainProtoFile.fileName(), - schemaDefinitions, resolvedDependencies, requiredSchemaDeps, new HashSet<>(), protoFileElements); + return resolveFileDescriptor(mainProtoElement, mainProtoFile.schemaDefinition(), + mainProtoFile.fileName(), schemaDefinitions, resolvedDependencies, requiredSchemaDeps, + new HashSet<>(), protoFileElements); } private static FileDescriptor resolveFileDescriptor(ProtoFileElement mainProtoElement, - String schemaDefinition, - String protoFileName, - Map schemaDefinitions, - Map 
resolvedDependencies, - Map requiredDependentSchemas, - Set unresolvedImportNames, - Map cachedProtoFileDependencies) throws DescriptorValidationException { + String schemaDefinition, String protoFileName, Map schemaDefinitions, + Map resolvedDependencies, Map requiredDependentSchemas, + Set unresolvedImportNames, Map cachedProtoFileDependencies) + throws DescriptorValidationException { final String mainProtoImportName = toProtoFullName(mainProtoElement, protoFileName); if (!unresolvedImportNames.add(mainProtoImportName)) { // TODO we can do better here, we can actually print the whole chain of dependencies @@ -400,13 +395,15 @@ private static FileDescriptor resolveFileDescriptor(ProtoFileElement mainProtoEl // We could end up here because of: // - fail-fast is false and some error happened while reading/parsing schemas // - the schema wasn't in the dependencies - // In both cases we can just ignore the required dependency and let the validation fail later + // In both cases we can just ignore the required dependency and let the validation fail + // later continue; } final String fileName = extractProtoFileName(depFullName); // try reuse the existing requiredDependentSchemas: // in case of a chain of single-children dependencies it means reusing the same map! - final Map requiredSubDependencies = requiredDependentSchemas.isEmpty() ? requiredDependentSchemas : new HashMap<>(); + final Map requiredSubDependencies = requiredDependentSchemas.isEmpty() + ? 
requiredDependentSchemas : new HashMap<>(); final ProtoFileElement protoFile; if (cachedProtoFileDependencies != null) { protoFile = cachedProtoFileDependencies.get(depFullName); @@ -414,19 +411,23 @@ private static FileDescriptor resolveFileDescriptor(ProtoFileElement mainProtoEl // We could end up here because of: // - fail-fast is false and some error happened while reading/parsing schemas // - the schema wasn't in the dependencies - // In both cases we can just ignore the required dependency and let the validation fail later + // In both cases we can just ignore the required dependency and let the validation fail + // later if (protoFile == null) { continue; } } else { protoFile = ProtoParser.Companion.parse(DEFAULT_LOCATION, schemaDep); } - fdDep = resolveFileDescriptor(protoFile, schemaDep, fileName, schemaDefinitions, resolvedDependencies, requiredSubDependencies, unresolvedImportNames, cachedProtoFileDependencies); + fdDep = resolveFileDescriptor(protoFile, schemaDep, fileName, schemaDefinitions, + resolvedDependencies, requiredSubDependencies, unresolvedImportNames, + cachedProtoFileDependencies); // no need to add anything if (requiredDependentSchemas != requiredSubDependencies) { requiredDependentSchemas.putAll(requiredSubDependencies); } - // we have accumulated new requiredSubDependencies, we need to add them to the requiredDependentSchemas + // we have accumulated new requiredSubDependencies, we need to add them to the + // requiredDependentSchemas resolvedDependencies.put(depFullName, fdDep); } // this is the case of a well-known dependency @@ -439,8 +440,10 @@ private static FileDescriptor resolveFileDescriptor(ProtoFileElement mainProtoEl final boolean removed = unresolvedImportNames.remove(mainProtoImportName); assert removed : "unresolvedNames should contain depName"; // TODO we risk to have few dependencies files to be re-written in a whole new in-memory fs - Descriptors.FileDescriptor mainProtoFd = 
FileDescriptor.buildFrom(toFileDescriptorProto(schemaDefinition, protoFileName, - Optional.ofNullable(mainProtoElement.getPackageName()), requiredDependentSchemas), directDependencyFds); + Descriptors.FileDescriptor mainProtoFd = FileDescriptor.buildFrom( + toFileDescriptorProto(schemaDefinition, protoFileName, + Optional.ofNullable(mainProtoElement.getPackageName()), requiredDependentSchemas), + directDependencyFds); return mainProtoFd; } @@ -449,7 +452,8 @@ private static String toProtoFullName(ProtoFileElement protoFile, String protoFi } /** - * Extract the proto file name out of a full proto file name, which is in the form of {@code packageName/fileName}. + * Extract the proto file name out of a full proto file name, which is in the form of + * {@code packageName/fileName}. */ public static String extractProtoFileName(String protoFullName) { int beforeStartFileName = protoFullName.lastIndexOf('/'); @@ -462,16 +466,18 @@ public static String extractProtoFileName(String protoFullName) { return fileName; } - private static FileDescriptorProto toFileDescriptorProto(String schemaDefinition, String protoFileName, Optional optionalPackageName) { - return toFileDescriptorProto(schemaDefinition, protoFileName, optionalPackageName, Collections.emptyMap()); + private static FileDescriptorProto toFileDescriptorProto(String schemaDefinition, String protoFileName, + Optional optionalPackageName) { + return toFileDescriptorProto(schemaDefinition, protoFileName, optionalPackageName, + Collections.emptyMap()); } - - public static FileDescriptorProto toFileDescriptorProto(String schemaDefinition, String protoFileName, Optional optionalPackageName, Map deps) { + public static FileDescriptorProto toFileDescriptorProto(String schemaDefinition, String protoFileName, + Optional optionalPackageName, Map deps) { final ProtobufSchemaLoader.ProtobufSchemaLoaderContext protobufSchemaLoaderContext; try { - protobufSchemaLoaderContext = - ProtobufSchemaLoader.loadSchema(optionalPackageName, 
protoFileName, schemaDefinition, deps); + protobufSchemaLoaderContext = ProtobufSchemaLoader.loadSchema(optionalPackageName, protoFileName, + schemaDefinition, deps); } catch (Exception e) { throw new RuntimeException(e); } @@ -498,8 +504,8 @@ public static FileDescriptorProto toFileDescriptorProto(String schemaDefinition, Type type = schemaContext.getType(protoType); if (type instanceof MessageType) { - DescriptorProto - message = messageElementToDescriptorProto((MessageType) type, schemaContext, element); + DescriptorProto message = messageElementToDescriptorProto((MessageType) type, schemaContext, + element); schema.addMessageType(message); } else if (type instanceof EnumType) { EnumDescriptorProto message = enumElementToProto((EnumType) type); @@ -512,7 +518,7 @@ public static FileDescriptorProto toFileDescriptorProto(String schemaDefinition, schema.addService(serviceDescriptorProto); } - //dependencies on protobuf default types are always added + // dependencies on protobuf default types are always added for (String ref : element.getImports()) { schema.addDependency(ref); } @@ -533,110 +539,128 @@ public static FileDescriptorProto toFileDescriptorProto(String schemaDefinition, String javaPackageName = findOptionString(JAVA_PACKAGE_OPTION, element.getOptions()); if (javaPackageName != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setJavaPackage(javaPackageName).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder().setJavaPackage(javaPackageName) + .build(); schema.mergeOptions(options); } String javaOuterClassname = findOptionString(JAVA_OUTER_CLASSNAME_OPTION, element.getOptions()); if (javaOuterClassname != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setJavaOuterClassname(javaOuterClassname).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setJavaOuterClassname(javaOuterClassname).build(); schema.mergeOptions(options); } Boolean javaMultipleFiles = 
findOptionBoolean(JAVA_MULTIPLE_FILES_OPTION, element.getOptions()); if (javaMultipleFiles != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setJavaMultipleFiles(javaMultipleFiles).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setJavaMultipleFiles(javaMultipleFiles).build(); schema.mergeOptions(options); } Boolean javaStringCheckUtf8 = findOptionBoolean(JAVA_STRING_CHECK_UTF8_OPTION, element.getOptions()); if (javaStringCheckUtf8 != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setJavaStringCheckUtf8(javaStringCheckUtf8).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setJavaStringCheckUtf8(javaStringCheckUtf8).build(); schema.mergeOptions(options); } Boolean javaGenericServices = findOptionBoolean(JAVA_GENERIC_SERVICES_OPTION, element.getOptions()); if (javaGenericServices != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setJavaGenericServices(javaGenericServices).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setJavaGenericServices(javaGenericServices).build(); schema.mergeOptions(options); } Boolean ccGenericServices = findOptionBoolean(CC_GENERIC_SERVICES_OPTION, element.getOptions()); if (ccGenericServices != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setCcGenericServices(ccGenericServices).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setCcGenericServices(ccGenericServices).build(); schema.mergeOptions(options); } Boolean ccEnableArenas = findOptionBoolean(CC_ENABLE_ARENAS_OPTION, element.getOptions()); if (ccEnableArenas != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setCcEnableArenas(ccEnableArenas).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder().setCcEnableArenas(ccEnableArenas) + .build(); schema.mergeOptions(options); } String csharpNamespace = 
findOptionString(CSHARP_NAMESPACE_OPTION, element.getOptions()); if (csharpNamespace != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setCsharpNamespace(csharpNamespace).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setCsharpNamespace(csharpNamespace).build(); schema.mergeOptions(options); } String goPackageName = findOptionString(GO_PACKAGE_OPTION, element.getOptions()); if (goPackageName != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setGoPackage(goPackageName).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder().setGoPackage(goPackageName) + .build(); schema.mergeOptions(options); } String objcClassPrefix = findOptionString(OBJC_CLASS_PREFIX_OPTION, element.getOptions()); if (objcClassPrefix != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setObjcClassPrefix(objcClassPrefix).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setObjcClassPrefix(objcClassPrefix).build(); schema.mergeOptions(options); } Boolean phpGenericServices = findOptionBoolean(PHP_GENERIC_SERVICES_OPTION, element.getOptions()); if (phpGenericServices != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setPhpGenericServices(phpGenericServices).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setPhpGenericServices(phpGenericServices).build(); schema.mergeOptions(options); } String phpClassPrefix = findOptionString(PHP_CLASS_PREFIX_OPTION, element.getOptions()); if (phpClassPrefix != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setPhpClassPrefix(phpClassPrefix).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder().setPhpClassPrefix(phpClassPrefix) + .build(); schema.mergeOptions(options); } String phpMetadataNamespace = findOptionString(PHP_METADATA_NAMESPACE_OPTION, element.getOptions()); if (phpMetadataNamespace != null) { - 
FileOptions options = DescriptorProtos.FileOptions.newBuilder().setPhpMetadataNamespace(phpMetadataNamespace).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setPhpMetadataNamespace(phpMetadataNamespace).build(); schema.mergeOptions(options); } String phpNamespace = findOptionString(PHP_NAMESPACE_OPTION, element.getOptions()); if (phpNamespace != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setPhpNamespace(phpNamespace).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder().setPhpNamespace(phpNamespace) + .build(); schema.mergeOptions(options); } Boolean pyGenericServices = findOptionBoolean(PY_GENERIC_SERVICES_OPTION, element.getOptions()); if (pyGenericServices != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setPyGenericServices(pyGenericServices).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder() + .setPyGenericServices(pyGenericServices).build(); schema.mergeOptions(options); } String rubyPackage = findOptionString(RUBY_PACKAGE_OPTION, element.getOptions()); if (rubyPackage != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setRubyPackage(rubyPackage).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder().setRubyPackage(rubyPackage) + .build(); schema.mergeOptions(options); } String swiftPrefix = findOptionString(SWIFT_PREFIX_OPTION, element.getOptions()); if (swiftPrefix != null) { - FileOptions options = DescriptorProtos.FileOptions.newBuilder().setSwiftPrefix(swiftPrefix).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder().setSwiftPrefix(swiftPrefix) + .build(); schema.mergeOptions(options); } FileOptions.OptimizeMode optimizeFor = findOption(OPTIMIZE_FOR_OPTION, element.getOptions()) .map(o -> FileOptions.OptimizeMode.valueOf(o.getValue().toString())).orElse(null); if (optimizeFor != null) { - FileOptions options = 
DescriptorProtos.FileOptions.newBuilder().setOptimizeFor(optimizeFor).build(); + FileOptions options = DescriptorProtos.FileOptions.newBuilder().setOptimizeFor(optimizeFor) + .build(); schema.mergeOptions(options); } @@ -644,8 +668,8 @@ public static FileDescriptorProto toFileDescriptorProto(String schemaDefinition, } /** - * When schema loader links the schema, it also includes google.protobuf types in it. - * We want to ignore all the other types except for the ones that are present in the current file. + * When schema loader links the schema, it also includes google.protobuf types in it. We want to ignore + * all the other types except for the ones that are present in the current file. * * @return true if a type is a parent type, false otherwise. */ @@ -654,31 +678,31 @@ private static boolean isParentLevelType(ProtoType protoType, Optional o if (optionalPackageName.isPresent()) { String packageName = optionalPackageName.get(); - //If the type doesn't start with the package name, ignore it. + // If the type doesn't start with the package name, ignore it. if (!typeName.startsWith(packageName)) { return false; } - //We only want to consider the parent level types. The list can contain following, - //[io.apicurio.foo.bar.Customer.Address, io.apicurio.foo.bar.Customer, google.protobuf.Timestamp] - //We want to only get the type "io.apicurio.foo.bar.Customer" which is parent level type. + // We only want to consider the parent level types. The list can contain following, + // [io.apicurio.foo.bar.Customer.Address, io.apicurio.foo.bar.Customer, google.protobuf.Timestamp] + // We want to only get the type "io.apicurio.foo.bar.Customer" which is parent level type. String[] typeNames = typeName.split(packageName)[1].split("\\."); boolean isNotNested = typeNames.length <= 2; return isNotNested; } - //In case the package is not defined, we select the types that are not google types or metadata types. 
+ // In case the package is not defined, we select the types that are not google types or metadata + // types. return !typeName.startsWith("google.type") && !typeName.startsWith("google.protobuf") - && !typeName.startsWith("metadata") - && !typeName.startsWith("additionalTypes"); + && !typeName.startsWith("metadata") && !typeName.startsWith("additionalTypes"); } - private static DescriptorProto messageElementToDescriptorProto( - MessageType messageElem, Schema schema, ProtoFile element) { + private static DescriptorProto messageElementToDescriptorProto(MessageType messageElem, Schema schema, + ProtoFile element) { ProtobufMessage message = new ProtobufMessage(); message.protoBuilder().setName(messageElem.getType().getSimpleName()); - Comparator locationComparator = - Comparator.comparing(Location::getLine).thenComparing(Location::getColumn); + Comparator locationComparator = Comparator.comparing(Location::getLine) + .thenComparing(Location::getColumn); Map allNestedTypes = new TreeMap<>(locationComparator); List allFields = new ArrayList<>(); @@ -691,19 +715,15 @@ private static DescriptorProto messageElementToDescriptorProto( } } - final Predicate isProto3Optional = - field -> - Field.Label.OPTIONAL.equals(field.getLabel()) && Syntax.PROTO_3.equals(element.getSyntax()); + final Predicate isProto3Optional = field -> Field.Label.OPTIONAL.equals(field.getLabel()) + && Syntax.PROTO_3.equals(element.getSyntax()); final List oneOfs = messageElem.getOneOfs(); - final List proto3OptionalOneOfs = - messageElem.getFieldsAndOneOfFields() - .stream() - .filter(isProto3Optional) - .map(FileDescriptorUtils::getProto3OptionalField) - .collect(Collectors.toList()); - - //Proto3 Optionals are considered as "synthetic-oneofs" by Protobuf compiler. 
+ final List proto3OptionalOneOfs = messageElem.getFieldsAndOneOfFields().stream() + .filter(isProto3Optional).map(FileDescriptorUtils::getProto3OptionalField) + .collect(Collectors.toList()); + + // Proto3 Optionals are considered as "synthetic-oneofs" by Protobuf compiler. oneOfs.addAll(proto3OptionalOneOfs); final Function> findOneOfByFieldName = fieldName -> { @@ -715,12 +735,12 @@ private static DescriptorProto messageElementToDescriptorProto( return Optional.empty(); }; - //Add all the declared fields first skipping oneOfs. + // Add all the declared fields first skipping oneOfs. for (final Field field : messageElem.getDeclaredFields()) { final Optional optionalOneOf = findOneOfByFieldName.apply(field.getName()); if (!optionalOneOf.isPresent()) { Field.Label fieldLabel = field.getLabel(); - //Fields are optional by default in Proto3. + // Fields are optional by default in Proto3. String label = fieldLabel != null ? fieldLabel.toString().toLowerCase() : OPTIONAL; String fieldType = determineFieldType(field.getType(), schema); @@ -734,46 +754,49 @@ private static DescriptorProto messageElementToDescriptorProto( fieldType = "message"; String fieldMapEntryName = toMapEntry(field.getName()); // Map entry field name is capitalized - fieldMapEntryName = fieldMapEntryName.substring(0, 1).toUpperCase() + fieldMapEntryName.substring(1); + fieldMapEntryName = fieldMapEntryName.substring(0, 1).toUpperCase() + + fieldMapEntryName.substring(1); // Map field type name is resolved with reference to the package fieldTypeName = String.format("%s.%s", messageElem.getType(), fieldMapEntryName); ProtobufMessage protobufMapMessage = new ProtobufMessage(); - DescriptorProto.Builder mapMessage = protobufMapMessage - .protoBuilder() - .setName(fieldMapEntryName) - .mergeOptions(DescriptorProtos.MessageOptions.newBuilder() - .setMapEntry(true) - .build()); - - protobufMapMessage - .addField(OPTIONAL, determineFieldType(keyType, schema), String.valueOf(keyType), KEY_FIELD, 1, null, 
null, null, null, null, null, null, null, null, null); - protobufMapMessage - .addField(OPTIONAL, determineFieldType(valueType, schema), String.valueOf(valueType), VALUE_FIELD, 2, null, null, null, null, null, null, null, null, null, null); + DescriptorProto.Builder mapMessage = protobufMapMessage.protoBuilder() + .setName(fieldMapEntryName).mergeOptions( + DescriptorProtos.MessageOptions.newBuilder().setMapEntry(true).build()); + + protobufMapMessage.addField(OPTIONAL, determineFieldType(keyType, schema), + String.valueOf(keyType), KEY_FIELD, 1, null, null, null, null, null, null, null, + null, null, null); + protobufMapMessage.addField(OPTIONAL, determineFieldType(valueType, schema), + String.valueOf(valueType), VALUE_FIELD, 2, null, null, null, null, null, null, + null, null, null, null); allNestedTypes.put(field.getLocation(), mapMessage.build()); } String jsonName = getDefaultJsonName(field.getName()).equals(field.getDeclaredJsonName()) - ? null : field.getDeclaredJsonName(); + ? null : field.getDeclaredJsonName(); Boolean isDeprecated = findOptionBoolean(DEPRECATED_OPTION, field.getOptions()); Boolean isPacked = findOptionBoolean(PACKED_OPTION, field.getOptions()); DescriptorProtos.FieldOptions.CType cType = findOption(CTYPE_OPTION, field.getOptions()) - .map(o -> DescriptorProtos.FieldOptions.CType.valueOf(o.getValue().toString())).orElse(null); + .map(o -> DescriptorProtos.FieldOptions.CType.valueOf(o.getValue().toString())) + .orElse(null); DescriptorProtos.FieldOptions.JSType jsType = findOption(JSTYPE_OPTION, field.getOptions()) - .map(o -> DescriptorProtos.FieldOptions.JSType.valueOf(o.getValue().toString())).orElse(null); - String metadataKey = findOptionString(ProtobufSchemaMetadata.metadataKey.getDescriptor().getFullName(), - field.getOptions()); - String metadataValue = findOptionString(ProtobufSchemaMetadata.metadataValue.getDescriptor().getFullName(), + .map(o -> DescriptorProtos.FieldOptions.JSType.valueOf(o.getValue().toString())) + 
.orElse(null); + String metadataKey = findOptionString( + ProtobufSchemaMetadata.metadataKey.getDescriptor().getFullName(), field.getOptions()); + String metadataValue = findOptionString( + ProtobufSchemaMetadata.metadataValue.getDescriptor().getFullName(), field.getOptions()); - allFields.add(ProtobufMessage.buildFieldDescriptorProto( - label, fieldType, fieldTypeName, field.getName(), field.getTag(), field.getDefault(), - jsonName, isDeprecated, isPacked, cType, jsType, metadataKey, metadataValue, null, null)); + allFields.add(ProtobufMessage.buildFieldDescriptorProto(label, fieldType, fieldTypeName, + field.getName(), field.getTag(), field.getDefault(), jsonName, isDeprecated, isPacked, + cType, jsType, metadataKey, metadataValue, null, null)); } } final Set addedOneOfs = new LinkedHashSet<>(); - //Add the oneOfs next including Proto3 Optionals. + // Add the oneOfs next including Proto3 Optionals. for (final OneOf oneOf : oneOfs) { if (addedOneOfs.contains(oneOf)) { continue; @@ -783,39 +806,36 @@ private static DescriptorProto messageElementToDescriptorProto( if (proto3OptionalOneOfs.contains(oneOf)) { isProto3OptionalField = true; } - OneofDescriptorProto.Builder oneofBuilder = OneofDescriptorProto.newBuilder().setName(oneOf.getName()); + OneofDescriptorProto.Builder oneofBuilder = OneofDescriptorProto.newBuilder() + .setName(oneOf.getName()); message.protoBuilder().addOneofDecl(oneofBuilder); for (Field oneOfField : oneOf.getFields()) { - String oneOfJsonName = getDefaultJsonName(oneOfField.getName()).equals(oneOfField.getDeclaredJsonName()) - ? null : oneOfField.getDeclaredJsonName(); + String oneOfJsonName = getDefaultJsonName(oneOfField.getName()) + .equals(oneOfField.getDeclaredJsonName()) ? 
null : oneOfField.getDeclaredJsonName(); Boolean oneOfIsDeprecated = findOptionBoolean(DEPRECATED_OPTION, oneOfField.getOptions()); Boolean oneOfIsPacked = findOptionBoolean(PACKED_OPTION, oneOfField.getOptions()); - DescriptorProtos.FieldOptions.CType oneOfCType = findOption(CTYPE_OPTION, oneOfField.getOptions()) - .map(o -> DescriptorProtos.FieldOptions.CType.valueOf(o.getValue().toString())).orElse(null); - DescriptorProtos.FieldOptions.JSType oneOfJsType = findOption(JSTYPE_OPTION, oneOfField.getOptions()) - .map(o -> DescriptorProtos.FieldOptions.JSType.valueOf(o.getValue().toString())).orElse(null); - String metadataKey = findOptionString(ProtobufSchemaMetadata.metadataKey.getDescriptor().getFullName(), + DescriptorProtos.FieldOptions.CType oneOfCType = findOption(CTYPE_OPTION, + oneOfField.getOptions()) + .map(o -> DescriptorProtos.FieldOptions.CType.valueOf(o.getValue().toString())) + .orElse(null); + DescriptorProtos.FieldOptions.JSType oneOfJsType = findOption(JSTYPE_OPTION, + oneOfField.getOptions()) + .map(o -> DescriptorProtos.FieldOptions.JSType.valueOf(o.getValue().toString())) + .orElse(null); + String metadataKey = findOptionString( + ProtobufSchemaMetadata.metadataKey.getDescriptor().getFullName(), oneOfField.getOptions()); - String metadataValue = findOptionString(ProtobufSchemaMetadata.metadataValue.getDescriptor().getFullName(), + String metadataValue = findOptionString( + ProtobufSchemaMetadata.metadataValue.getDescriptor().getFullName(), oneOfField.getOptions()); - allFields.add(ProtobufMessage.buildFieldDescriptorProto( - OPTIONAL, + allFields.add(ProtobufMessage.buildFieldDescriptorProto(OPTIONAL, determineFieldType(oneOfField.getType(), schema), - String.valueOf(oneOfField.getType()), - oneOfField.getName(), - oneOfField.getTag(), - oneOfField.getDefault(), - oneOfJsonName, - oneOfIsDeprecated, - oneOfIsPacked, - oneOfCType, - oneOfJsType, - metadataKey, - metadataValue, - message.protoBuilder().getOneofDeclCount() - 1, - 
isProto3OptionalField)); + String.valueOf(oneOfField.getType()), oneOfField.getName(), oneOfField.getTag(), + oneOfField.getDefault(), oneOfJsonName, oneOfIsDeprecated, oneOfIsPacked, oneOfCType, + oneOfJsType, metadataKey, metadataValue, + message.protoBuilder().getOneofDeclCount() - 1, isProto3OptionalField)); } addedOneOfs.add(oneOf); @@ -827,15 +847,13 @@ private static DescriptorProto messageElementToDescriptorProto( message.protoBuilder().addReservedName((String) elem); } else if (elem instanceof Integer) { int tag = (Integer) elem; - DescriptorProto.ReservedRange.Builder rangeBuilder = DescriptorProto.ReservedRange.newBuilder() - .setStart(tag) - .setEnd(tag + 1); + DescriptorProto.ReservedRange.Builder rangeBuilder = DescriptorProto.ReservedRange + .newBuilder().setStart(tag).setEnd(tag + 1); message.protoBuilder().addReservedRange(rangeBuilder.build()); } else if (elem instanceof IntRange) { IntRange range = (IntRange) elem; - DescriptorProto.ReservedRange.Builder rangeBuilder = DescriptorProto.ReservedRange.newBuilder() - .setStart(range.getStart()) - .setEnd(range.getEndInclusive() + 1); + DescriptorProto.ReservedRange.Builder rangeBuilder = DescriptorProto.ReservedRange + .newBuilder().setStart(range.getStart()).setEnd(range.getEndInclusive() + 1); message.protoBuilder().addReservedRange(rangeBuilder.build()); } else { throw new IllegalStateException( @@ -847,15 +865,13 @@ private static DescriptorProto messageElementToDescriptorProto( for (Object elem : extensions.getValues()) { if (elem instanceof Integer) { int tag = (Integer) elem; - DescriptorProto.ExtensionRange.Builder extensionBuilder = DescriptorProto.ExtensionRange.newBuilder() - .setStart(tag) - .setEnd(tag + 1); + DescriptorProto.ExtensionRange.Builder extensionBuilder = DescriptorProto.ExtensionRange + .newBuilder().setStart(tag).setEnd(tag + 1); message.protoBuilder().addExtensionRange(extensionBuilder.build()); } else if (elem instanceof IntRange) { IntRange range = (IntRange) elem; - 
DescriptorProto.ExtensionRange.Builder extensionBuilder = DescriptorProto.ExtensionRange.newBuilder() - .setStart(range.getStart()) - .setEnd(range.getEndInclusive() + 1); + DescriptorProto.ExtensionRange.Builder extensionBuilder = DescriptorProto.ExtensionRange + .newBuilder().setStart(range.getStart()).setEnd(range.getEndInclusive() + 1); message.protoBuilder().addExtensionRange(extensionBuilder.build()); } else { throw new IllegalStateException( @@ -866,14 +882,15 @@ private static DescriptorProto messageElementToDescriptorProto( Boolean isMapEntry = findOptionBoolean(MAP_ENTRY_OPTION, messageElem.getOptions()); if (isMapEntry != null) { - DescriptorProtos.MessageOptions.Builder optionsBuilder = DescriptorProtos.MessageOptions.newBuilder() - .setMapEntry(isMapEntry); + DescriptorProtos.MessageOptions.Builder optionsBuilder = DescriptorProtos.MessageOptions + .newBuilder().setMapEntry(isMapEntry); message.protoBuilder().mergeOptions(optionsBuilder.build()); } - Boolean noStandardDescriptorAccessor = findOptionBoolean(NO_STANDARD_DESCRIPTOR_OPTION, messageElem.getOptions()); + Boolean noStandardDescriptorAccessor = findOptionBoolean(NO_STANDARD_DESCRIPTOR_OPTION, + messageElem.getOptions()); if (noStandardDescriptorAccessor != null) { - DescriptorProtos.MessageOptions.Builder optionsBuilder = DescriptorProtos.MessageOptions.newBuilder() - .setNoStandardDescriptorAccessor(noStandardDescriptorAccessor); + DescriptorProtos.MessageOptions.Builder optionsBuilder = DescriptorProtos.MessageOptions + .newBuilder().setNoStandardDescriptorAccessor(noStandardDescriptorAccessor); message.protoBuilder().mergeOptions(optionsBuilder.build()); } @@ -896,40 +913,37 @@ private static String determineFieldType(ProtoType protoType, Schema schema) { } /** - * Proto3 optional fields are "synthetic one-ofs" and are written as one-of fields over the wire. - * This method generates the synthetic one-of from a Proto3 optional field. 
+ * Proto3 optional fields are "synthetic one-ofs" and are written as one-of fields over the wire. This + * method generates the synthetic one-of from a Proto3 optional field. */ private static OneOf getProto3OptionalField(Field field) { - return new OneOf("_" + field.getName(), "", Collections.singletonList(field), field.getLocation(), field.getOptions()); + return new OneOf("_" + field.getName(), "", Collections.singletonList(field), field.getLocation(), + field.getOptions()); } private static EnumDescriptorProto enumElementToProto(EnumType enumElem) { Boolean allowAlias = findOptionBoolean(ALLOW_ALIAS_OPTION, enumElem.getOptions()); - EnumDescriptorProto.Builder builder = EnumDescriptorProto.newBuilder() - .setName(enumElem.getName()); + EnumDescriptorProto.Builder builder = EnumDescriptorProto.newBuilder().setName(enumElem.getName()); if (allowAlias != null) { DescriptorProtos.EnumOptions.Builder optionsBuilder = DescriptorProtos.EnumOptions.newBuilder() .setAllowAlias(allowAlias); builder.mergeOptions(optionsBuilder.build()); } for (EnumConstant constant : enumElem.getConstants()) { - builder.addValue(EnumValueDescriptorProto.newBuilder() - .setName(constant.getName()) - .setNumber(constant.getTag()) - .build()); + builder.addValue(EnumValueDescriptorProto.newBuilder().setName(constant.getName()) + .setNumber(constant.getTag()).build()); } return builder.build(); } private static DescriptorProtos.ServiceDescriptorProto serviceElementToProto(Service serviceElem) { - ServiceDescriptorProto.Builder builder = ServiceDescriptorProto.newBuilder().setName(serviceElem.name()); + ServiceDescriptorProto.Builder builder = ServiceDescriptorProto.newBuilder() + .setName(serviceElem.name()); for (Rpc rpc : serviceElem.rpcs()) { - MethodDescriptorProto.Builder methodBuilder = MethodDescriptorProto - .newBuilder() - .setName(rpc.getName()) - .setInputType(getTypeName(rpc.getRequestType().toString())) + MethodDescriptorProto.Builder methodBuilder = 
MethodDescriptorProto.newBuilder() + .setName(rpc.getName()).setInputType(getTypeName(rpc.getRequestType().toString())) .setOutputType(getTypeName(rpc.getResponseType().toString())); if (rpc.getRequestStreaming()) { methodBuilder.setClientStreaming(rpc.getRequestStreaming()); @@ -939,13 +953,12 @@ private static DescriptorProtos.ServiceDescriptorProto serviceElementToProto(Ser } Boolean deprecated = findOptionBoolean(DEPRECATED_OPTION, rpc.getOptions()); if (deprecated != null) { - MethodOptions.Builder optionsBuilder = MethodOptions.newBuilder() - .setDeprecated(deprecated); + MethodOptions.Builder optionsBuilder = MethodOptions.newBuilder().setDeprecated(deprecated); methodBuilder.mergeOptions(optionsBuilder.build()); } - MethodOptions.IdempotencyLevel idempotencyLevel = findOption(IDEMPOTENCY_LEVEL_OPTION, rpc.getOptions()) - .map(o -> MethodOptions.IdempotencyLevel.valueOf(o.getValue().toString())) - .orElse(null); + MethodOptions.IdempotencyLevel idempotencyLevel = findOption(IDEMPOTENCY_LEVEL_OPTION, + rpc.getOptions()) + .map(o -> MethodOptions.IdempotencyLevel.valueOf(o.getValue().toString())).orElse(null); if (idempotencyLevel != null) { MethodOptions.Builder optionsBuilder = MethodOptions.newBuilder() .setIdempotencyLevel(idempotencyLevel); @@ -957,8 +970,8 @@ private static DescriptorProtos.ServiceDescriptorProto serviceElementToProto(Ser Boolean deprecated = findOptionBoolean(DEPRECATED_OPTION, serviceElem.options()); if (deprecated != null) { - DescriptorProtos.ServiceOptions.Builder optionsBuilder = DescriptorProtos.ServiceOptions.newBuilder() - .setDeprecated(deprecated); + DescriptorProtos.ServiceOptions.Builder optionsBuilder = DescriptorProtos.ServiceOptions + .newBuilder().setDeprecated(deprecated); builder.mergeOptions(optionsBuilder.build()); } @@ -984,9 +997,8 @@ private static Boolean findOptionBoolean(String name, Options options) { return findOption(name, options).map(o -> Boolean.valueOf(o.getValue().toString())).orElse(null); } - public 
static ProtoFileElement fileDescriptorWithDepsToProtoFile( - FileDescriptor file, Map dependencies - ) { + public static ProtoFileElement fileDescriptorWithDepsToProtoFile(FileDescriptor file, + Map dependencies) { for (FileDescriptor dependency : file.getDependencies()) { String depName = dependency.getName(); dependencies.put(depName, fileDescriptorWithDepsToProtoFile(dependency, dependencies)); @@ -1039,75 +1051,93 @@ public static ProtoFileElement fileDescriptorToProtoFile(FileDescriptorProto fil } ImmutableList.Builder options = ImmutableList.builder(); if (file.getOptions().hasJavaPackage()) { - OptionElement option = new OptionElement(JAVA_PACKAGE_OPTION, stringKind, file.getOptions().getJavaPackage(), false); + OptionElement option = new OptionElement(JAVA_PACKAGE_OPTION, stringKind, + file.getOptions().getJavaPackage(), false); options.add(option); } if (file.getOptions().hasJavaOuterClassname()) { - OptionElement option = new OptionElement(JAVA_OUTER_CLASSNAME_OPTION, stringKind, file.getOptions().getJavaOuterClassname(), false); + OptionElement option = new OptionElement(JAVA_OUTER_CLASSNAME_OPTION, stringKind, + file.getOptions().getJavaOuterClassname(), false); options.add(option); } if (file.getOptions().hasJavaMultipleFiles()) { - OptionElement option = new OptionElement(JAVA_MULTIPLE_FILES_OPTION, booleanKind, file.getOptions().getJavaMultipleFiles(), false); + OptionElement option = new OptionElement(JAVA_MULTIPLE_FILES_OPTION, booleanKind, + file.getOptions().getJavaMultipleFiles(), false); options.add(option); } if (file.getOptions().hasJavaGenericServices()) { - OptionElement option = new OptionElement(JAVA_GENERIC_SERVICES_OPTION, booleanKind, file.getOptions().getJavaGenericServices(), false); + OptionElement option = new OptionElement(JAVA_GENERIC_SERVICES_OPTION, booleanKind, + file.getOptions().getJavaGenericServices(), false); options.add(option); } if (file.getOptions().hasJavaStringCheckUtf8()) { - OptionElement option = new 
OptionElement(JAVA_STRING_CHECK_UTF8_OPTION, booleanKind, file.getOptions().getJavaStringCheckUtf8(), false); + OptionElement option = new OptionElement(JAVA_STRING_CHECK_UTF8_OPTION, booleanKind, + file.getOptions().getJavaStringCheckUtf8(), false); options.add(option); } if (file.getOptions().hasCcGenericServices()) { - OptionElement option = new OptionElement(CC_GENERIC_SERVICES_OPTION, booleanKind, file.getOptions().getCcGenericServices(), false); + OptionElement option = new OptionElement(CC_GENERIC_SERVICES_OPTION, booleanKind, + file.getOptions().getCcGenericServices(), false); options.add(option); } if (file.getOptions().hasCcEnableArenas()) { - OptionElement option = new OptionElement(CC_ENABLE_ARENAS_OPTION, booleanKind, file.getOptions().getCcEnableArenas(), false); + OptionElement option = new OptionElement(CC_ENABLE_ARENAS_OPTION, booleanKind, + file.getOptions().getCcEnableArenas(), false); options.add(option); } if (file.getOptions().hasCsharpNamespace()) { - OptionElement option = new OptionElement(CSHARP_NAMESPACE_OPTION, stringKind, file.getOptions().getCsharpNamespace(), false); + OptionElement option = new OptionElement(CSHARP_NAMESPACE_OPTION, stringKind, + file.getOptions().getCsharpNamespace(), false); options.add(option); } if (file.getOptions().hasGoPackage()) { - OptionElement option = new OptionElement(GO_PACKAGE_OPTION, stringKind, file.getOptions().getGoPackage(), false); + OptionElement option = new OptionElement(GO_PACKAGE_OPTION, stringKind, + file.getOptions().getGoPackage(), false); options.add(option); } if (file.getOptions().hasObjcClassPrefix()) { - OptionElement option = new OptionElement(OBJC_CLASS_PREFIX_OPTION, stringKind, file.getOptions().getObjcClassPrefix(), false); + OptionElement option = new OptionElement(OBJC_CLASS_PREFIX_OPTION, stringKind, + file.getOptions().getObjcClassPrefix(), false); options.add(option); } if (file.getOptions().hasPhpClassPrefix()) { - OptionElement option = new 
OptionElement(PHP_CLASS_PREFIX_OPTION, stringKind, file.getOptions().getPhpClassPrefix(), false); + OptionElement option = new OptionElement(PHP_CLASS_PREFIX_OPTION, stringKind, + file.getOptions().getPhpClassPrefix(), false); options.add(option); } if (file.getOptions().hasPhpGenericServices()) { - OptionElement option = new OptionElement(PHP_GENERIC_SERVICES_OPTION, booleanKind, file.getOptions().getPhpGenericServices(), false); + OptionElement option = new OptionElement(PHP_GENERIC_SERVICES_OPTION, booleanKind, + file.getOptions().getPhpGenericServices(), false); options.add(option); } if (file.getOptions().hasPhpMetadataNamespace()) { - OptionElement option = new OptionElement(PHP_METADATA_NAMESPACE_OPTION, stringKind, file.getOptions().getPhpMetadataNamespace(), false); + OptionElement option = new OptionElement(PHP_METADATA_NAMESPACE_OPTION, stringKind, + file.getOptions().getPhpMetadataNamespace(), false); options.add(option); } if (file.getOptions().hasPhpNamespace()) { - OptionElement option = new OptionElement(PHP_NAMESPACE_OPTION, stringKind, file.getOptions().getPhpNamespace(), false); + OptionElement option = new OptionElement(PHP_NAMESPACE_OPTION, stringKind, + file.getOptions().getPhpNamespace(), false); options.add(option); } if (file.getOptions().hasPyGenericServices()) { - OptionElement option = new OptionElement(PY_GENERIC_SERVICES_OPTION, booleanKind, file.getOptions().getPyGenericServices(), false); + OptionElement option = new OptionElement(PY_GENERIC_SERVICES_OPTION, booleanKind, + file.getOptions().getPyGenericServices(), false); options.add(option); } if (file.getOptions().hasRubyPackage()) { - OptionElement option = new OptionElement(RUBY_PACKAGE_OPTION, stringKind, file.getOptions().getRubyPackage(), false); + OptionElement option = new OptionElement(RUBY_PACKAGE_OPTION, stringKind, + file.getOptions().getRubyPackage(), false); options.add(option); } if (file.getOptions().hasSwiftPrefix()) { - OptionElement option = new 
OptionElement(SWIFT_PREFIX_OPTION, stringKind, file.getOptions().getSwiftPrefix(), false); + OptionElement option = new OptionElement(SWIFT_PREFIX_OPTION, stringKind, + file.getOptions().getSwiftPrefix(), false); options.add(option); } if (file.getOptions().hasOptimizeFor()) { - OptionElement option = new OptionElement(OPTIMIZE_FOR_OPTION, enumKind, file.getOptions().getOptimizeFor(), false); + OptionElement option = new OptionElement(OPTIMIZE_FOR_OPTION, enumKind, + file.getOptions().getOptimizeFor(), false); options.add(option); } return new ProtoFileElement(DEFAULT_LOCATION, packageName, syntax, imports.build(), @@ -1173,8 +1203,8 @@ private static MessageElement toMessage(FileDescriptorProto file, DescriptorProt } ImmutableList.Builder options = ImmutableList.builder(); if (descriptor.getOptions().hasMapEntry()) { - OptionElement option = new OptionElement(MAP_ENTRY_OPTION, booleanKind, descriptor.getOptions().getMapEntry(), - false); + OptionElement option = new OptionElement(MAP_ENTRY_OPTION, booleanKind, + descriptor.getOptions().getMapEntry(), false); options.add(option); } if (descriptor.getOptions().hasNoStandardDescriptorAccessor()) { @@ -1184,16 +1214,16 @@ private static MessageElement toMessage(FileDescriptorProto file, DescriptorProt } return new MessageElement(DEFAULT_LOCATION, name, "", nested.build(), options.build(), - reserved.build(), fields.build(), - oneofs.stream() - //Ignore oneOfs with no fields (like Proto3 Optional) + reserved.build(), fields.build(), oneofs.stream() + // Ignore oneOfs with no fields (like Proto3 Optional) .filter(e -> e.getValue().build().size() != 0) .map(e -> toOneof(e.getKey(), e.getValue())).collect(Collectors.toList()), extensions.build(), Collections.emptyList(), Collections.emptyList()); } private static OneOfElement toOneof(String name, ImmutableList.Builder fields) { - return new OneOfElement(name, "", fields.build(), Collections.emptyList(), Collections.emptyList(), DEFAULT_LOCATION); + return new 
OneOfElement(name, "", fields.build(), Collections.emptyList(), Collections.emptyList(), + DEFAULT_LOCATION); } private static EnumElement toEnum(EnumDescriptorProto ed) { @@ -1207,8 +1237,8 @@ private static EnumElement toEnum(EnumDescriptorProto ed) { ImmutableList.Builder options = ImmutableList.builder(); if (ed.getOptions().hasAllowAlias()) { - OptionElement option = new OptionElement(ALLOW_ALIAS_OPTION, booleanKind, ed.getOptions().getAllowAlias(), - false); + OptionElement option = new OptionElement(ALLOW_ALIAS_OPTION, booleanKind, + ed.getOptions().getAllowAlias(), false); options.add(option); } @@ -1217,7 +1247,8 @@ private static EnumElement toEnum(EnumDescriptorProto ed) { ReservedElement reservedElem; while (reservedRangeIterator.hasNext()) { - EnumDescriptorProto.EnumReservedRange range = (EnumDescriptorProto.EnumReservedRange) reservedRangeIterator.next(); + EnumDescriptorProto.EnumReservedRange range = (EnumDescriptorProto.EnumReservedRange) reservedRangeIterator + .next(); reservedElem = toReserved(range); reserved.add(reservedElem); } @@ -1230,15 +1261,16 @@ private static EnumElement toEnum(EnumDescriptorProto ed) { reserved.add(reservedElem); } - return new EnumElement(DEFAULT_LOCATION, name, "", options.build(), constants.build(), reserved.build()); + return new EnumElement(DEFAULT_LOCATION, name, "", options.build(), constants.build(), + reserved.build()); } private static ServiceElement toService(DescriptorProtos.ServiceDescriptorProto sv) { String name = sv.getName(); ImmutableList.Builder rpcs = ImmutableList.builder(); for (MethodDescriptorProto md : sv.getMethodList()) { - rpcs.add(new RpcElement(DEFAULT_LOCATION, md.getName(), "", md.getInputType(), - md.getOutputType(), md.getClientStreaming(), md.getServerStreaming(), getMethodOptionList(md.getOptions()))); + rpcs.add(new RpcElement(DEFAULT_LOCATION, md.getName(), "", md.getInputType(), md.getOutputType(), + md.getClientStreaming(), md.getServerStreaming(), 
getMethodOptionList(md.getOptions()))); } return new ServiceElement(DEFAULT_LOCATION, name, "", rpcs.build(), @@ -1250,7 +1282,8 @@ private static FieldElement toField(FileDescriptorProto file, FieldDescriptorPro DescriptorProtos.FieldOptions fieldDescriptorOptions = fd.getOptions(); ImmutableList.Builder options = ImmutableList.builder(); if (fieldDescriptorOptions.hasPacked()) { - OptionElement option = new OptionElement(PACKED_OPTION, booleanKind, fd.getOptions().getPacked(), false); + OptionElement option = new OptionElement(PACKED_OPTION, booleanKind, fd.getOptions().getPacked(), + false); options.add(option); } if (fd.hasJsonName() && !fd.getJsonName().equals(getDefaultJsonName(name))) { @@ -1258,16 +1291,18 @@ private static FieldElement toField(FileDescriptorProto file, FieldDescriptorPro options.add(option); } if (fieldDescriptorOptions.hasDeprecated()) { - OptionElement option = new OptionElement(DEPRECATED_OPTION, booleanKind, fieldDescriptorOptions.getDeprecated(), - false); + OptionElement option = new OptionElement(DEPRECATED_OPTION, booleanKind, + fieldDescriptorOptions.getDeprecated(), false); options.add(option); } if (fieldDescriptorOptions.hasCtype()) { - OptionElement option = new OptionElement(CTYPE_OPTION, enumKind, fieldDescriptorOptions.getCtype(), false); + OptionElement option = new OptionElement(CTYPE_OPTION, enumKind, + fieldDescriptorOptions.getCtype(), false); options.add(option); } if (fieldDescriptorOptions.hasJstype()) { - OptionElement option = new OptionElement(JSTYPE_OPTION, enumKind, fieldDescriptorOptions.getJstype(), false); + OptionElement option = new OptionElement(JSTYPE_OPTION, enumKind, + fieldDescriptorOptions.getJstype(), false); options.add(option); } if (fieldDescriptorOptions.hasExtension(ProtobufSchemaMetadata.metadataKey)) { @@ -1283,11 +1318,12 @@ private static FieldElement toField(FileDescriptorProto file, FieldDescriptorPro options.add(valueOption); } - //Implicitly jsonName to null as Options is already 
setting it. Setting it here results in duplicate json_name - //option in inferred schema. + // Implicitly jsonName to null as Options is already setting it. Setting it here results in duplicate + // json_name + // option in inferred schema. String jsonName = null; String defaultValue = fd.hasDefaultValue() && fd.getDefaultValue() != null ? fd.getDefaultValue() - : null; + : null; return new FieldElement(DEFAULT_LOCATION, inOneof ? null : label(file, fd), dataType(fd), name, defaultValue, jsonName, fd.getNumber(), "", options.build()); } @@ -1306,7 +1342,7 @@ private static Field.Label label(FileDescriptorProto file, FieldDescriptorProto case LABEL_REQUIRED: return isProto3 ? null : Field.Label.REQUIRED; case LABEL_OPTIONAL: - //If it's a Proto3 optional, we have to print the optional label. + // If it's a Proto3 optional, we have to print the optional label. return isProto3 && !fd.hasProto3Optional() ? null : Field.Label.OPTIONAL; case LABEL_REPEATED: return Field.Label.REPEATED; @@ -1337,11 +1373,13 @@ private static List getOptionList(boolean hasDeprecated, boolean private static List getMethodOptionList(MethodOptions methodOptions) { ImmutableList.Builder options = ImmutableList.builder(); if (methodOptions.hasDeprecated()) { - OptionElement option = new OptionElement(DEPRECATED_OPTION, booleanKind, methodOptions.getDeprecated(), false); + OptionElement option = new OptionElement(DEPRECATED_OPTION, booleanKind, + methodOptions.getDeprecated(), false); options.add(option); } if (methodOptions.hasIdempotencyLevel()) { - OptionElement option = new OptionElement(IDEMPOTENCY_LEVEL_OPTION, enumKind, methodOptions.getIdempotencyLevel(), false); + OptionElement option = new OptionElement(IDEMPOTENCY_LEVEL_OPTION, enumKind, + methodOptions.getIdempotencyLevel(), false); options.add(option); } @@ -1363,8 +1401,8 @@ private static String getDefaultJsonName(String fieldName) { return defaultJsonName; } - - public static Descriptors.Descriptor toDescriptor(String name, 
ProtoFileElement protoFileElement, Map dependencies) { + public static Descriptors.Descriptor toDescriptor(String name, ProtoFileElement protoFileElement, + Map dependencies) { return toDynamicSchema(name, protoFileElement, dependencies).getMessageDescriptor(name); } @@ -1374,16 +1412,15 @@ public static MessageElement firstMessage(ProtoFileElement fileElement) { return (MessageElement) typeElement; } } - //Intended null return + // Intended null return return null; } /* * DynamicSchema is used as a temporary helper class and should not be exposed in the API. */ - private static DynamicSchema toDynamicSchema( - String name, ProtoFileElement rootElem, Map dependencies - ) { + private static DynamicSchema toDynamicSchema(String name, ProtoFileElement rootElem, + Map dependencies) { DynamicSchema.Builder schema = DynamicSchema.newBuilder(); try { @@ -1439,9 +1476,7 @@ private static DynamicSchema toDynamicSchema( } } - private static MessageDefinition toDynamicMessage( - MessageElement messageElem - ) { + private static MessageDefinition toDynamicMessage(MessageElement messageElem) { MessageDefinition.Builder message = MessageDefinition.newBuilder(messageElem.getName()); for (TypeElement type : messageElem.getNestedTypes()) { if (type instanceof MessageElement) { @@ -1457,13 +1492,7 @@ private static MessageDefinition toDynamicMessage( String defaultVal = field.getDefaultValue(); String jsonName = findOption("json_name", field.getOptions()) .map(o -> o.getValue().toString()).orElse(null); - oneofBuilder.addField( - field.getType(), - field.getName(), - field.getTag(), - defaultVal, - jsonName - ); + oneofBuilder.addField(field.getType(), field.getName(), field.getTag(), defaultVal, jsonName); added.add(field.getName()); } } @@ -1492,15 +1521,8 @@ private static MessageDefinition toDynamicMessage( mapMessage.addField(null, valueType.getSimpleName(), VALUE_FIELD, 2, null); message.addMessageDefinition(mapMessage.build()); } - message.addField( - label, - fieldType, - 
field.getName(), - field.getTag(), - defaultVal, - jsonName, - isPacked - ); + message.addField(label, fieldType, field.getName(), field.getTag(), defaultVal, jsonName, + isPacked); } for (ReservedElement reserved : messageElem.getReserveds()) { for (Object elem : reserved.getValues()) { @@ -1513,8 +1535,8 @@ private static MessageDefinition toDynamicMessage( IntRange range = (IntRange) elem; message.addReservedRange(range.getStart(), range.getEndInclusive()); } else { - throw new IllegalStateException("Unsupported reserved type: " + elem.getClass() - .getName()); + throw new IllegalStateException( + "Unsupported reserved type: " + elem.getClass().getName()); } } } diff --git a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/MessageDefinition.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/MessageDefinition.java index d424cf47a3..3b97632bfa 100644 --- a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/MessageDefinition.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/MessageDefinition.java @@ -42,22 +42,16 @@ public Builder addField(String label, String type, String name, int num, String return addField(label, type, name, num, defaultVal, null, null); } - public Builder addField( - String label, - String type, - String name, - int num, - String defaultVal, - String jsonName, - Boolean isPacked - ) { + public Builder addField(String label, String type, String name, int num, String defaultVal, + String jsonName, Boolean isPacked) { DescriptorProtos.FieldDescriptorProto.Label protoLabel = sLabelMap.get(label); doAddField(protoLabel, type, name, num, defaultVal, jsonName, isPacked, null); return this; } public OneofBuilder addOneof(String oneofName) { - mMsgTypeBuilder.addOneofDecl(DescriptorProtos.OneofDescriptorProto.newBuilder().setName(oneofName).build()); + mMsgTypeBuilder.addOneofDecl( 
+ DescriptorProtos.OneofDescriptorProto.newBuilder().setName(oneofName).build()); return new OneofBuilder(this, mOneofIndex++); } @@ -79,8 +73,8 @@ public Builder addReservedName(String reservedName) { // Note: added public Builder addReservedRange(int start, int end) { - DescriptorProtos.DescriptorProto.ReservedRange.Builder rangeBuilder = - DescriptorProtos.DescriptorProto.ReservedRange.newBuilder(); + DescriptorProtos.DescriptorProto.ReservedRange.Builder rangeBuilder = DescriptorProtos.DescriptorProto.ReservedRange + .newBuilder(); rangeBuilder.setStart(start).setEnd(end); mMsgTypeBuilder.addReservedRange(rangeBuilder.build()); return this; @@ -88,8 +82,8 @@ public Builder addReservedRange(int start, int end) { // Note: added public Builder setMapEntry(boolean mapEntry) { - DescriptorProtos.MessageOptions.Builder optionsBuilder = - DescriptorProtos.MessageOptions.newBuilder(); + DescriptorProtos.MessageOptions.Builder optionsBuilder = DescriptorProtos.MessageOptions + .newBuilder(); optionsBuilder.setMapEntry(mapEntry); mMsgTypeBuilder.mergeOptions(optionsBuilder.build()); return this; @@ -106,17 +100,10 @@ private Builder(String msgTypeName) { mMsgTypeBuilder.setName(msgTypeName); } - private void doAddField( - DescriptorProtos.FieldDescriptorProto.Label label, - String type, - String name, - int num, - String defaultVal, - String jsonName, - Boolean isPacked, - OneofBuilder oneofBuilder - ) { - DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder(); + private void doAddField(DescriptorProtos.FieldDescriptorProto.Label label, String type, String name, + int num, String defaultVal, String jsonName, Boolean isPacked, OneofBuilder oneofBuilder) { + DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder = DescriptorProtos.FieldDescriptorProto + .newBuilder(); // Note: changed if (label != null) { fieldBuilder.setLabel(label); @@ -138,8 +125,8 @@ private void doAddField( 
fieldBuilder.setJsonName(jsonName); } if (isPacked != null) { - DescriptorProtos.FieldOptions.Builder optionsBuilder = - DescriptorProtos.FieldOptions.newBuilder(); + DescriptorProtos.FieldOptions.Builder optionsBuilder = DescriptorProtos.FieldOptions + .newBuilder(); optionsBuilder.setPacked(isPacked); fieldBuilder.mergeOptions(optionsBuilder.build()); } @@ -160,23 +147,9 @@ public OneofBuilder addField(String type, String name, int num, String defaultVa return addField(type, name, num, defaultVal, null); } - public OneofBuilder addField( - String type, - String name, - int num, - String defaultVal, - String jsonName - ) { - mMsgBuilder.doAddField( - DescriptorProtos.FieldDescriptorProto.Label.LABEL_OPTIONAL, - type, - name, - num, - defaultVal, - jsonName, - null, - this - ); + public OneofBuilder addField(String type, String name, int num, String defaultVal, String jsonName) { + mMsgBuilder.doAddField(DescriptorProtos.FieldDescriptorProto.Label.LABEL_OPTIONAL, type, name, + num, defaultVal, jsonName, null, this); return this; } @@ -221,9 +194,9 @@ private OneofBuilder(MessageDefinition.Builder msgBuilder, int oneofIdx) { sTypeMap.put("bool", DescriptorProtos.FieldDescriptorProto.Type.TYPE_BOOL); sTypeMap.put("string", DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING); sTypeMap.put("bytes", DescriptorProtos.FieldDescriptorProto.Type.TYPE_BYTES); - //sTypeMap.put("enum", FieldDescriptorProto.Type.TYPE_ENUM); - //sTypeMap.put("message", FieldDescriptorProto.Type.TYPE_MESSAGE); - //sTypeMap.put("group", FieldDescriptorProto.Type.TYPE_GROUP); + // sTypeMap.put("enum", FieldDescriptorProto.Type.TYPE_ENUM); + // sTypeMap.put("message", FieldDescriptorProto.Type.TYPE_MESSAGE); + // sTypeMap.put("group", FieldDescriptorProto.Type.TYPE_GROUP); sLabelMap = new HashMap(); sLabelMap.put("optional", DescriptorProtos.FieldDescriptorProto.Label.LABEL_OPTIONAL); diff --git 
a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufFile.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufFile.java index 7480288a15..44aaa9915c 100644 --- a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufFile.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufFile.java @@ -28,8 +28,8 @@ import java.util.Set; /** - * Indexed representation of the data resulting from parsing a single .proto protobuf schema file, - * used mainly for schema validation. + * Indexed representation of the data resulting from parsing a single .proto protobuf schema file, used mainly + * for schema validation. * * @see Protolock * @see ProtobufCompatibilityChecker @@ -60,7 +60,7 @@ public ProtobufFile(String data) { } public ProtobufFile(File file) throws IOException { -// Location location = Location.get(file.getAbsolutePath()); + // Location location = Location.get(file.getAbsolutePath()); List data = Files.readLines(file, StandardCharsets.UTF_8); element = toProtoFileElement(String.join("\n", data)); buildIndexes(); @@ -152,7 +152,7 @@ public Map> getServiceRPCSignatures() { public Syntax getSyntax() { Syntax syntax = element.getSyntax(); - return syntax != null ? syntax : Syntax.PROTO_2 /* default syntax */; + return syntax != null ? 
syntax : Syntax.PROTO_2 /* default syntax */; } private void buildIndexes() { @@ -179,7 +179,8 @@ private void buildIndexes() { for (RpcElement rpcElement : serviceElement.getRpcs()) { rpcNames.add(rpcElement.getName()); - String signature = rpcElement.getRequestType() + ":" + rpcElement.getRequestStreaming() + "->" + rpcElement.getResponseType() + ":" + rpcElement.getResponseStreaming(); + String signature = rpcElement.getRequestType() + ":" + rpcElement.getRequestStreaming() + "->" + + rpcElement.getResponseType() + ":" + rpcElement.getResponseStreaming(); rpcSignatures.put(rpcElement.getName(), signature); } if (!rpcNames.isEmpty()) { @@ -252,7 +253,6 @@ private void processMessageElement(String scope, MessageElement messageElement) } } - if (!fieldKeySet.isEmpty()) { nonReservedFields.put(scope + messageElement.getName(), fieldKeySet); } diff --git a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufMessage.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufMessage.java index 61b825e31a..f6a561ed89 100644 --- a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufMessage.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufMessage.java @@ -1,12 +1,13 @@ package io.apicurio.registry.utils.protobuf.schema; -import java.util.HashMap; -import java.util.Map; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; import metadata.ProtobufSchemaMetadata; +import java.util.HashMap; +import java.util.Map; + public class ProtobufMessage { private static Map fieldDescriptorTypes; @@ -53,44 +54,19 @@ public DescriptorProto build() { return descriptorProtoBuilder.build(); } - public void addField( - String label, - String type, - String typeName, - String name, - 
int num, - String defaultVal, - String jsonName, - Boolean isDeprecated, - Boolean isPacked, - DescriptorProtos.FieldOptions.CType ctype, - DescriptorProtos.FieldOptions.JSType jsType, - String metadataKey, - String metadataValue, - Integer oneOfIndex, - Boolean isProto3Optional - ) { - descriptorProtoBuilder.addField( - buildFieldDescriptorProto(label, type, typeName, name, num, defaultVal, jsonName, isDeprecated, - isPacked, ctype, jsType, metadataKey, metadataValue, oneOfIndex, isProto3Optional) - ); + public void addField(String label, String type, String typeName, String name, int num, String defaultVal, + String jsonName, Boolean isDeprecated, Boolean isPacked, + DescriptorProtos.FieldOptions.CType ctype, DescriptorProtos.FieldOptions.JSType jsType, + String metadataKey, String metadataValue, Integer oneOfIndex, Boolean isProto3Optional) { + descriptorProtoBuilder.addField(buildFieldDescriptorProto(label, type, typeName, name, num, + defaultVal, jsonName, isDeprecated, isPacked, ctype, jsType, metadataKey, metadataValue, + oneOfIndex, isProto3Optional)); } - public static FieldDescriptorProto buildFieldDescriptorProto(String label, - String type, - String typeName, - String name, - int num, - String defaultVal, - String jsonName, - Boolean isDeprecated, - Boolean isPacked, - DescriptorProtos.FieldOptions.CType ctype, - DescriptorProtos.FieldOptions.JSType jsType, - String metadataKey, - String metadataValue, - Integer oneOfIndex, - Boolean isProto3Optional) { + public static FieldDescriptorProto buildFieldDescriptorProto(String label, String type, String typeName, + String name, int num, String defaultVal, String jsonName, Boolean isDeprecated, Boolean isPacked, + DescriptorProtos.FieldOptions.CType ctype, DescriptorProtos.FieldOptions.JSType jsType, + String metadataKey, String metadataValue, Integer oneOfIndex, Boolean isProto3Optional) { FieldDescriptorProto.Builder fieldBuilder = FieldDescriptorProto.newBuilder(); FieldDescriptorProto.Label protoLabel = 
fieldDescriptorLabels.get(label); if (label != null) { @@ -105,11 +81,11 @@ public static FieldDescriptorProto buildFieldDescriptorProto(String label, fieldDescriptorType = fieldDescriptorTypes.get(type); fieldBuilder.setType(fieldDescriptorType); } - if (fieldDescriptorType != null && - (fieldDescriptorType.equals(FieldDescriptorProto.Type.TYPE_MESSAGE) || fieldDescriptorType.equals( - FieldDescriptorProto.Type.TYPE_ENUM))) { - //References to other nested messages / enums / google.protobuf types start with "." - //See https://developers.google.com/protocol-buffers/docs/proto#packages_and_name_resolution + if (fieldDescriptorType != null + && (fieldDescriptorType.equals(FieldDescriptorProto.Type.TYPE_MESSAGE) + || fieldDescriptorType.equals(FieldDescriptorProto.Type.TYPE_ENUM))) { + // References to other nested messages / enums / google.protobuf types start with "." + // See https://developers.google.com/protocol-buffers/docs/proto#packages_and_name_resolution fieldBuilder.setTypeName(typeName.startsWith(".") ? typeName : "." 
+ typeName); } else { fieldBuilder.setTypeName(typeName); diff --git a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchema.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchema.java index 5c4b532c79..ab2a48945a 100644 --- a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchema.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchema.java @@ -1,10 +1,10 @@ package io.apicurio.registry.utils.protobuf.schema; -import java.util.Objects; - import com.google.protobuf.Descriptors.FileDescriptor; import com.squareup.wire.schema.internal.parser.ProtoFileElement; +import java.util.Objects; + public class ProtobufSchema { private final FileDescriptor fileDescriptor; diff --git a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchemaLoader.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchemaLoader.java index 08ccd118a4..46eb93308b 100644 --- a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchemaLoader.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchemaLoader.java @@ -27,40 +27,23 @@ public class ProtobufSchemaLoader { private static final String GOOGLE_WELLKNOWN_PATH = "google/protobuf/"; private static final String METADATA_PATH = "metadata/"; private static final String DECIMAL_PATH = "additionalTypes/"; - //Adding pre-built support for commonly used Google API Protos, - //https://github.com/googleapis/googleapis - //These files need to be manually loaded into the FileSystem - //as Square doesn't support them by default. 
- private final static Set GOOGLE_API_PROTOS = - ImmutableSet.builder() - .add("money.proto") - .add("timeofday.proto") - .add("date.proto") - .add("calendar_period.proto") - .add("color.proto") - .add("dayofweek.proto") - .add("latlng.proto") - .add("fraction.proto") - .add("month.proto") - .add("phone_number.proto") - .add("postal_address.proto") - .add("localized_text.proto") - .add("interval.proto") - .add("expr.proto") - .add("quaternion.proto") - .build(); - //Adding support for Protobuf well-known types under package google.protobuf that are not covered by Square - //https://developers.google.com/protocol-buffers/docs/reference/google.protobuf - //These files need to be manually loaded into the FileSystem - //as Square doesn't support them by default. - private final static Set GOOGLE_WELLKNOWN_PROTOS = - ImmutableSet.builder() - .add("api.proto") - .add("field_mask.proto") - .add("source_context.proto") - .add("struct.proto") - .add("type.proto") - .build(); + // Adding pre-built support for commonly used Google API Protos, + // https://github.com/googleapis/googleapis + // These files need to be manually loaded into the FileSystem + // as Square doesn't support them by default. + private final static Set GOOGLE_API_PROTOS = ImmutableSet. builder().add("money.proto") + .add("timeofday.proto").add("date.proto").add("calendar_period.proto").add("color.proto") + .add("dayofweek.proto").add("latlng.proto").add("fraction.proto").add("month.proto") + .add("phone_number.proto").add("postal_address.proto").add("localized_text.proto") + .add("interval.proto").add("expr.proto").add("quaternion.proto").build(); + // Adding support for Protobuf well-known types under package google.protobuf that are not covered by + // Square + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf + // These files need to be manually loaded into the FileSystem + // as Square doesn't support them by default. 
+ private final static Set GOOGLE_WELLKNOWN_PROTOS = ImmutableSet. builder() + .add("api.proto").add("field_mask.proto").add("source_context.proto").add("struct.proto") + .add("type.proto").build(); private final static String METADATA_PROTO = "metadata.proto"; private final static String DECIMAL_PROTO = "decimal.proto"; @@ -87,16 +70,17 @@ private static FileSystem getFileSystem() throws IOException { return inMemoryFileSystem; } - private static void loadProtoFiles(FakeFileSystem inMemoryFileSystem, ClassLoader classLoader, Set protos, - String protoPath) - throws IOException { + private static void loadProtoFiles(FakeFileSystem inMemoryFileSystem, ClassLoader classLoader, + Set protos, String protoPath) throws IOException { for (String proto : protos) { - //Loads the proto file resource files. + // Loads the proto file resource files. final InputStream inputStream = classLoader.getResourceAsStream(protoPath + proto); - final String fileContents = CharStreams.toString(new InputStreamReader(inputStream, Charsets.UTF_8)); + final String fileContents = CharStreams + .toString(new InputStreamReader(inputStream, Charsets.UTF_8)); final okio.Path path = okio.Path.get("/" + protoPath + "/" + proto); FileHandle fileHandle = inMemoryFileSystem.openReadWrite(path); - fileHandle.write(0, fileContents.getBytes(StandardCharsets.UTF_8), 0, fileContents.getBytes(StandardCharsets.UTF_8).length); + fileHandle.write(0, fileContents.getBytes(StandardCharsets.UTF_8), 0, + fileContents.getBytes(StandardCharsets.UTF_8).length); fileHandle.close(); } } @@ -115,31 +99,33 @@ private static String createDirectory(String[] dirs, FileSystem fileSystem) thro } /** - * Creates a schema loader using a in-memory file system. This is required for square wire schema parser and linker - * to load the types correctly. See https://github.com/square/wire/issues/2024# - * As of now this only supports reading one .proto file but can be extended to support reading multiple files. 
+ * Creates a schema loader using a in-memory file system. This is required for square wire schema parser + * and linker to load the types correctly. See https://github.com/square/wire/issues/2024# As of now this + * only supports reading one .proto file but can be extended to support reading multiple files. + * * @param packageName Package name for the .proto if present * @param fileName Name of the .proto file. * @param schemaDefinition Schema Definition to parse. * @return Schema - parsed and properly linked Schema. */ - public static ProtobufSchemaLoaderContext loadSchema(Optional packageName, String fileName, String schemaDefinition) - throws IOException { + public static ProtobufSchemaLoaderContext loadSchema(Optional packageName, String fileName, + String schemaDefinition) throws IOException { return loadSchema(packageName, fileName, schemaDefinition, Collections.emptyMap()); } /** - * Creates a schema loader using a in-memory file system. This is required for square wire schema parser and linker - * to load the types correctly. See https://github.com/square/wire/issues/2024# - * As of now this only supports reading one .proto file but can be extended to support reading multiple files. + * Creates a schema loader using a in-memory file system. This is required for square wire schema parser + * and linker to load the types correctly. See https://github.com/square/wire/issues/2024# As of now this + * only supports reading one .proto file but can be extended to support reading multiple files. + * * @param packageName Package name for the .proto if present * @param fileName Name of the .proto file. * @param schemaDefinition Schema Definition to parse. * @param schemaDefinition Schema Definition to parse. * @return Schema - parsed and properly linked Schema. 
*/ - public static ProtobufSchemaLoaderContext loadSchema(Optional packageName, String fileName, String schemaDefinition, Map deps) - throws IOException { + public static ProtobufSchemaLoaderContext loadSchema(Optional packageName, String fileName, + String schemaDefinition, Map deps) throws IOException { final FileSystem inMemoryFileSystem = getFileSystem(); String[] dirs = {}; @@ -164,14 +150,16 @@ public static ProtobufSchemaLoaderContext loadSchema(Optional packageNam // apply the same logic used for dirs of the root one depDirPath = createDirectory(packageNameDep.split("\\."), inMemoryFileSystem); } - writeFile(depSchema, depKey.substring(beforeFileName + 1), depDirPath, inMemoryFileSystem); + writeFile(depSchema, depKey.substring(beforeFileName + 1), depDirPath, + inMemoryFileSystem); } else { writeFile(depSchema, depKey, dirPath, inMemoryFileSystem); } } SchemaLoader schemaLoader = new SchemaLoader(inMemoryFileSystem); - schemaLoader.initRoots(Lists.newArrayList(Location.get("/")), Lists.newArrayList(Location.get("/"))); + schemaLoader.initRoots(Lists.newArrayList(Location.get("/")), + Lists.newArrayList(Location.get("/"))); Schema schema = schemaLoader.loadSchema(); ProtoFile protoFile = schema.protoFile(path.toString().replaceFirst("/", "")); @@ -186,7 +174,8 @@ public static ProtobufSchemaLoaderContext loadSchema(Optional packageNam } } - private static okio.Path writeFile(String schemaDefinition, String fileName, String dirPath, FileSystem inMemoryFileSystem) throws IOException { + private static okio.Path writeFile(String schemaDefinition, String fileName, String dirPath, + FileSystem inMemoryFileSystem) throws IOException { FileHandle fileHandle = null; try { String protoFileName = fileName.endsWith(".proto") ? 
fileName : fileName + ".proto"; diff --git a/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtilsTest.java b/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtilsTest.java index 6cccc08691..1b561dfb22 100644 --- a/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtilsTest.java +++ b/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtilsTest.java @@ -9,6 +9,7 @@ import io.apicurio.registry.utils.protobuf.schema.syntax2.TestOrderingSyntax2; import io.apicurio.registry.utils.protobuf.schema.syntax2.TestSyntax2JavaPackage; import io.apicurio.registry.utils.protobuf.schema.syntax2.TestSyntax2OneOfs; +import io.apicurio.registry.utils.protobuf.schema.syntax2.WellKnownTypesTestSyntax2; import io.apicurio.registry.utils.protobuf.schema.syntax2.customoptions.TestSyntax2CustomOptions; import io.apicurio.registry.utils.protobuf.schema.syntax2.jsonname.TestSyntax2JsonName; import io.apicurio.registry.utils.protobuf.schema.syntax2.options.example.TestOrderingSyntax2OptionsExampleName; @@ -19,7 +20,6 @@ import io.apicurio.registry.utils.protobuf.schema.syntax3.TestSyntax3OneOfs; import io.apicurio.registry.utils.protobuf.schema.syntax3.TestSyntax3Optional; import io.apicurio.registry.utils.protobuf.schema.syntax3.WellKnownTypesTestSyntax3; -import io.apicurio.registry.utils.protobuf.schema.syntax2.WellKnownTypesTestSyntax2; import io.apicurio.registry.utils.protobuf.schema.syntax3.customoptions.TestSyntax3CustomOptions; import io.apicurio.registry.utils.protobuf.schema.syntax3.jsonname.TestSyntax3JsonName; import io.apicurio.registry.utils.protobuf.schema.syntax3.options.TestOrderingSyntax3Options; @@ -52,50 +52,34 @@ public class FileDescriptorUtilsTest { private static Stream testProtoFileProvider() { - return - Stream.of( - TestOrderingSyntax2.getDescriptor(), 
- TestOrderingSyntax2OptionsExampleName.getDescriptor(), - TestOrderingSyntax2Specified.getDescriptor(), - TestOrderingSyntax3.getDescriptor(), - TestOrderingSyntax3Options.getDescriptor(), - TestOrderingSyntax2References.getDescriptor(), - TestOrderingSyntax3References.getDescriptor(), - WellKnownTypesTestSyntax3.getDescriptor(), - WellKnownTypesTestSyntax2.getDescriptor(), - TestSyntax3Optional.getDescriptor(), - TestSyntax2OneOfs.getDescriptor(), - TestSyntax3OneOfs.getDescriptor(), - TestSyntax2JavaPackage.getDescriptor(), - TestSyntax3JavaPackage.getDescriptor(), - TestSyntax2CustomOptions.getDescriptor(), - TestSyntax3CustomOptions.getDescriptor() - ) - .map(Descriptors.FileDescriptor::getFile) - .map(Arguments::of); + return Stream.of(TestOrderingSyntax2.getDescriptor(), + TestOrderingSyntax2OptionsExampleName.getDescriptor(), + TestOrderingSyntax2Specified.getDescriptor(), TestOrderingSyntax3.getDescriptor(), + TestOrderingSyntax3Options.getDescriptor(), TestOrderingSyntax2References.getDescriptor(), + TestOrderingSyntax3References.getDescriptor(), WellKnownTypesTestSyntax3.getDescriptor(), + WellKnownTypesTestSyntax2.getDescriptor(), TestSyntax3Optional.getDescriptor(), + TestSyntax2OneOfs.getDescriptor(), TestSyntax3OneOfs.getDescriptor(), + TestSyntax2JavaPackage.getDescriptor(), TestSyntax3JavaPackage.getDescriptor(), + TestSyntax2CustomOptions.getDescriptor(), TestSyntax3CustomOptions.getDescriptor()) + .map(Descriptors.FileDescriptor::getFile).map(Arguments::of); } private static Stream testProtoFileProviderForJsonName() { - return - Stream.of( - TestSyntax2JsonName.getDescriptor(), - TestSyntax3JsonName.getDescriptor() - ) - .map(Descriptors.FileDescriptor::getFile) - .map(Arguments::of); + return Stream.of(TestSyntax2JsonName.getDescriptor(), TestSyntax3JsonName.getDescriptor()) + .map(Descriptors.FileDescriptor::getFile).map(Arguments::of); } private static Stream testParseWithDepsProtoFilesProvider() { ClassLoader classLoader = 
FileDescriptorUtilsTest.class.getClassLoader(); - File mainProtoFile = new File(Objects.requireNonNull(classLoader.getResource("parseWithDeps/producer.proto")).getFile()); + File mainProtoFile = new File( + Objects.requireNonNull(classLoader.getResource("parseWithDeps/producer.proto")).getFile()); // do the same with the deps - File[] deps = Stream.of( - "mypackage0/producerId.proto", - "mypackage2/version.proto", - "broken/helloworld.proto" - ).map(s -> new File(Objects.requireNonNull(classLoader.getResource("parseWithDeps/" + s)).getFile())).toArray(File[]::new); - return Stream.of( - Arguments.of(true, true, mainProtoFile, deps), + File[] deps = Stream + .of("mypackage0/producerId.proto", "mypackage2/version.proto", "broken/helloworld.proto") + .map(s -> new File( + Objects.requireNonNull(classLoader.getResource("parseWithDeps/" + s)).getFile())) + .toArray(File[]::new); + return Stream.of(Arguments.of(true, true, mainProtoFile, deps), Arguments.of(false, true, mainProtoFile, deps), Arguments.of(true, false, mainProtoFile, deps), Arguments.of(false, false, mainProtoFile, deps)); @@ -111,28 +95,32 @@ public void fileDescriptorToProtoFile_ParsesJsonNameOptionCorrectly() { String actualSchema = protoFile.toSchema(); - //TODO: Need a better way to compare schema strings. + // TODO: Need a better way to compare schema strings. 
assertTrue(actualSchema.contains(expectedFieldWithJsonName)); assertTrue(actualSchema.contains(expectedFieldWithoutJsonName)); } @ParameterizedTest @MethodSource("testProtoFileProvider") - public void ParsesFileDescriptorsAndRawSchemaIntoCanonicalizedForm_Accurately(Descriptors.FileDescriptor fileDescriptor) throws Exception { + public void ParsesFileDescriptorsAndRawSchemaIntoCanonicalizedForm_Accurately( + Descriptors.FileDescriptor fileDescriptor) throws Exception { DescriptorProtos.FileDescriptorProto fileDescriptorProto = fileDescriptor.toProto(); String actualSchema = FileDescriptorUtils.fileDescriptorToProtoFile(fileDescriptorProto).toSchema(); String fileName = fileDescriptorProto.getName(); String expectedSchema = ProtobufTestCaseReader.getRawSchema(fileName); - //Convert to Proto and compare - DescriptorProtos.FileDescriptorProto expectedFileDescriptorProto = schemaTextToFileDescriptor(expectedSchema, fileName).toProto(); - DescriptorProtos.FileDescriptorProto actualFileDescriptorProto = schemaTextToFileDescriptor(actualSchema, fileName).toProto(); + // Convert to Proto and compare + DescriptorProtos.FileDescriptorProto expectedFileDescriptorProto = schemaTextToFileDescriptor( + expectedSchema, fileName).toProto(); + DescriptorProtos.FileDescriptorProto actualFileDescriptorProto = schemaTextToFileDescriptor( + actualSchema, fileName).toProto(); assertEquals(expectedFileDescriptorProto, actualFileDescriptorProto, fileName); - //We are comparing the generated fileDescriptor against the original fileDescriptorProto generated by Protobuf compiler. - //TODO: Square library doesn't respect the ordering of OneOfs and Proto3 optionals. - //This will be fixed in upcoming square version, https://github.com/square/wire/pull/2046 + // We are comparing the generated fileDescriptor against the original fileDescriptorProto generated by + // Protobuf compiler. + // TODO: Square library doesn't respect the ordering of OneOfs and Proto3 optionals. 
+ // This will be fixed in upcoming square version, https://github.com/square/wire/pull/2046 assertThat(expectedFileDescriptorProto).ignoringRepeatedFieldOrder().isEqualTo(fileDescriptorProto); } @@ -140,67 +128,54 @@ public void ParsesFileDescriptorsAndRawSchemaIntoCanonicalizedForm_Accurately(De @Test public void ParsesSchemasWithNoPackageNameSpecified() throws Exception { String schemaDefinition = "import \"google/protobuf/timestamp.proto\"; message Bar {optional google.protobuf.Timestamp c = 4; required int32 a = 1; optional string b = 2; }"; - String actualFileDescriptorProto = schemaTextToFileDescriptor(schemaDefinition, "anyFile.proto").toProto().toString(); + String actualFileDescriptorProto = schemaTextToFileDescriptor(schemaDefinition, "anyFile.proto") + .toProto().toString(); String expectedFileDescriptorProto = "name: \"anyFile.proto\"\n" - + "dependency: \"google/protobuf/timestamp.proto\"\n" - + "message_type {\n" - + " name: \"Bar\"\n" - + " field {\n" - + " name: \"c\"\n" - + " number: 4\n" - + " label: LABEL_OPTIONAL\n" - + " type: TYPE_MESSAGE\n" - + " type_name: \".google.protobuf.Timestamp\"\n" - + " }\n" - + " field {\n" - + " name: \"a\"\n" - + " number: 1\n" - + " label: LABEL_REQUIRED\n" - + " type: TYPE_INT32\n" - + " }\n" - + " field {\n" - + " name: \"b\"\n" - + " number: 2\n" - + " label: LABEL_OPTIONAL\n" - + " type: TYPE_STRING\n" - + " }\n" - + "}\n"; + + "dependency: \"google/protobuf/timestamp.proto\"\n" + "message_type {\n" + + " name: \"Bar\"\n" + " field {\n" + " name: \"c\"\n" + " number: 4\n" + + " label: LABEL_OPTIONAL\n" + " type: TYPE_MESSAGE\n" + + " type_name: \".google.protobuf.Timestamp\"\n" + " }\n" + " field {\n" + + " name: \"a\"\n" + " number: 1\n" + " label: LABEL_REQUIRED\n" + + " type: TYPE_INT32\n" + " }\n" + " field {\n" + " name: \"b\"\n" + " number: 2\n" + + " label: LABEL_OPTIONAL\n" + " type: TYPE_STRING\n" + " }\n" + "}\n"; assertEquals(expectedFileDescriptorProto, actualFileDescriptorProto); } - 
@ParameterizedTest @MethodSource("testParseWithDepsProtoFilesProvider") - public void testParseProtoFileAndDependenciesOnDifferentPackagesAndKnownType(boolean failFast, boolean readFiles, File mainProtoFile, File[] deps) - throws Descriptors.DescriptorValidationException, FileDescriptorUtils.ParseSchemaException, FileDescriptorUtils.ReadSchemaException { + public void testParseProtoFileAndDependenciesOnDifferentPackagesAndKnownType(boolean failFast, + boolean readFiles, File mainProtoFile, File[] deps) + throws Descriptors.DescriptorValidationException, FileDescriptorUtils.ParseSchemaException, + FileDescriptorUtils.ReadSchemaException { final Descriptors.FileDescriptor mainProtoFd; final Map requiredSchemaDeps = new HashMap<>(2); if (!readFiles) { if (failFast) { // it fail-fast by default - Assertions.assertThrowsExactly(FileDescriptorUtils.ParseSchemaException.class, () -> - FileDescriptorUtils.parseProtoFileWithDependencies(mainProtoFile, Set.of(deps)) - ); + Assertions.assertThrowsExactly(FileDescriptorUtils.ParseSchemaException.class, + () -> FileDescriptorUtils.parseProtoFileWithDependencies(mainProtoFile, + Set.of(deps))); return; } - mainProtoFd = FileDescriptorUtils.parseProtoFileWithDependencies(mainProtoFile, Set.of(deps), requiredSchemaDeps, false); + mainProtoFd = FileDescriptorUtils.parseProtoFileWithDependencies(mainProtoFile, Set.of(deps), + requiredSchemaDeps, false); } else { if (failFast) { // it fail-fast by default - Assertions.assertThrowsExactly(FileDescriptorUtils.ParseSchemaException.class, () -> - FileDescriptorUtils.parseProtoFileWithDependencies(readSchemaContent(mainProtoFile), readSchemaContents(deps)) - ); + Assertions.assertThrowsExactly(FileDescriptorUtils.ParseSchemaException.class, + () -> FileDescriptorUtils.parseProtoFileWithDependencies( + readSchemaContent(mainProtoFile), readSchemaContents(deps))); return; } - mainProtoFd = FileDescriptorUtils.parseProtoFileWithDependencies(readSchemaContent(mainProtoFile), 
readSchemaContents(deps), requiredSchemaDeps, false); + mainProtoFd = FileDescriptorUtils.parseProtoFileWithDependencies(readSchemaContent(mainProtoFile), + readSchemaContents(deps), requiredSchemaDeps, false); } - final Map expectedSchemaDeps = Map.of( - "mypackage0/producerId.proto", readSelectedFileSchemaAsString("producerId.proto", deps), - "mypackage2/version.proto", readSelectedFileSchemaAsString("version.proto", deps) - ); + final Map expectedSchemaDeps = Map.of("mypackage0/producerId.proto", + readSelectedFileSchemaAsString("producerId.proto", deps), "mypackage2/version.proto", + readSelectedFileSchemaAsString("version.proto", deps)); Assertions.assertEquals(expectedSchemaDeps, requiredSchemaDeps); Assertions.assertNotNull(mainProtoFd.findServiceByName("MyService")); Assertions.assertNotNull(mainProtoFd.findServiceByName("MyService").findMethodByName("Foo")); @@ -209,10 +184,7 @@ public void testParseProtoFileAndDependenciesOnDifferentPackagesAndKnownType(boo DynamicMessage.Builder builder = DynamicMessage.newBuilder(producer); builder.setField(producer.findFieldByName("name"), "name"); builder.setField(producer.findFieldByName("timestamp"), - Timestamp.newBuilder() - .setSeconds(1634123456) - .setNanos(789000000) - .build()); + Timestamp.newBuilder().setSeconds(1634123456).setNanos(789000000).build()); Descriptors.FieldDescriptor personId = producer.findFieldByName("id"); // assert that the id field is the expected msg type assertEquals("mypackage0.ProducerId", personId.getMessageType().getFullName()); @@ -221,17 +193,15 @@ public void testParseProtoFileAndDependenciesOnDifferentPackagesAndKnownType(boo // populate all the rest of the fields in the dynamic message builder.setField(personId, DynamicMessage.newBuilder(personId.getMessageType()) - .setField(versionId, - DynamicMessage.newBuilder(versionId.getMessageType()) - .setField(versionId.getMessageType().findFieldByName("id"), "id") - .build()) - 
.setField(personId.getMessageType().findFieldByName("name"), "name") - .build()); + .setField(versionId, DynamicMessage.newBuilder(versionId.getMessageType()) + .setField(versionId.getMessageType().findFieldByName("id"), "id").build()) + .setField(personId.getMessageType().findFieldByName("name"), "name").build()); assertNotNull(builder.build()); } private static Collection readSchemaContents(File[] files) { - return Arrays.stream(files).map(FileDescriptorUtilsTest::readSchemaContent).collect(Collectors.toList()); + return Arrays.stream(files).map(FileDescriptorUtilsTest::readSchemaContent) + .collect(Collectors.toList()); } private static FileDescriptorUtils.ProtobufSchemaContent readSchemaContent(File file) { @@ -239,9 +209,10 @@ private static FileDescriptorUtils.ProtobufSchemaContent readSchemaContent(File } private static String readSelectedFileSchemaAsString(String fileName, File[] files) { - return Stream.of(files).filter(f -> f.getName().equals(fileName)).collect(Collectors.reducing((a, b) -> { - throw new IllegalStateException("More than one file with name " + fileName + " found"); - })).map(FileDescriptorUtilsTest::readSchemaAsString).get(); + return Stream.of(files).filter(f -> f.getName().equals(fileName)) + .collect(Collectors.reducing((a, b) -> { + throw new IllegalStateException("More than one file with name " + fileName + " found"); + })).map(FileDescriptorUtilsTest::readSchemaAsString).get(); } private static String readSchemaAsString(File file) { @@ -252,32 +223,39 @@ private static String readSchemaAsString(File file) { } } - private Descriptors.FileDescriptor schemaTextToFileDescriptor(String schema, String fileName) throws Exception { - ProtoFileElement protoFileElement = ProtoParser.Companion.parse(FileDescriptorUtils.DEFAULT_LOCATION, schema); - return FileDescriptorUtils.protoFileToFileDescriptor(schema, fileName, Optional.ofNullable(protoFileElement.getPackageName())); + private Descriptors.FileDescriptor schemaTextToFileDescriptor(String 
schema, String fileName) + throws Exception { + ProtoFileElement protoFileElement = ProtoParser.Companion.parse(FileDescriptorUtils.DEFAULT_LOCATION, + schema); + return FileDescriptorUtils.protoFileToFileDescriptor(schema, fileName, + Optional.ofNullable(protoFileElement.getPackageName())); } @ParameterizedTest @MethodSource("testProtoFileProviderForJsonName") - public void ParsesFileDescriptorsAndRawSchemaIntoCanonicalizedForm_ForJsonName_Accurately - (Descriptors.FileDescriptor fileDescriptor) throws Exception { + public void ParsesFileDescriptorsAndRawSchemaIntoCanonicalizedForm_ForJsonName_Accurately( + Descriptors.FileDescriptor fileDescriptor) throws Exception { DescriptorProtos.FileDescriptorProto fileDescriptorProto = fileDescriptor.toProto(); String actualSchema = FileDescriptorUtils.fileDescriptorToProtoFile(fileDescriptorProto).toSchema(); String fileName = fileDescriptorProto.getName(); String expectedSchema = ProtobufTestCaseReader.getRawSchema(fileName); - //Convert to Proto and compare - DescriptorProtos.FileDescriptorProto expectedFileDescriptorProto = schemaTextToFileDescriptor(expectedSchema, fileName).toProto(); - DescriptorProtos.FileDescriptorProto actualFileDescriptorProto = schemaTextToFileDescriptor(actualSchema, fileName).toProto(); + // Convert to Proto and compare + DescriptorProtos.FileDescriptorProto expectedFileDescriptorProto = schemaTextToFileDescriptor( + expectedSchema, fileName).toProto(); + DescriptorProtos.FileDescriptorProto actualFileDescriptorProto = schemaTextToFileDescriptor( + actualSchema, fileName).toProto(); assertEquals(expectedFileDescriptorProto, actualFileDescriptorProto, fileName); - //We are comparing the generated fileDescriptor against the original fileDescriptorProto generated by Protobuf compiler. - //TODO: Square library doesn't respect the ordering of OneOfs and Proto3 optionals. 
- //This will be fixed in upcoming square version, https://github.com/square/wire/pull/2046 + // We are comparing the generated fileDescriptor against the original fileDescriptorProto generated by + // Protobuf compiler. + // TODO: Square library doesn't respect the ordering of OneOfs and Proto3 optionals. + // This will be fixed in upcoming square version, https://github.com/square/wire/pull/2046 - // This assertion is not working for json_name as the generation of FileDescriptorProto will always contain + // This assertion is not working for json_name as the generation of FileDescriptorProto will always + // contain // the json_name field as long as it is specifies (no matter it is default or non default) -// assertThat(expectedFileDescriptorProto).ignoringRepeatedFieldOrder().isEqualTo(fileDescriptorProto); + // assertThat(expectedFileDescriptorProto).ignoringRepeatedFieldOrder().isEqualTo(fileDescriptorProto); } } \ No newline at end of file diff --git a/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/ProtobufTestCaseReader.java b/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/ProtobufTestCaseReader.java index 223cb9e0ec..27f7b83c1f 100644 --- a/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/ProtobufTestCaseReader.java +++ b/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/ProtobufTestCaseReader.java @@ -10,7 +10,8 @@ public class ProtobufTestCaseReader { public static String getRawSchema(String fileName) { try { - return new String(Files.readAllBytes(Paths.get(TEST_PROTO_PATH, fileName)), StandardCharsets.UTF_8); + return new String(Files.readAllBytes(Paths.get(TEST_PROTO_PATH, fileName)), + StandardCharsets.UTF_8); } catch (IOException e) { throw new RuntimeException("Error reading file", e); } diff --git a/utils/tests/pom.xml b/utils/tests/pom.xml index 5d99743d63..211fa5ac4b 100644 --- 
a/utils/tests/pom.xml +++ b/utils/tests/pom.xml @@ -1,121 +1,119 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - - - apicurio-registry-utils-tests - jar - apicurio-registry-utils-tests - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + + + apicurio-registry-utils-tests + jar + apicurio-registry-utils-tests + - - - io.apicurio - apicurio-registry-common - - - - io.quarkus - quarkus-test-common - provided - - - - io.quarkus - quarkus-junit5 - provided - - - - org.apache.httpcomponents - httpclient - - - - org.junit.jupiter - junit-jupiter-api - provided - - - - org.slf4j - slf4j-api - provided - - - - com.github.dasniko - testcontainers-keycloak - provided - - - - org.keycloak - keycloak-admin-client-jakarta - provided - - - - io.zonky.test - embedded-postgres - provided - - - - org.testcontainers - mssqlserver - - - - io.strimzi - strimzi-test-container - - - - com.github.tomakehurst - wiremock-jre8 - - - - io.quarkus - quarkus-smallrye-jwt-build - - - org.jboss.slf4j - slf4j-jboss-logmanager - - - - - io.apicurio - apicurio-registry-java-sdk - - - io.apicurio - apicurio-common-rest-client-common - - + + org.apache.james + apache-mime4j-dom + 0.8.11 + + + org.apache.james + apache-mime4j-storage + 0.8.11 + - - - - org.apache.james - apache-mime4j-dom - 0.8.11 - - - org.apache.james - apache-mime4j-storage - 0.8.11 - - - + + + + + + io.apicurio + apicurio-registry-common + + + + io.quarkus + quarkus-test-common + provided + + + + io.quarkus + quarkus-junit5 + provided + + + + org.apache.httpcomponents + httpclient + + + + org.junit.jupiter + junit-jupiter-api + provided + + + + org.slf4j + slf4j-api + provided + + + + com.github.dasniko + testcontainers-keycloak + provided + + + + + org.keycloak + keycloak-admin-client-jakarta + provided + + + + io.zonky.test + embedded-postgres + provided + + + + org.testcontainers + mssqlserver + + + + io.strimzi + strimzi-test-container + + + + 
com.github.tomakehurst + wiremock-jre8 + + + + io.quarkus + quarkus-smallrye-jwt-build + + + org.jboss.slf4j + slf4j-jboss-logmanager + + + + + io.apicurio + apicurio-registry-java-sdk + + + io.apicurio + apicurio-common-rest-client-common + + + diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/ApicurioTestTags.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/ApicurioTestTags.java index 56e3b841d0..7418609231 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/ApicurioTestTags.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/ApicurioTestTags.java @@ -2,10 +2,13 @@ public class ApicurioTestTags { - /**Docker is required in the running machine to run this test. */ + /** Docker is required in the running machine to run this test. */ public static final String DOCKER = "docker"; - /**Test marked as slow. This usually means that this test uses a profile and therefore an application restart is required. */ + /** + * Test marked as slow. This usually means that this test uses a profile and therefore an application + * restart is required. 
+ */ public static final String SLOW = "slow"; } diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfile.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfile.java index 4071979537..6e6a8fb89c 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfile.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfile.java @@ -16,8 +16,7 @@ public Map getConfigOverrides() { @Override public List testResources() { if (!Boolean.parseBoolean(System.getProperty("cluster.tests"))) { - return List.of( - new TestResourceEntry(JWKSMockServer.class)); + return List.of(new TestResourceEntry(JWKSMockServer.class)); } else { return Collections.emptyList(); } diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileAnonymousCredentials.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileAnonymousCredentials.java index 45136651a7..f56cc98543 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileAnonymousCredentials.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileAnonymousCredentials.java @@ -10,12 +10,12 @@ public class AuthTestProfileAnonymousCredentials implements QuarkusTestProfile { @Override public Map getConfigOverrides() { - return Map.of("apicurio.auth.anonymous-read-access.enabled", "true", "smallrye.jwt.sign.key.location", "privateKey.jwk"); + return Map.of("apicurio.auth.anonymous-read-access.enabled", "true", "smallrye.jwt.sign.key.location", + "privateKey.jwk"); } @Override public List testResources() { - return Collections.singletonList( - new TestResourceEntry(JWKSMockServer.class)); + return Collections.singletonList(new TestResourceEntry(JWKSMockServer.class)); } } diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileAuthenticatedReadAccess.java 
b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileAuthenticatedReadAccess.java index 2e89a6ef60..7922d7306e 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileAuthenticatedReadAccess.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileAuthenticatedReadAccess.java @@ -10,12 +10,12 @@ public class AuthTestProfileAuthenticatedReadAccess implements QuarkusTestProfil @Override public Map getConfigOverrides() { - return Map.of("apicurio.auth.authenticated-read-access.enabled", "true", "smallrye.jwt.sign.key.location", "privateKey.jwk"); + return Map.of("apicurio.auth.authenticated-read-access.enabled", "true", + "smallrye.jwt.sign.key.location", "privateKey.jwk"); } @Override public List testResources() { - return Collections.singletonList( - new TestResourceEntry(JWKSMockServer.class)); + return Collections.singletonList(new TestResourceEntry(JWKSMockServer.class)); } } diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileWithHeaderRoles.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileWithHeaderRoles.java index b1a0829e66..86c30f6d9b 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileWithHeaderRoles.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileWithHeaderRoles.java @@ -10,12 +10,12 @@ public class AuthTestProfileWithHeaderRoles implements QuarkusTestProfile { @Override public Map getConfigOverrides() { - return Map.of("smallrye.jwt.sign.key.location", "privateKey.jwk", "apicurio.auth.role-source", "header"); + return Map.of("smallrye.jwt.sign.key.location", "privateKey.jwk", "apicurio.auth.role-source", + "header"); } @Override public List testResources() { - return Collections.singletonList( - new QuarkusTestProfile.TestResourceEntry(JWKSMockServer.class)); + return Collections.singletonList(new 
QuarkusTestProfile.TestResourceEntry(JWKSMockServer.class)); } } diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileWithLocalRoles.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileWithLocalRoles.java index 6a8e8f13b7..7f0b95b597 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileWithLocalRoles.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/AuthTestProfileWithLocalRoles.java @@ -1,21 +1,21 @@ package io.apicurio.registry.utils.tests; +import io.quarkus.test.junit.QuarkusTestProfile; + import java.util.Collections; import java.util.List; import java.util.Map; -import io.quarkus.test.junit.QuarkusTestProfile; - public class AuthTestProfileWithLocalRoles implements QuarkusTestProfile { @Override public Map getConfigOverrides() { - return Map.of("smallrye.jwt.sign.key.location", "privateKey.jwk", "apicurio.auth.role-source", "application"); + return Map.of("smallrye.jwt.sign.key.location", "privateKey.jwk", "apicurio.auth.role-source", + "application"); } @Override public List testResources() { - return Collections.singletonList( - new TestResourceEntry(JWKSMockServer.class)); + return Collections.singletonList(new TestResourceEntry(JWKSMockServer.class)); } } \ No newline at end of file diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/JWKSMockServer.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/JWKSMockServer.java index fca4bcde75..f3aa435ab8 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/JWKSMockServer.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/JWKSMockServer.java @@ -1,5 +1,17 @@ package io.apicurio.registry.utils.tests; +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder; +import com.github.tomakehurst.wiremock.client.WireMock; +import 
io.quarkus.test.common.QuarkusTestResourceLifecycleManager; +import io.smallrye.jwt.build.Jwt; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; + import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.get; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; @@ -7,19 +19,6 @@ import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; import static io.vertx.ext.auth.impl.Codec.base64Encode; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; - -import io.quarkus.test.common.QuarkusTestResourceLifecycleManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.github.tomakehurst.wiremock.WireMockServer; -import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder; -import com.github.tomakehurst.wiremock.client.WireMock; -import io.smallrye.jwt.build.Jwt; - public class JWKSMockServer implements QuarkusTestResourceLifecycleManager { static final Logger LOGGER = LoggerFactory.getLogger(JWKSMockServer.class); @@ -44,52 +43,37 @@ public class JWKSMockServer implements QuarkusTestResourceLifecycleManager { public static String BASIC_USER_A = "sr-test-user-a"; public static String BASIC_USER_B = "sr-test-user-b"; - @Override public Map start() { - server = new WireMockServer( - wireMockConfig() - .dynamicPort()); + server = new WireMockServer(wireMockConfig().dynamicPort()); server.start(); - server.stubFor( - get(urlMatching("/auth/realms/" + realm + "/.well-known/uma2-configuration")) - .willReturn(wellKnownResponse())); - server.stubFor( - get(urlMatching("/auth/realms/" + realm + "/.well-known/openid-configuration")) - .willReturn(wellKnownResponse())); - - server.stubFor( - get(urlEqualTo("/auth/realms/" + realm + "/protocol/openid-connect/certs")) - 
.willReturn(aResponse() - .withHeader("Content-Type", "application/json") - .withBody("{\n" + - " \"keys\" : [\n" + - " {\n" + - " \"kid\": \"1\",\n" + - " \"kty\":\"RSA\",\n" + - " \"n\":\"iJw33l1eVAsGoRlSyo-FCimeOc-AaZbzQ2iESA3Nkuo3TFb1zIkmt0kzlnWVGt48dkaIl13Vdefh9hqw_r9yNF8xZqX1fp0PnCWc5M_TX_ht5fm9y0TpbiVmsjeRMWZn4jr3DsFouxQ9aBXUJiu26V0vd2vrECeeAreFT4mtoHY13D2WVeJvboc5mEJcp50JNhxRCJ5UkY8jR_wfUk2Tzz4-fAj5xQaBccXnqJMu_1C6MjoCEiB7G1d13bVPReIeAGRKVJIF6ogoCN8JbrOhc_48lT4uyjbgnd24beatuKWodmWYhactFobRGYo5551cgMe8BoxpVQ4to30cGA0qjQ\",\n" - + - " \"e\":\"AQAB\"\n" + - " }\n" + - " ]\n" + - "}"))); - - //Admin user stub + server.stubFor(get(urlMatching("/auth/realms/" + realm + "/.well-known/uma2-configuration")) + .willReturn(wellKnownResponse())); + server.stubFor(get(urlMatching("/auth/realms/" + realm + "/.well-known/openid-configuration")) + .willReturn(wellKnownResponse())); + + server.stubFor(get(urlEqualTo("/auth/realms/" + realm + "/protocol/openid-connect/certs")).willReturn( + aResponse().withHeader("Content-Type", "application/json").withBody("{\n" + " \"keys\" : [\n" + + " {\n" + " \"kid\": \"1\",\n" + " \"kty\":\"RSA\",\n" + + " \"n\":\"iJw33l1eVAsGoRlSyo-FCimeOc-AaZbzQ2iESA3Nkuo3TFb1zIkmt0kzlnWVGt48dkaIl13Vdefh9hqw_r9yNF8xZqX1fp0PnCWc5M_TX_ht5fm9y0TpbiVmsjeRMWZn4jr3DsFouxQ9aBXUJiu26V0vd2vrECeeAreFT4mtoHY13D2WVeJvboc5mEJcp50JNhxRCJ5UkY8jR_wfUk2Tzz4-fAj5xQaBccXnqJMu_1C6MjoCEiB7G1d13bVPReIeAGRKVJIF6ogoCN8JbrOhc_48lT4uyjbgnd24beatuKWodmWYhactFobRGYo5551cgMe8BoxpVQ4to30cGA0qjQ\",\n" + + " \"e\":\"AQAB\"\n" + " }\n" + " ]\n" + "}"))); + + // Admin user stub stubForClient(ADMIN_CLIENT_ID); - //Stub for clients with credentials as header + // Stub for clients with credentials as header stubForClient(ADMIN_CLIENT_ID, "test1"); - //Developer user stub + // Developer user stub stubForClient(DEVELOPER_CLIENT_ID, "test1"); stubForClient(DEVELOPER_2_CLIENT_ID, "test1"); stubForClient(DEVELOPER_CLIENT_ID, "test1"); stubForClient(DEVELOPER_2_CLIENT_ID, "test1"); - //Read 
only user stub + // Read only user stub stubForClient(READONLY_CLIENT_ID, "test1"); stubForClient(READONLY_CLIENT_ID, "test1"); - //Token without roles stub + // Token without roles stub stubForClient(NO_ROLE_CLIENT_ID, "test1"); stubForClient(NO_ROLE_CLIENT_ID, "test1"); @@ -101,11 +85,12 @@ public Map start() { this.authServerUrl = server.baseUrl() + "/auth" + "/realms/" + realm; LOGGER.info("Keycloak started in mock mode: {}", authServerUrl); - this.tokenEndpoint = server.baseUrl() + "/auth" + "/realms/" + realm + "/protocol/openid-connect/token/"; + this.tokenEndpoint = server.baseUrl() + "/auth" + "/realms/" + realm + + "/protocol/openid-connect/token/"; Map props = new HashMap<>(); - //Set registry properties + // Set registry properties props.put("quarkus.oidc.auth-server-url", authServerUrl); props.put("quarkus.oidc.token-path", tokenEndpoint); props.put("quarkus.oidc.tenant-enabled", "true"); @@ -118,13 +103,10 @@ public Map start() { } private ResponseDefinitionBuilder wellKnownResponse() { - return aResponse() - .withHeader("Content-Type", "application/json") - .withBody("{\n" + - " \"jwks_uri\": \"" + server.baseUrl() - + "/auth/realms/" + realm + "/protocol/openid-connect/certs\",\n" - + " \"token_endpoint\": \"" + server.baseUrl() + "/auth/realms/" + realm + "/protocol/openid-connect/token/\" " - + "}"); + return aResponse().withHeader("Content-Type", "application/json") + .withBody("{\n" + " \"jwks_uri\": \"" + server.baseUrl() + "/auth/realms/" + realm + + "/protocol/openid-connect/certs\",\n" + " \"token_endpoint\": \"" + server.baseUrl() + + "/auth/realms/" + realm + "/protocol/openid-connect/token/\" " + "}"); } private String generateJwtToken(String userName, String orgId) { @@ -144,130 +126,98 @@ private String generateJwtToken(String userName, String orgId) { b.claim("rh-org-id", orgId); } - return b.jws() - .keyId("1") - .sign(); + return b.jws().keyId("1").sign(); } private void stubForBasicUser(String username, String password) { - 
//TODO:carnalca this will be revisited with the auth refactor + // TODO:carnalca this will be revisited with the auth refactor server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token") .withRequestBody(WireMock.containing("grant_type=client_credentials")) .withRequestBody(WireMock.containing("client_id=" + username)) .withRequestBody(WireMock.containing("client_secret=" + password)) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withBody("{\n" + - " \"access_token\": \"" - + generateJwtToken(ADMIN_CLIENT_ID, null) + "\",\n" + - " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + - " \"token_type\": \"bearer\"\n" + - "}"))); + .willReturn(WireMock.aResponse().withHeader("Content-Type", "application/json") + .withBody("{\n" + " \"access_token\": \"" + generateJwtToken(ADMIN_CLIENT_ID, null) + + "\",\n" + " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + + " \"token_type\": \"bearer\"\n" + "}"))); server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token/") .withRequestBody(WireMock.containing("grant_type=client_credentials")) .withRequestBody(WireMock.containing("client_id=" + BASIC_USER)) .withRequestBody(WireMock.containing("client_secret=" + BASIC_PASSWORD)) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withBody("{\n" + - " \"access_token\": \"" - + generateJwtToken(ADMIN_CLIENT_ID, null) + "\",\n" + - " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + - " \"token_type\": \"bearer\"\n" + - "}"))); + .willReturn(WireMock.aResponse().withHeader("Content-Type", "application/json") + .withBody("{\n" + " \"access_token\": \"" + generateJwtToken(ADMIN_CLIENT_ID, null) + + "\",\n" + " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + + " \"token_type\": \"bearer\"\n" + "}"))); } private void stubForClient(String client) { - //TODO:carnalca this will be revisited 
with the auth refactor + // TODO:carnalca this will be revisited with the auth refactor server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token") .withRequestBody(WireMock.containing("grant_type=client_credentials")) .withRequestBody(WireMock.containing("client_id=" + client)) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withBody("{\n" + - " \"access_token\": \"" - + generateJwtToken(client, null) + "\",\n" + - " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + - " \"token_type\": \"bearer\"\n" + - "}"))); + .willReturn(WireMock.aResponse().withHeader("Content-Type", "application/json") + .withBody("{\n" + " \"access_token\": \"" + generateJwtToken(client, null) + "\",\n" + + " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + + " \"token_type\": \"bearer\"\n" + "}"))); server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token/") .withRequestBody(WireMock.containing("grant_type=client_credentials")) .withRequestBody(WireMock.containing("client_id=" + client)) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withBody("{\n" + - " \"access_token\": \"" - + generateJwtToken(client, null) + "\",\n" + - " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + - " \"token_type\": \"bearer\"\n" + - "}"))); + .willReturn(WireMock.aResponse().withHeader("Content-Type", "application/json") + .withBody("{\n" + " \"access_token\": \"" + generateJwtToken(client, null) + "\",\n" + + " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + + " \"token_type\": \"bearer\"\n" + "}"))); } private void stubForClient(String client, String clientSecret) { - //TODO:carnalca this will be revisited with the auth refactor + // TODO:carnalca this will be revisited with the auth refactor server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token") 
.withHeader("Authorization", WireMock.containing(buildBasicAuthHeader(client, clientSecret))) .withRequestBody(WireMock.containing("grant_type=client_credentials")) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withBody("{\n" + - " \"access_token\": \"" - + generateJwtToken(client, null) + "\",\n" + - " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + - " \"token_type\": \"bearer\"\n" + - "}"))); + .willReturn(WireMock.aResponse().withHeader("Content-Type", "application/json") + .withBody("{\n" + " \"access_token\": \"" + generateJwtToken(client, null) + "\",\n" + + " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + + " \"token_type\": \"bearer\"\n" + "}"))); server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token/") .withHeader("Authorization", WireMock.containing(buildBasicAuthHeader(client, clientSecret))) .withRequestBody(WireMock.containing("grant_type=client_credentials")) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withBody("{\n" + - " \"access_token\": \"" - + generateJwtToken(client, null) + "\",\n" + - " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + - " \"token_type\": \"bearer\"\n" + - "}"))); + .willReturn(WireMock.aResponse().withHeader("Content-Type", "application/json") + .withBody("{\n" + " \"access_token\": \"" + generateJwtToken(client, null) + "\",\n" + + " \"refresh_token\": \"07e08903-1263-4dd1-9fd1-4a59b0db5283\",\n" + + " \"token_type\": \"bearer\"\n" + "}"))); } private void stubForClientWithWrongCreds(String client, String clientSecret) { - //TODO:carnalca this will be revisited with the auth refactor + // TODO:carnalca this will be revisited with the auth refactor server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token") .withRequestBody(WireMock.containing("grant_type=client_credentials")) .withHeader("Authorization", 
WireMock.containing(buildBasicAuthHeader(client, clientSecret))) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withStatus(401))); + .willReturn( + WireMock.aResponse().withHeader("Content-Type", "application/json").withStatus(401))); server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token/") .withRequestBody(WireMock.containing("grant_type=client_credentials")) .withHeader("Authorization", WireMock.containing(buildBasicAuthHeader(client, clientSecret))) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withStatus(401))); + .willReturn( + WireMock.aResponse().withHeader("Content-Type", "application/json").withStatus(401))); - //Wrong credentials stub + // Wrong credentials stub server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token") .withRequestBody(WireMock.containing("grant_type=client_credentials")) - .withRequestBody(WireMock.containing("client_id=" + WRONG_CREDS_CLIENT_ID)) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withStatus(401))); + .withRequestBody(WireMock.containing("client_id=" + WRONG_CREDS_CLIENT_ID)).willReturn( + WireMock.aResponse().withHeader("Content-Type", "application/json").withStatus(401))); - //Wrong credentials stub + // Wrong credentials stub server.stubFor(WireMock.post("/auth/realms/" + realm + "/protocol/openid-connect/token/") .withRequestBody(WireMock.containing("grant_type=client_credentials")) - .withRequestBody(WireMock.containing("client_id=" + WRONG_CREDS_CLIENT_ID)) - .willReturn(WireMock.aResponse() - .withHeader("Content-Type", "application/json") - .withStatus(401))); + .withRequestBody(WireMock.containing("client_id=" + WRONG_CREDS_CLIENT_ID)).willReturn( + WireMock.aResponse().withHeader("Content-Type", "application/json").withStatus(401))); } private String buildBasicAuthHeader(String username, String password) { - String basic = 
username+ ":" + password; - return "Basic " + base64Encode(basic.getBytes(StandardCharsets.UTF_8)); + String basic = username + ":" + password; + return "Basic " + base64Encode(basic.getBytes(StandardCharsets.UTF_8)); } public synchronized void stop() { diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkaTestContainerManager.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkaTestContainerManager.java index 64ef460b36..df0d0432ac 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkaTestContainerManager.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkaTestContainerManager.java @@ -37,8 +37,7 @@ public Map start() { System.setProperty("bootstrap.servers.external", externalBootstrapServers); - return Map.of( - "bootstrap.servers", externalBootstrapServers, + return Map.of("bootstrap.servers", externalBootstrapServers, "apicurio.events.kafka.config.bootstrap.servers", externalBootstrapServers, "apicurio.kafkasql.bootstrap.servers", externalBootstrapServers); } else { diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkasqlRecoverFromSnapshotTestProfile.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkasqlRecoverFromSnapshotTestProfile.java index dd806ebab3..3ff76d54d3 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkasqlRecoverFromSnapshotTestProfile.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkasqlRecoverFromSnapshotTestProfile.java @@ -11,16 +11,15 @@ public class KafkasqlRecoverFromSnapshotTestProfile implements QuarkusTestProfil @Override public Map getConfigOverrides() { - return Map.of("apicurio.storage.kind", "kafkasql", "apicurio.datasource.url", "jdbc:h2:mem:" + UUID.randomUUID()); + return Map.of("apicurio.storage.kind", "kafkasql", "apicurio.datasource.url", + "jdbc:h2:mem:" + UUID.randomUUID()); } @Override public List testResources() { if 
(!Boolean.parseBoolean(System.getProperty("cluster.tests"))) { - return List.of( - new TestResourceEntry(KafkaTestContainerManager.class)); - } - else { + return List.of(new TestResourceEntry(KafkaTestContainerManager.class)); + } else { return Collections.emptyList(); } } diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkasqlTestProfile.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkasqlTestProfile.java index 57c9ec3b11..00e174acd8 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkasqlTestProfile.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/KafkasqlTestProfile.java @@ -16,10 +16,8 @@ public Map getConfigOverrides() { @Override public List testResources() { if (!Boolean.parseBoolean(System.getProperty("cluster.tests"))) { - return List.of( - new TestResourceEntry(KafkaTestContainerManager.class)); - } - else { + return List.of(new TestResourceEntry(KafkaTestContainerManager.class)); + } else { return Collections.emptyList(); } } diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/MsSqlEmbeddedTestResource.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/MsSqlEmbeddedTestResource.java index fe5ca28534..9c69c2309e 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/MsSqlEmbeddedTestResource.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/MsSqlEmbeddedTestResource.java @@ -10,14 +10,14 @@ import java.util.Map; public class MsSqlEmbeddedTestResource implements QuarkusTestResourceLifecycleManager { - + private static final String DB_PASSWORD = "P4ssw0rd!#"; - private static final DockerImageName IMAGE = DockerImageName.parse("mcr.microsoft.com/mssql/server").withTag("2022-latest"); - private MSSQLServerContainer database = new MSSQLServerContainer<>(IMAGE) - .withPassword(DB_PASSWORD) + private static final DockerImageName IMAGE = DockerImageName.parse("mcr.microsoft.com/mssql/server") + 
.withTag("2022-latest"); + private MSSQLServerContainer database = new MSSQLServerContainer<>(IMAGE).withPassword(DB_PASSWORD) .acceptLicense(); - + /** * Constructor. */ diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/PostgreSqlEmbeddedTestResource.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/PostgreSqlEmbeddedTestResource.java index 3be01b36fe..8a116d2506 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/PostgreSqlEmbeddedTestResource.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/PostgreSqlEmbeddedTestResource.java @@ -38,7 +38,8 @@ public Map start() { } private static boolean isPostgresqlStorage() { - return ConfigProvider.getConfig().getValue("apicurio.storage.sql.kind", String.class).equals("postgresql"); + return ConfigProvider.getConfig().getValue("apicurio.storage.sql.kind", String.class) + .equals("postgresql"); } private Map startPostgresql() { diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/SimpleDisplayName.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/SimpleDisplayName.java index 86aa982b80..abb2a0f80b 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/SimpleDisplayName.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/SimpleDisplayName.java @@ -1,9 +1,9 @@ package io.apicurio.registry.utils.tests; -import java.lang.reflect.Method; - import org.junit.jupiter.api.DisplayNameGenerator; +import java.lang.reflect.Method; + public class SimpleDisplayName extends DisplayNameGenerator.ReplaceUnderscores { @Override diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/TestUtils.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/TestUtils.java index a83dd66ef9..e4e8b35cb2 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/TestUtils.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/TestUtils.java @@ -1,6 +1,5 @@ 
package io.apicurio.registry.utils.tests; - import com.microsoft.kiota.ApiException; import io.apicurio.registry.rest.client.models.CreateArtifact; import io.apicurio.registry.rest.client.models.CreateVersion; @@ -37,9 +36,12 @@ public class TestUtils { private static final String DEFAULT_REGISTRY_HOST = "localhost"; private static final int DEFAULT_REGISTRY_PORT = 8081; - private static final String REGISTRY_HOST = System.getenv().getOrDefault("REGISTRY_HOST", DEFAULT_REGISTRY_HOST); - private static final int REGISTRY_PORT = Integer.parseInt(System.getenv().getOrDefault("REGISTRY_PORT", String.valueOf(DEFAULT_REGISTRY_PORT))); - private static final String EXTERNAL_REGISTRY = System.getenv().getOrDefault("EXTERNAL_REGISTRY", "false"); + private static final String REGISTRY_HOST = System.getenv().getOrDefault("REGISTRY_HOST", + DEFAULT_REGISTRY_HOST); + private static final int REGISTRY_PORT = Integer + .parseInt(System.getenv().getOrDefault("REGISTRY_PORT", String.valueOf(DEFAULT_REGISTRY_PORT))); + private static final String EXTERNAL_REGISTRY = System.getenv().getOrDefault("EXTERNAL_REGISTRY", + "false"); private TestUtils() { // All static methods @@ -113,7 +115,7 @@ public static boolean isReachable() { log.info("Trying to connect to {}:{}", host, port); socket.connect(new InetSocketAddress(host, port), 5_000); log.info("Client is able to connect to Registry instance"); - return true; + return true; } catch (IOException ex) { log.warn("Cannot connect to Registry instance: {}", ex.getMessage()); return false; // Either timeout or unreachable or failed DNS lookup. 
@@ -122,6 +124,7 @@ public static boolean isReachable() { /** * Generic check if an endpoint is network reachable + * * @param host * @param port * @param component @@ -132,14 +135,13 @@ public static boolean isReachable(String host, int port, String component) { log.info("Trying to connect to {}:{}", host, port); socket.connect(new InetSocketAddress(host, port), 5_000); log.info("Client is able to connect to " + component); - return true; + return true; } catch (IOException ex) { log.warn("Cannot connect to {}: {}", component, ex.getMessage()); return false; // Either timeout or unreachable or failed DNS lookup. } } - /** * Checks the readniess endpoint of the registry * @@ -159,7 +161,8 @@ public static boolean isReady(boolean logResponse) { */ public static boolean isReady(String baseUrl, String healthUrl, boolean logResponse, String component) { try { - CloseableHttpResponse res = HttpClients.createMinimal().execute(new HttpGet(baseUrl.concat(healthUrl))); + CloseableHttpResponse res = HttpClients.createMinimal() + .execute(new HttpGet(baseUrl.concat(healthUrl))); boolean ok = res.getStatusLine().getStatusCode() == HttpStatus.SC_OK; if (ok) { log.info(component + " is ready"); @@ -178,16 +181,19 @@ public static boolean isReady(String baseUrl, String healthUrl, boolean logRespo /** * Poll the given {@code ready} function every {@code pollIntervalMs} milliseconds until it returns true, - * or throw a TimeoutException if it doesn't returns true within {@code timeoutMs} milliseconds. - * (helpful if you have several calls which need to share a common timeout) + * or throw a TimeoutException if it doesn't returns true within {@code timeoutMs} milliseconds. 
(helpful + * if you have several calls which need to share a common timeout) * * @return The remaining time left until timeout occurs */ - public static long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready) throws TimeoutException { - return waitFor(description, pollIntervalMs, timeoutMs, ready, () -> {}); + public static long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready) + throws TimeoutException { + return waitFor(description, pollIntervalMs, timeoutMs, ready, () -> { + }); } - public static long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready, Runnable onTimeout) throws TimeoutException { + public static long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready, + Runnable onTimeout) throws TimeoutException { log.debug("Waiting for {}", description); long deadline = System.currentTimeMillis() + timeoutMs; while (true) { @@ -203,13 +209,15 @@ public static long waitFor(String description, long pollIntervalMs, long timeout } if (timeLeft <= 0) { onTimeout.run(); - TimeoutException exception = new TimeoutException("Timeout after " + timeoutMs + " ms waiting for " + description); + TimeoutException exception = new TimeoutException( + "Timeout after " + timeoutMs + " ms waiting for " + description); exception.printStackTrace(); throw exception; } long sleepTime = Math.min(pollIntervalMs, timeLeft); if (log.isTraceEnabled()) { - log.trace("{} not ready, will try again in {} ms ({}ms till timeout)", description, sleepTime, timeLeft); + log.trace("{} not ready, will try again in {} ms ({}ms till timeout)", description, sleepTime, + timeLeft); } try { Thread.sleep(sleepTime); @@ -223,7 +231,7 @@ public static long waitFor(String description, long pollIntervalMs, long timeout * Method to create and write String content file. 
* * @param filePath path to file - * @param text content + * @param text content */ public static void writeFile(String filePath, String text) { try { @@ -261,7 +269,8 @@ public static String generateGroupId() { return UUID.randomUUID().toString(); } - public static CreateArtifact clientCreateArtifact(String artifactId, String artifactType, String content, String contentType) { + public static CreateArtifact clientCreateArtifact(String artifactId, String artifactType, String content, + String contentType) { CreateArtifact createArtifact = new CreateArtifact(); createArtifact.setArtifactId(artifactId); createArtifact.setArtifactType(artifactType); @@ -272,20 +281,15 @@ public static CreateArtifact clientCreateArtifact(String artifactId, String arti return createArtifact; } - public static io.apicurio.registry.rest.v3.beans.CreateArtifact serverCreateArtifact(String artifactId, String artifactType, String content, String contentType) { - return io.apicurio.registry.rest.v3.beans.CreateArtifact.builder() - .artifactId(artifactId) - .artifactType(artifactType) - .firstVersion( - io.apicurio.registry.rest.v3.beans.CreateVersion.builder() - .content( - io.apicurio.registry.rest.v3.beans.VersionContent.builder() - .contentType(contentType) - .content(content) - .build() - ) - .build() - ) + public static io.apicurio.registry.rest.v3.beans.CreateArtifact serverCreateArtifact(String artifactId, + String artifactType, String content, String contentType) { + return io.apicurio.registry.rest.v3.beans.CreateArtifact + .builder().artifactId(artifactId).artifactType( + artifactType) + .firstVersion(io.apicurio.registry.rest.v3.beans.CreateVersion.builder() + .content(io.apicurio.registry.rest.v3.beans.VersionContent.builder() + .contentType(contentType).content(content).build()) + .build()) .build(); } @@ -297,14 +301,11 @@ public static CreateVersion clientCreateVersion(String content, String contentTy return createVersion; } - public static 
io.apicurio.registry.rest.v3.beans.CreateVersion serverCreateVersion(String content, String contentType) { + public static io.apicurio.registry.rest.v3.beans.CreateVersion serverCreateVersion(String content, + String contentType) { return io.apicurio.registry.rest.v3.beans.CreateVersion.builder() - .content( - io.apicurio.registry.rest.v3.beans.VersionContent.builder() - .contentType(contentType) - .content(content) - .build() - ) + .content(io.apicurio.registry.rest.v3.beans.VersionContent.builder().contentType(contentType) + .content(content).build()) .build(); } @@ -357,7 +358,8 @@ private static T retry(Callable callable, String name, int maxRetries) th throw new IllegalStateException("Should not be here!"); } - public static void assertClientError(String expectedErrorName, int expectedCode, RunnableExc runnable, Function errorCodeExtractor) throws Exception { + public static void assertClientError(String expectedErrorName, int expectedCode, RunnableExc runnable, + Function errorCodeExtractor) throws Exception { try { internalAssertClientError(expectedErrorName, expectedCode, runnable, errorCodeExtractor); } catch (Exception e) { @@ -365,21 +367,26 @@ public static void assertClientError(String expectedErrorName, int expectedCode, } } - public static void assertClientError(String expectedErrorName, int expectedCode, RunnableExc runnable, boolean retry, Function errorCodeExtractor) throws Exception { + public static void assertClientError(String expectedErrorName, int expectedCode, RunnableExc runnable, + boolean retry, Function errorCodeExtractor) throws Exception { if (retry) { - retry(() -> internalAssertClientError(expectedErrorName, expectedCode, runnable, errorCodeExtractor)); + retry(() -> internalAssertClientError(expectedErrorName, expectedCode, runnable, + errorCodeExtractor)); } else { internalAssertClientError(expectedErrorName, expectedCode, runnable, errorCodeExtractor); } } - private static void internalAssertClientError(String expectedErrorName, 
int expectedCode, RunnableExc runnable, Function errorCodeExtractor) { + private static void internalAssertClientError(String expectedErrorName, int expectedCode, + RunnableExc runnable, Function errorCodeExtractor) { try { runnable.run(); - Assertions.fail("Expected (but didn't get) a registry client application exception with code: " + expectedCode); + Assertions.fail("Expected (but didn't get) a registry client application exception with code: " + + expectedCode); } catch (Exception ex) { if (ex instanceof io.apicurio.registry.rest.client.models.Error) { - Assertions.assertEquals(expectedErrorName, ((io.apicurio.registry.rest.client.models.Error) ex).getName(), () -> "ex: " + ex); + Assertions.assertEquals(expectedErrorName, + ((io.apicurio.registry.rest.client.models.Error) ex).getName(), () -> "ex: " + ex); Assertions.assertEquals(expectedCode, errorCodeExtractor.apply(ex)); } else { Assertions.assertEquals(expectedCode, ((ApiException) ex).getResponseStatusCode()); @@ -393,7 +400,8 @@ public static void waitForSchema(Predicate schemaFinder, byte[] bytes) thr waitForSchema(schemaFinder, bytes, ByteBuffer::getLong); } - public static void waitForSchema(Predicate schemaFinder, byte[] bytes, Function globalIdExtractor) throws Exception { + public static void waitForSchema(Predicate schemaFinder, byte[] bytes, + Function globalIdExtractor) throws Exception { waitForSchemaCustom(schemaFinder, bytes, input -> { ByteBuffer buffer = ByteBuffer.wrap(input); buffer.get(); // magic byte @@ -402,7 +410,8 @@ public static void waitForSchema(Predicate schemaFinder, byte[] bytes, Fun } // we can have non-default Apicurio serialization; e.g. 
ExtJsonConverter - public static void waitForSchemaCustom(Predicate schemaFinder, byte[] bytes, Function globalIdExtractor) throws Exception { + public static void waitForSchemaCustom(Predicate schemaFinder, byte[] bytes, + Function globalIdExtractor) throws Exception { long id = globalIdExtractor.apply(bytes); boolean schemaExists = retry(() -> schemaFinder.test(id)); Assertions.assertTrue(schemaExists); // wait for global id to populate diff --git a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/TooManyRequestsMock.java b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/TooManyRequestsMock.java index 160f0f6e68..d41298f47a 100644 --- a/utils/tests/src/main/java/io/apicurio/registry/utils/tests/TooManyRequestsMock.java +++ b/utils/tests/src/main/java/io/apicurio/registry/utils/tests/TooManyRequestsMock.java @@ -1,16 +1,15 @@ package io.apicurio.registry.utils.tests; -import static com.github.tomakehurst.wiremock.client.WireMock.any; -import static com.github.tomakehurst.wiremock.client.WireMock.anyUrl; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.WireMockServer; import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.anyUrl; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; public class TooManyRequestsMock { @@ -19,28 +18,19 @@ public class TooManyRequestsMock { private WireMockServer server; public void start() { - server = new WireMockServer( - wireMockConfig() - .dynamicPort()); + server = new WireMockServer(wireMockConfig().dynamicPort()); 
server.start(); -// kind: "Error" -// id: "429" -// code: "SERVICEREGISTRY-429" -// reason: "Too Many Requests" + // kind: "Error" + // id: "429" + // code: "SERVICEREGISTRY-429" + // reason: "Too Many Requests" - JsonNode body = new ObjectMapper().createObjectNode() - .put("kind", "Error") - .put("id", "429") - .put("code", "SERVICEREGISTRY-429") - .put("reason", "Too Many Requests"); + JsonNode body = new ObjectMapper().createObjectNode().put("kind", "Error").put("id", "429") + .put("code", "SERVICEREGISTRY-429").put("reason", "Too Many Requests"); server.stubFor( - any(anyUrl()) - .willReturn( - new ResponseDefinitionBuilder().withStatus(429) - .withJsonBody(body)) - ); + any(anyUrl()).willReturn(new ResponseDefinitionBuilder().withStatus(429).withJsonBody(body))); } public String getMockUrl() { diff --git a/utils/tools/pom.xml b/utils/tools/pom.xml index d7f8c879ec..7e0568a298 100644 --- a/utils/tools/pom.xml +++ b/utils/tools/pom.xml @@ -1,48 +1,45 @@ - - - 4.0.0 - - io.apicurio - apicurio-registry - 3.0.0-SNAPSHOT - ../../pom.xml - - - apicurio-registry-utils-tools - jar - apicurio-registry-utils-tools - - - - - io.apicurio - apicurio-data-models - - - - org.apache.kafka - kafka-streams - - - - commons-io - commons-io - - - - org.jboss.slf4j - slf4j-jboss-logging - ${jboss-slf4j.version} - - - - org.junit.jupiter - junit-jupiter-api - test - - - + + + 4.0.0 + + io.apicurio + apicurio-registry + 3.0.0-SNAPSHOT + ../../pom.xml + + + apicurio-registry-utils-tools + jar + apicurio-registry-utils-tools + + + + + io.apicurio + apicurio-data-models + + + + org.apache.kafka + kafka-streams + + + + commons-io + commons-io + + + + org.jboss.slf4j + slf4j-jboss-logging + ${jboss-slf4j.version} + + + + org.junit.jupiter + junit-jupiter-api + test + + + diff --git a/utils/tools/src/main/java/io/apicurio/registry/utils/tools/AddOpenApiAuth.java b/utils/tools/src/main/java/io/apicurio/registry/utils/tools/AddOpenApiAuth.java index a91f7d2802..626a24a5b4 100644 --- 
a/utils/tools/src/main/java/io/apicurio/registry/utils/tools/AddOpenApiAuth.java +++ b/utils/tools/src/main/java/io/apicurio/registry/utils/tools/AddOpenApiAuth.java @@ -1,17 +1,16 @@ package io.apicurio.registry.utils.tools; -import java.io.File; -import java.io.FileInputStream; -import java.nio.charset.StandardCharsets; -import java.util.Collections; - -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOUtils; - import io.apicurio.datamodels.Library; import io.apicurio.datamodels.models.openapi.v30.OpenApi30Document; import io.apicurio.datamodels.models.openapi.v30.OpenApi30SecurityRequirement; import io.apicurio.datamodels.models.openapi.v30.OpenApi30SecurityScheme; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; + +import java.io.File; +import java.io.FileInputStream; +import java.nio.charset.StandardCharsets; +import java.util.Collections; public class AddOpenApiAuth { @@ -39,16 +38,19 @@ public static void main(String[] args) throws Exception { System.out.println("Adding security scheme and requirement."); // Read the source openapi document. - OpenApi30Document document = (OpenApi30Document) Library.readDocumentFromJSONString(inputDocumentString); + OpenApi30Document document = (OpenApi30Document) Library + .readDocumentFromJSONString(inputDocumentString); // Create a security scheme for basic auth - OpenApi30SecurityScheme securityScheme = (OpenApi30SecurityScheme) document.getComponents().createSecurityScheme(); + OpenApi30SecurityScheme securityScheme = (OpenApi30SecurityScheme) document.getComponents() + .createSecurityScheme(); securityScheme.setType("http"); securityScheme.setScheme("basic"); document.getComponents().addSecurityScheme("basicAuth", securityScheme); // And now *use* the basic auth security scheme. 
- OpenApi30SecurityRequirement securityRequirement = (OpenApi30SecurityRequirement) document.createSecurityRequirement(); + OpenApi30SecurityRequirement securityRequirement = (OpenApi30SecurityRequirement) document + .createSecurityRequirement(); securityRequirement.addItem("basicAuth", Collections.emptyList()); document.addSecurity(securityRequirement); diff --git a/utils/tools/src/main/java/io/apicurio/registry/utils/tools/TransformOpenApiForClientGen.java b/utils/tools/src/main/java/io/apicurio/registry/utils/tools/TransformOpenApiForClientGen.java index 63b4314dc3..552cec3d51 100644 --- a/utils/tools/src/main/java/io/apicurio/registry/utils/tools/TransformOpenApiForClientGen.java +++ b/utils/tools/src/main/java/io/apicurio/registry/utils/tools/TransformOpenApiForClientGen.java @@ -42,13 +42,15 @@ public static void main(String[] args) throws Exception { } // Read the source openapi document. - OpenApi30Document document = (OpenApi30Document) Library.readDocumentFromJSONString(inputDocumentString); + OpenApi30Document document = (OpenApi30Document) Library + .readDocumentFromJSONString(inputDocumentString); attachHeaderSchema(document.getPaths().getItem("/groups/{groupId}/artifacts/{artifactId}").getPut(), "/groups/{groupId}/artifacts/{artifactId} PUT"); attachHeaderSchema(document.getPaths().getItem("/groups/{groupId}/artifacts").getPost(), "/groups/{groupId}/artifacts POST"); - attachHeaderSchema(document.getPaths().getItem("/groups/{groupId}/artifacts/{artifactId}/versions").getPost(), + attachHeaderSchema( + document.getPaths().getItem("/groups/{groupId}/artifacts/{artifactId}/versions").getPost(), "/groups/{groupId}/artifacts/{artifactId}/versions POST"); // Remove duplicated tags @@ -64,7 +66,7 @@ public static void main(String[] args) throws Exception { document.getPaths().getItem("/search/artifacts").getGet().getTags().remove("Artifacts"); document.getPaths().getItem("/search/artifacts").getPost().getTags().remove("Artifacts"); - + 
document.getTags().stream().filter(t -> !"Global rules".equals(t.getName())) .collect(Collectors.toList()).forEach(tag -> document.removeTag(tag)); @@ -84,7 +86,8 @@ private static void attachHeaderSchema(OpenApiOperation operation, String info) out.println("Adding explicit Content-Type header to " + info); var param = operation.createParameter(); param.setName("Content-Type"); - param.setDescription("This header is explicit so clients using the OpenAPI Generator are able select the content type. Ignore otherwise."); + param.setDescription( + "This header is explicit so clients using the OpenAPI Generator are able select the content type. Ignore otherwise."); var schema = (OpenApi30Schema) param.createSchema(); schema.setType("string"); param.setSchema(schema); diff --git a/utils/tools/src/main/java/io/apicurio/registry/utils/tools/Transformer.java b/utils/tools/src/main/java/io/apicurio/registry/utils/tools/Transformer.java index ec119017af..21137b8303 100644 --- a/utils/tools/src/main/java/io/apicurio/registry/utils/tools/Transformer.java +++ b/utils/tools/src/main/java/io/apicurio/registry/utils/tools/Transformer.java @@ -18,11 +18,8 @@ import java.util.function.Function; /** - * Transform messages between Confluent and Apicurio format. - * - * To start from input topic's beginning, use this config - * * auto.offset.reset=earliest / ConsumerConfig.AUTO_OFFSET_RESET_CONFIG - * + * Transform messages between Confluent and Apicurio format. 
To start from input topic's beginning, use this + * config * auto.offset.reset=earliest / ConsumerConfig.AUTO_OFFSET_RESET_CONFIG */ public class Transformer { private static final Logger log = LoggerFactory.getLogger(Transformer.class); @@ -35,8 +32,7 @@ enum Type implements Function { output.putLong(input.getInt()); output.put(input); return output.array(); - }), - APICURIO_TO_CONFLUENT(bytes -> { + }), APICURIO_TO_CONFLUENT(bytes -> { ByteBuffer input = ByteBuffer.wrap(bytes); ByteBuffer output = ByteBuffer.allocate(bytes.length - 4); // 4more less to int output.put(input.get()); // magic @@ -88,10 +84,8 @@ public static void main(String[] args) { log.info(String.format("Transforming: %s --> %s [%s]", inputTopic, outputTopic, type)); StreamsBuilder builder = new StreamsBuilder(); - KStream input = builder.stream( - inputTopic, - Consumed.with(Serdes.String(), Serdes.ByteArray()) - ); + KStream input = builder.stream(inputTopic, + Consumed.with(Serdes.String(), Serdes.ByteArray())); input.transformValues(() -> new ValueTransformer() { @Override