diff --git a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/ProtobufDirectoryParser.java b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/ProtobufDirectoryParser.java index eecb6d8cf3..07b8903d32 100644 --- a/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/ProtobufDirectoryParser.java +++ b/utils/maven-plugin/src/main/java/io/apicurio/registry/maven/ProtobufDirectoryParser.java @@ -1,9 +1,7 @@ package io.apicurio.registry.maven; import com.google.protobuf.Descriptors; -import com.squareup.wire.schema.Location; import com.squareup.wire.schema.internal.parser.ProtoFileElement; -import com.squareup.wire.schema.internal.parser.ProtoParser; import io.apicurio.registry.content.ContentHandle; import io.apicurio.registry.rest.client.RegistryClient; import io.apicurio.registry.rest.v2.beans.ArtifactReference; @@ -20,7 +18,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -40,36 +37,45 @@ public ParsedDirectoryWrapper parse(File protoFile) .filter(file -> !file.getName().equals(protoFile.getName())) .collect(Collectors.toSet()); - Map parsedFiles = new HashMap<>(); - Map schemaDefs = new HashMap<>(); + try { + final Map<String, String> requiredSchemaDefs = new HashMap<>(); + final Descriptors.FileDescriptor schemaDescriptor = FileDescriptorUtils.parseProtoFileWithDependencies(protoFile, protoFiles, requiredSchemaDefs); + assert allDependenciesHaveSamePackageName(requiredSchemaDefs, schemaDescriptor.getPackage()) : "All dependencies must have the same package name as the main proto file"; + Map<String, ContentHandle> schemaContents = convertSchemaDefs(requiredSchemaDefs, schemaDescriptor.getPackage()); + return new DescriptorWrapper(schemaDescriptor, schemaContents); + } catch (Descriptors.DescriptorValidationException e) { + throw new RuntimeException("Failed to read schema file: " + protoFile, e); + } catch (FileDescriptorUtils.ReadSchemaException e) { + log.warn("Error processing Protobuf schema with name {}. This usually means that the references are not ready yet to read it", e.file()); + throw new RuntimeException(e.getCause()); + } catch (FileDescriptorUtils.ParseSchemaException e) { + log.warn("Error processing Protobuf schema with name {}. This usually means that the references are not ready yet to parse it", e.fileName()); + throw new RuntimeException(e.getCause()); + } + } - // Add file to set of parsed files to avoid circular dependencies - while (parsedFiles.size() != protoFiles.size()) { - boolean fileParsed = false; - for (File fileToProcess : protoFiles) { - if (fileToProcess.getName().equals(protoFile.getName()) || parsedFiles.containsKey(fileToProcess.getName())) { - continue; - } - try { - final ContentHandle schemaContent = readSchemaContent(fileToProcess); - parsedFiles.put(fileToProcess.getName(), parseProtoFile(fileToProcess, schemaDefs, parsedFiles, schemaContent)); - schemaDefs.put(fileToProcess.getName(), schemaContent); - fileParsed = true; - } catch (Exception ex) { - log.warn("Error processing Avro schema with name {}. This usually means that the references are not ready yet to parse it", fileToProcess.getName()); - } - } + private static boolean allDependenciesHaveSamePackageName(Map<String, String> schemas, String mainProtoPackageName) { + return schemas.keySet().stream().allMatch(fullDepName -> fullDepName.contains(mainProtoPackageName)); + } - //If no schema has been processed during this iteration, that means there is an error in the configuration, throw exception.
- if (!fileParsed) { - throw new IllegalStateException("Error found in the directory structure. Check that all required files are present."); + /** + * Converts the schema definitions to a map of ContentHandle, stripping any package information from the key, + * which is not needed for the schema registry, given that the dependent schemas are *always* in the same package + * as the main proto file. + */ + private Map<String, ContentHandle> convertSchemaDefs(Map<String, String> requiredSchemaDefs, String mainProtoPackageName) { + if (requiredSchemaDefs.isEmpty()) { + return Map.of(); + } + Map<String, ContentHandle> schemaDefs = new HashMap<>(requiredSchemaDefs.size()); + for (Map.Entry<String, String> entry : requiredSchemaDefs.entrySet()) { + if (schemaDefs.put(FileDescriptorUtils.extractProtoFileName(entry.getKey()), + ContentHandle.create(entry.getValue())) != null) { + log.warn("Dependency file name clash, likely caused by stripping the expected package name {}; dependencies: {}", + mainProtoPackageName, Arrays.toString(requiredSchemaDefs.keySet().toArray(new Object[0]))); } } - - //parse the main schema - final ContentHandle schemaContent = readSchemaContent(protoFile); - final Descriptors.FileDescriptor schemaDescriptor = parseProtoFile(protoFile, schemaDefs, parsedFiles, schemaContent); - return new DescriptorWrapper(schemaDescriptor, schemaDefs); + return schemaDefs; } @Override @@ -97,21 +103,6 @@ public List handleSchemaReferences(RegisterArtifact rootArtif return new ArrayList<>(references); } - private Descriptors.FileDescriptor parseProtoFile(File protoFile, Map schemaDefs, Map dependencies, ContentHandle schemaContent) { - ProtoFileElement protoFileElement = ProtoParser.Companion.parse(Location.get(protoFile.getAbsolutePath()), schemaContent.content()); - try { - - final Map schemaStrings = schemaDefs.entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, - e -> e.getValue().content())); - - return FileDescriptorUtils.protoFileToFileDescriptor(schemaContent.content(), protoFile.getName(), Optional.ofNullable(protoFileElement.getPackageName()), schemaStrings, dependencies); - } catch (Descriptors.DescriptorValidationException e) { - throw new RuntimeException("Failed to read schema file: " + protoFile, e); - } - } - public static class DescriptorWrapper implements ParsedDirectoryWrapper { final Descriptors.FileDescriptor fileDescriptor; final Map<String, ContentHandle> schemaContents; //used to store the original file content to register the content as-is.
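A minimal usage sketch (not part of the patch) of the new FileDescriptorUtils entry point that ProtobufDirectoryParser now delegates to; the directory and file names are hypothetical, while the method signature, the packageName/fileName key format, and extractProtoFileName come from the changes in this diff:

import com.google.protobuf.Descriptors;
import io.apicurio.registry.utils.protobuf.schema.FileDescriptorUtils;

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class ParseWithDependenciesExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical paths: the main proto file plus its sibling dependency files.
        File mainProto = new File("schemas/producer.proto");
        Set<File> siblings = Set.of(new File("schemas/producerId.proto"),
                                    new File("schemas/version.proto"));

        // Populated with only the dependencies actually required by the main file,
        // keyed as packageName/fileName with the raw schema text as value.
        Map<String, String> requiredSchemaDefs = new HashMap<>();
        Descriptors.FileDescriptor fd =
                FileDescriptorUtils.parseProtoFileWithDependencies(mainProto, siblings, requiredSchemaDefs);

        System.out.println("Parsed " + fd.getName() + " in package " + fd.getPackage());
        requiredSchemaDefs.keySet().forEach(key ->
                System.out.println("required dependency: " + FileDescriptorUtils.extractProtoFileName(key)));
    }
}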
diff --git a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtils.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtils.java index 294ed82afd..6b6b11db63 100644 --- a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtils.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtils.java @@ -39,10 +39,16 @@ import metadata.ProtobufSchemaMetadata; import additionalTypes.Decimals; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; @@ -119,43 +125,57 @@ public class FileDescriptorUtils { private static final OptionElement.Kind booleanKind = OptionElement.Kind.BOOLEAN; private static final OptionElement.Kind stringKind = OptionElement.Kind.STRING; private static final OptionElement.Kind enumKind = OptionElement.Kind.ENUM; + private static final FileDescriptor[] WELL_KNOWN_DEPENDENCIES; - public static FileDescriptor[] baseDependencies() { + static { //Support all the Protobuf WellKnownTypes //and the protos from Google API, https://github.com/googleapis/googleapis - return new FileDescriptor[] { - ApiProto.getDescriptor().getFile(), - FieldMaskProto.getDescriptor().getFile(), - SourceContextProto.getDescriptor().getFile(), - StructProto.getDescriptor().getFile(), - TypeProto.getDescriptor().getFile(), - TimestampProto.getDescriptor().getFile(), - WrappersProto.getDescriptor().getFile(), - AnyProto.getDescriptor().getFile(), - EmptyProto.getDescriptor().getFile(), - DurationProto.getDescriptor().getFile(), - TimeOfDayProto.getDescriptor().getFile(), - DateProto.getDescriptor().getFile(), - CalendarPeriodProto.getDescriptor().getFile(), - ColorProto.getDescriptor().getFile(), - DayOfWeek.getDescriptor().getFile(), - LatLng.getDescriptor().getFile(), - FractionProto.getDescriptor().getFile(), - MoneyProto.getDescriptor().getFile(), - MonthProto.getDescriptor().getFile(), - PhoneNumberProto.getDescriptor().getFile(), - PostalAddressProto.getDescriptor().getFile(), - CalendarPeriodProto.getDescriptor().getFile(), - LocalizedTextProto.getDescriptor().getFile(), - IntervalProto.getDescriptor().getFile(), - ExprProto.getDescriptor().getFile(), - QuaternionProto.getDescriptor().getFile(), - PostalAddressProto.getDescriptor().getFile(), - ProtobufSchemaMetadata.getDescriptor().getFile(), - Decimals.getDescriptor().getFile() + WELL_KNOWN_DEPENDENCIES = new FileDescriptor[]{ + ApiProto.getDescriptor().getFile(), + FieldMaskProto.getDescriptor().getFile(), + SourceContextProto.getDescriptor().getFile(), + StructProto.getDescriptor().getFile(), + TypeProto.getDescriptor().getFile(), + TimestampProto.getDescriptor().getFile(), + WrappersProto.getDescriptor().getFile(), + AnyProto.getDescriptor().getFile(), + EmptyProto.getDescriptor().getFile(), + DurationProto.getDescriptor().getFile(), + TimeOfDayProto.getDescriptor().getFile(), + DateProto.getDescriptor().getFile(), + CalendarPeriodProto.getDescriptor().getFile(), + ColorProto.getDescriptor().getFile(), + DayOfWeek.getDescriptor().getFile(), + LatLng.getDescriptor().getFile(), + 
FractionProto.getDescriptor().getFile(), + MoneyProto.getDescriptor().getFile(), + MonthProto.getDescriptor().getFile(), + PhoneNumberProto.getDescriptor().getFile(), + PostalAddressProto.getDescriptor().getFile(), + CalendarPeriodProto.getDescriptor().getFile(), + LocalizedTextProto.getDescriptor().getFile(), + IntervalProto.getDescriptor().getFile(), + ExprProto.getDescriptor().getFile(), + QuaternionProto.getDescriptor().getFile(), + PostalAddressProto.getDescriptor().getFile(), + ProtobufSchemaMetadata.getDescriptor().getFile(), + Decimals.getDescriptor().getFile() }; } + public static FileDescriptor[] baseDependencies() { + return WELL_KNOWN_DEPENDENCIES.clone(); + } + + private static Map mutableBaseDependenciesByName(int ensureCapacity) { + // return a map using WELL_KNOWN_DEPENDENCIES to populate it + final Map deps = new HashMap<>(WELL_KNOWN_DEPENDENCIES.length + ensureCapacity); + for (FileDescriptor fd : WELL_KNOWN_DEPENDENCIES) { + deps.put(fd.getName(), fd); + } + return deps; + } + public static FileDescriptor protoFileToFileDescriptor(ProtoFileElement element) throws DescriptorValidationException { return protoFileToFileDescriptor(element, "default.proto"); @@ -191,6 +211,298 @@ public static FileDescriptor protoFileToFileDescriptor(String schemaDefinition, return FileDescriptor.buildFrom(toFileDescriptorProto(schemaDefinition, protoFileName, optionalPackageName, schemaDefs), joinedDependencies.toArray(dependenciesArray)); } + public static final class ReadSchemaException extends Exception { + private final File file; + + private ReadSchemaException(File file, Throwable cause) { + super(cause); + this.file = file; + } + + public File file() { + return file; + } + } + + /** + * Same as {@link #parseProtoFileWithDependencies(File, Set, Map)}, but with {@code requiredSchemaDeps} set to {@code null}. + */ + public static FileDescriptor parseProtoFileWithDependencies(File mainProtoFile, Set dependencies) + throws DescriptorValidationException, ReadSchemaException, ParseSchemaException { + return parseProtoFileWithDependencies(mainProtoFile, dependencies, null); + } + + /** + * Same as {@link #parseProtoFileWithDependencies(File, Set, Map, boolean)}, but with {@code failFast} set to {@code true} + * and {@code requiredSchemaDeps} set to {@code null}. + */ + + public static FileDescriptor parseProtoFileWithDependencies(File mainProtoFile, Set dependencies, + Map requiredSchemaDeps) + throws ReadSchemaException, DescriptorValidationException, ParseSchemaException { + return parseProtoFileWithDependencies(mainProtoFile, dependencies, requiredSchemaDeps, true); + } + + /** + * Parse a proto file with its dependencies to produce a {@link FileDescriptor} of it, trying to resolve any + * transitive dependency.
+ * During dependency resolution, depending on {@code failFast}, the process either fails as soon as + * any parsing error happens in the list of provided dependencies, whether they are required or not, or it proceeds + * until a required dependency cannot be resolved.
+ * If {@code requiredSchemaDeps} is provided, it will be populated with the required dependencies, which keys are in the + * form of {@code packageName/fileName} and the value is the schema definition of the dependency. + */ + public static FileDescriptor parseProtoFileWithDependencies(File mainProtoFile, Set dependencies, + Map requiredSchemaDeps, boolean failFast) + throws DescriptorValidationException, ReadSchemaException, ParseSchemaException { + Objects.requireNonNull(mainProtoFile); + Objects.requireNonNull(dependencies); + + final Map resolvedDeps = mutableBaseDependenciesByName(dependencies.size()); + final Map schemaDeps = new HashMap<>(dependencies.size()); + final Map cachedProtoFileDependencies = new HashMap<>(dependencies.size()); + readAndParseSchemas(dependencies, schemaDeps, cachedProtoFileDependencies, failFast); + // fail-fast won't apply to the main proto file + final String schemaDefinition; + try { + schemaDefinition = new String(Files.readAllBytes(mainProtoFile.toPath()), StandardCharsets.UTF_8); + } catch (IOException e) { + throw new ReadSchemaException(mainProtoFile, e); + } + final ProtoFileElement mainProtoElement; + try { + mainProtoElement = ProtoParser.Companion.parse(Location.get(mainProtoFile.getAbsolutePath()), schemaDefinition); + } catch (Throwable t) { + throw new ParseSchemaException(mainProtoFile.getName(), t); + } + if (requiredSchemaDeps != null) { + requiredSchemaDeps.clear(); + } + return resolveFileDescriptor(mainProtoElement, schemaDefinition, mainProtoFile.getName(), schemaDeps, + resolvedDeps, requiredSchemaDeps, new HashSet<>(), cachedProtoFileDependencies); + } + + private static void readAndParseSchemas(Collection schemas, Map schemaContents, + Map protoFileElements, boolean failFast) + throws ReadSchemaException, ParseSchemaException { + Objects.requireNonNull(schemas); + for (File schema : schemas) { + final String schemaContent; + try { + schemaContent = new String(Files.readAllBytes(schema.toPath()), StandardCharsets.UTF_8); + } catch (IOException e) { + if (failFast) { + throw new ReadSchemaException(schema, e); + } + continue; + } + final ProtoFileElement protoFile; + try { + protoFile = ProtoParser.Companion.parse(Location.get(schema.getAbsolutePath()), schemaContent); + } catch (Throwable t) { + if (failFast) { + throw new ParseSchemaException(schema.getName(), t); + } + continue; + } + final String protoFullName = toProtoFullName(protoFile, schema.getName()); + protoFileElements.put(protoFullName, protoFile); + schemaContents.put(protoFullName, schemaContent); + } + } + + public static final class ProtobufSchemaContent { + private final String fileName; + private final String schemaDefinition; + + private ProtobufSchemaContent(String fileName, String schemaDefinition) { + Objects.requireNonNull(fileName); + Objects.requireNonNull(schemaDefinition); + this.fileName = fileName; + this.schemaDefinition = schemaDefinition; + } + + public String fileName() { + return fileName; + } + + public String schemaDefinition() { + return schemaDefinition; + } + + public static ProtobufSchemaContent of(String fileName, String schemaDefinition) { + return new ProtobufSchemaContent(fileName, schemaDefinition); + } + } + + public static final class ParseSchemaException extends Exception { + private final String fileName; + + private ParseSchemaException(String fileName, Throwable cause) { + super(cause); + this.fileName = fileName; + } + + public String fileName() { + return fileName; + } + } + + private static void parseSchemas(Collection schemas, 
Map schemaContents, + Map protoFileElements, boolean failFast) throws ParseSchemaException { + Objects.requireNonNull(schemas); + for (ProtobufSchemaContent schema : schemas) { + final ProtoFileElement protoFile; + try { + protoFile = ProtoParser.Companion.parse(DEFAULT_LOCATION, schema.schemaDefinition()); + } catch (Throwable t) { + if (failFast) { + throw new ParseSchemaException(schema.fileName(), t); + } + // ignore and move on! + continue; + } + final String protoFullName = toProtoFullName(protoFile, schema.fileName()); + protoFileElements.put(protoFullName, protoFile); + schemaContents.put(protoFullName, schema.schemaDefinition()); + } + } + + /** + * Same as {@link #parseProtoFileWithDependencies(ProtobufSchemaContent, Collection, Map, boolean)}, + * but with {@code failFast} set to {@code true} and {@code requiredSchemaDeps} set to {@code null}. + */ + public static FileDescriptor parseProtoFileWithDependencies(ProtobufSchemaContent mainProtoFile, + Collection dependencies) + throws DescriptorValidationException, ParseSchemaException { + return parseProtoFileWithDependencies(mainProtoFile, dependencies, null, true); + } + + /** + * Parse a proto file with its dependencies to produce a {@link FileDescriptor} of it, trying to resolve any + * transitive dependency.
+ * Both the dependencies and the main proto file must be provided as still-unparsed {@link ProtobufSchemaContent} instances, + * whose {@link ProtobufSchemaContent#fileName()} does not need to include the package name, since it is resolved + * later by parsing {@link ProtobufSchemaContent#schemaDefinition()}.
+ * During dependency resolution, depending on {@code failFast}, the process either fails as soon as + * any parsing error happens in the list of provided dependencies, whether they are required or not, or it proceeds + * until a required dependency cannot be resolved.
+ * If {@code requiredSchemaDeps} is provided, it will be populated with the required dependencies, which keys are in the + * form of {@code packageName/fileName} and the value is the schema definition of the dependency. + */ + public static FileDescriptor parseProtoFileWithDependencies(ProtobufSchemaContent mainProtoFile, + Collection dependencies, + Map requiredSchemaDeps, + boolean failFast) + throws DescriptorValidationException, ParseSchemaException { + Objects.requireNonNull(mainProtoFile); + Objects.requireNonNull(dependencies); + final Map resolvedDependencies = mutableBaseDependenciesByName(dependencies.size()); + final Map schemaDefinitions = new HashMap<>(dependencies.size()); + final Map protoFileElements = new HashMap<>(dependencies.size()); + parseSchemas(dependencies, schemaDefinitions, protoFileElements, failFast); + final ProtoFileElement mainProtoElement; + try { + mainProtoElement = ProtoParser.Companion.parse(DEFAULT_LOCATION, mainProtoFile.schemaDefinition()); + } catch (Throwable t) { + throw new ParseSchemaException(mainProtoFile.fileName(), t); + } + return resolveFileDescriptor(mainProtoElement, mainProtoFile.schemaDefinition(), mainProtoFile.fileName(), + schemaDefinitions, resolvedDependencies, requiredSchemaDeps, new HashSet<>(), protoFileElements); + } + + private static FileDescriptor resolveFileDescriptor(ProtoFileElement mainProtoElement, + String schemaDefinition, + String protoFileName, + Map schemaDefinitions, + Map resolvedDependencies, + Map requiredDependentSchemas, + Set unresolvedImportNames, + Map cachedProtoFileDependencies) throws DescriptorValidationException { + final String mainProtoImportName = toProtoFullName(mainProtoElement, protoFileName); + if (!unresolvedImportNames.add(mainProtoImportName)) { + // TODO we can do better here, we can actually print the whole chain of dependencies + throw new IllegalStateException("Circular Dependency found"); + } + List directDependencyNames = mainProtoElement.getImports(); + if (requiredDependentSchemas == null) { + requiredDependentSchemas = new HashMap<>(directDependencyNames.size()); + } + // TODO we can make a singleton of empty fd + final FileDescriptor[] directDependencyFds = new FileDescriptor[directDependencyNames.size()]; + for (int i = 0; i < directDependencyFds.length; i++) { + final String depFullName = directDependencyNames.get(i); + FileDescriptor fdDep = resolvedDependencies.get(depFullName); + final String schemaDep = schemaDefinitions.get(depFullName); + // this has never been resolved before + if (fdDep == null) { + if (schemaDep == null) { + // In theory this is a REQUIRED dep, meaning that it should be better to fail-fast. + // We could end up here because of: + // - fail-fast is false and some error happened while reading/parsing schemas + // - the schema wasn't in the dependencies + // In both cases we can just ignore the required dependency and let the validation fail later + continue; + } + final String fileName = extractProtoFileName(depFullName); + // try reuse the existing requiredDependentSchemas: + // in case of a chain of single-children dependencies it means reusing the same map! + final Map requiredSubDependencies = requiredDependentSchemas.isEmpty() ? requiredDependentSchemas : new HashMap<>(); + final ProtoFileElement protoFile; + if (cachedProtoFileDependencies != null) { + protoFile = cachedProtoFileDependencies.get(depFullName); + // In theory this is a REQUIRED dep, meaning that it should be better to fail-fast. 
+ // We could end up here because of: + // - fail-fast is false and some error happened while reading/parsing schemas + // - the schema wasn't in the dependencies + // In both cases we can just ignore the required dependency and let the validation fail later + if (protoFile == null) { + continue; + } + } else { + protoFile = ProtoParser.Companion.parse(DEFAULT_LOCATION, schemaDep); + } + fdDep = resolveFileDescriptor(protoFile, schemaDep, fileName, schemaDefinitions, resolvedDependencies, requiredSubDependencies, unresolvedImportNames, cachedProtoFileDependencies); + // no need to add anything + if (requiredDependentSchemas != requiredSubDependencies) { + requiredDependentSchemas.putAll(requiredSubDependencies); + } + // we have accumulated new requiredSubDependencies, we need to add them to the requiredDependentSchemas + resolvedDependencies.put(depFullName, fdDep); + } + // this is the case of a well-known dependency + if (schemaDep != null) { + // no need to add it earlier actually + requiredDependentSchemas.put(depFullName, schemaDep); + } + directDependencyFds[i] = fdDep; + } + final boolean removed = unresolvedImportNames.remove(mainProtoImportName); + assert removed : "unresolvedNames should contain depName"; + // TODO we risk to have few dependencies files to be re-written in a whole new in-memory fs + Descriptors.FileDescriptor mainProtoFd = FileDescriptor.buildFrom(toFileDescriptorProto(schemaDefinition, protoFileName, + Optional.ofNullable(mainProtoElement.getPackageName()), requiredDependentSchemas), directDependencyFds); + return mainProtoFd; + } + + private static String toProtoFullName(ProtoFileElement protoFile, String protoFileName) { + return protoFile.getPackageName() + '/' + protoFileName; + } + + /** + * Extract the proto file name out of a full proto file name, which is in the form of {@code packageName/fileName}. 
+ */ + public static String extractProtoFileName(String protoFullName) { + int beforeStartFileName = protoFullName.lastIndexOf('/'); + final String fileName; + if (beforeStartFileName != -1) { + fileName = protoFullName.substring(beforeStartFileName + 1); + } else { + fileName = protoFullName; + } + return fileName; + } + private static FileDescriptorProto toFileDescriptorProto(String schemaDefinition, String protoFileName, Optional optionalPackageName) { return toFileDescriptorProto(schemaDefinition, protoFileName, optionalPackageName, Collections.emptyMap()); } diff --git a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchemaLoader.java b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchemaLoader.java index bc32601505..08ccd118a4 100644 --- a/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchemaLoader.java +++ b/utils/protobuf-schema-utilities/src/main/java/io/apicurio/registry/utils/protobuf/schema/ProtobufSchemaLoader.java @@ -153,8 +153,21 @@ public static ProtobufSchemaLoaderContext loadSchema(Optional packageNam String dirPath = createDirectory(dirs, inMemoryFileSystem); okio.Path path = writeFile(schemaDefinition, fileName, dirPath, inMemoryFileSystem); - for (String depKey: deps.keySet()) { - writeFile(deps.get(depKey), depKey, dirPath, inMemoryFileSystem); + for (Map.Entry schema : deps.entrySet()) { + final String depKey = schema.getKey(); + final String depSchema = schema.getValue(); + int beforeFileName = depKey.lastIndexOf('/'); + if (beforeFileName != -1) { + final String packageNameDep = depKey.substring(0, beforeFileName); + String depDirPath = dirPath; + if (!packageName.isPresent() || !packageName.get().equals(packageNameDep)) { + // apply the same logic used for dirs of the root one + depDirPath = createDirectory(packageNameDep.split("\\."), inMemoryFileSystem); + } + writeFile(depSchema, depKey.substring(beforeFileName + 1), depDirPath, inMemoryFileSystem); + } else { + writeFile(depSchema, depKey, dirPath, inMemoryFileSystem); + } } SchemaLoader schemaLoader = new SchemaLoader(inMemoryFileSystem); diff --git a/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtilsTest.java b/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtilsTest.java index c1e170ea53..6cccc08691 100644 --- a/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtilsTest.java +++ b/utils/protobuf-schema-utilities/src/test/java/io/apicurio/registry/utils/protobuf/schema/FileDescriptorUtilsTest.java @@ -2,6 +2,8 @@ import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; +import com.google.protobuf.DynamicMessage; +import com.google.protobuf.Timestamp; import com.squareup.wire.schema.internal.parser.ProtoFileElement; import com.squareup.wire.schema.internal.parser.ProtoParser; import io.apicurio.registry.utils.protobuf.schema.syntax2.TestOrderingSyntax2; @@ -22,16 +24,29 @@ import io.apicurio.registry.utils.protobuf.schema.syntax3.jsonname.TestSyntax3JsonName; import io.apicurio.registry.utils.protobuf.schema.syntax3.options.TestOrderingSyntax3Options; import io.apicurio.registry.utils.protobuf.schema.syntax3.references.TestOrderingSyntax3References; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import 
org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; import java.util.stream.Stream; import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; public class FileDescriptorUtilsTest { @@ -70,6 +85,22 @@ private static Stream testProtoFileProviderForJsonName() { .map(Arguments::of); } + private static Stream testParseWithDepsProtoFilesProvider() { + ClassLoader classLoader = FileDescriptorUtilsTest.class.getClassLoader(); + File mainProtoFile = new File(Objects.requireNonNull(classLoader.getResource("parseWithDeps/producer.proto")).getFile()); + // do the same with the deps + File[] deps = Stream.of( + "mypackage0/producerId.proto", + "mypackage2/version.proto", + "broken/helloworld.proto" + ).map(s -> new File(Objects.requireNonNull(classLoader.getResource("parseWithDeps/" + s)).getFile())).toArray(File[]::new); + return Stream.of( + Arguments.of(true, true, mainProtoFile, deps), + Arguments.of(false, true, mainProtoFile, deps), + Arguments.of(true, false, mainProtoFile, deps), + Arguments.of(false, false, mainProtoFile, deps)); + } + @Test public void fileDescriptorToProtoFile_ParsesJsonNameOptionCorrectly() { Descriptors.FileDescriptor fileDescriptor = TestOrderingSyntax2.getDescriptor().getFile(); @@ -139,6 +170,88 @@ public void ParsesSchemasWithNoPackageNameSpecified() throws Exception { assertEquals(expectedFileDescriptorProto, actualFileDescriptorProto); } + + @ParameterizedTest + @MethodSource("testParseWithDepsProtoFilesProvider") + public void testParseProtoFileAndDependenciesOnDifferentPackagesAndKnownType(boolean failFast, boolean readFiles, File mainProtoFile, File[] deps) + throws Descriptors.DescriptorValidationException, FileDescriptorUtils.ParseSchemaException, FileDescriptorUtils.ReadSchemaException { + final Descriptors.FileDescriptor mainProtoFd; + final Map requiredSchemaDeps = new HashMap<>(2); + if (!readFiles) { + if (failFast) { + // it fail-fast by default + Assertions.assertThrowsExactly(FileDescriptorUtils.ParseSchemaException.class, () -> + FileDescriptorUtils.parseProtoFileWithDependencies(mainProtoFile, Set.of(deps)) + ); + return; + } + mainProtoFd = FileDescriptorUtils.parseProtoFileWithDependencies(mainProtoFile, Set.of(deps), requiredSchemaDeps, false); + } else { + if (failFast) { + // it fail-fast by default + Assertions.assertThrowsExactly(FileDescriptorUtils.ParseSchemaException.class, () -> + FileDescriptorUtils.parseProtoFileWithDependencies(readSchemaContent(mainProtoFile), readSchemaContents(deps)) + ); + return; + } + mainProtoFd = FileDescriptorUtils.parseProtoFileWithDependencies(readSchemaContent(mainProtoFile), readSchemaContents(deps), requiredSchemaDeps, false); + + } + final Map expectedSchemaDeps = Map.of( + "mypackage0/producerId.proto", readSelectedFileSchemaAsString("producerId.proto", deps), + "mypackage2/version.proto", readSelectedFileSchemaAsString("version.proto", deps) + ); + 
Assertions.assertEquals(expectedSchemaDeps, requiredSchemaDeps); + Assertions.assertNotNull(mainProtoFd.findServiceByName("MyService")); + Assertions.assertNotNull(mainProtoFd.findServiceByName("MyService").findMethodByName("Foo")); + Descriptors.Descriptor producer = mainProtoFd.findMessageTypeByName("Producer"); + // create a dynamic message with all fields populated + DynamicMessage.Builder builder = DynamicMessage.newBuilder(producer); + builder.setField(producer.findFieldByName("name"), "name"); + builder.setField(producer.findFieldByName("timestamp"), + Timestamp.newBuilder() + .setSeconds(1634123456) + .setNanos(789000000) + .build()); + Descriptors.FieldDescriptor personId = producer.findFieldByName("id"); + // assert that the id field is the expected msg type + assertEquals("mypackage0.ProducerId", personId.getMessageType().getFullName()); + Descriptors.FieldDescriptor versionId = personId.getMessageType().findFieldByName("id"); + assertEquals("mypackage2.Version", versionId.getMessageType().getFullName()); + // populate all the rest of the fields in the dynamic message + builder.setField(personId, + DynamicMessage.newBuilder(personId.getMessageType()) + .setField(versionId, + DynamicMessage.newBuilder(versionId.getMessageType()) + .setField(versionId.getMessageType().findFieldByName("id"), "id") + .build()) + .setField(personId.getMessageType().findFieldByName("name"), "name") + .build()); + assertNotNull(builder.build()); + } + + private static Collection readSchemaContents(File[] files) { + return Arrays.stream(files).map(FileDescriptorUtilsTest::readSchemaContent).collect(Collectors.toList()); + } + + private static FileDescriptorUtils.ProtobufSchemaContent readSchemaContent(File file) { + return FileDescriptorUtils.ProtobufSchemaContent.of(file.getName(), readSchemaAsString(file)); + } + + private static String readSelectedFileSchemaAsString(String fileName, File[] files) { + return Stream.of(files).filter(f -> f.getName().equals(fileName)).collect(Collectors.reducing((a, b) -> { + throw new IllegalStateException("More than one file with name " + fileName + " found"); + })).map(FileDescriptorUtilsTest::readSchemaAsString).get(); + } + + private static String readSchemaAsString(File file) { + try { + return new String(Files.readAllBytes(file.toPath()), StandardCharsets.UTF_8); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + private Descriptors.FileDescriptor schemaTextToFileDescriptor(String schema, String fileName) throws Exception { ProtoFileElement protoFileElement = ProtoParser.Companion.parse(FileDescriptorUtils.DEFAULT_LOCATION, schema); return FileDescriptorUtils.protoFileToFileDescriptor(schema, fileName, Optional.ofNullable(protoFileElement.getPackageName())); diff --git a/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/broken/helloworld.proto b/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/broken/helloworld.proto new file mode 100644 index 0000000000..9111270b7b --- /dev/null +++ b/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/broken/helloworld.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; + +option java_multiple_files = true; +option java_package = "io.grpc.examples.helloworld"; +option java_outer_classname = "HelloWorldProto"; +option objc_class_prefix = "HLW"; + +package mypackage3; + +// The greeting service definition. 
+service Greeter { + // Sends a greeting + rpc SayHello (HelloRequest) returns (HelloReply) {} + + rpc SayHelloStreamReply (HelloRequest) returns (stream HelloReply) {} + + rpc SayHelloBidiStream (stream HelloRequest) returns (stream HelloReply) {} +} + +// The request message containing the user's name. +message HelloRequest { + string name = 1; +} + +// The response message containing the greetings +message HelloReply { + string message = 1; diff --git a/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/mypackage0/producerId.proto b/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/mypackage0/producerId.proto new file mode 100644 index 0000000000..0343f616ec --- /dev/null +++ b/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/mypackage0/producerId.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; +import "mypackage2/version.proto"; +package mypackage0; + +message ProducerId { + string name = 1; + mypackage2.Version id = 2; +} \ No newline at end of file diff --git a/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/mypackage2/version.proto b/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/mypackage2/version.proto new file mode 100644 index 0000000000..9f23857c26 --- /dev/null +++ b/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/mypackage2/version.proto @@ -0,0 +1,6 @@ +syntax = "proto3"; +package mypackage2; + +message Version { + string id = 1; +} \ No newline at end of file diff --git a/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/producer.proto b/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/producer.proto new file mode 100644 index 0000000000..6fbbf4108c --- /dev/null +++ b/utils/protobuf-schema-utilities/src/test/resources/parseWithDeps/producer.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; +import "mypackage0/producerId.proto"; +import "google/protobuf/timestamp.proto"; +package mypackage1; + +message Producer { + mypackage0.ProducerId id = 1; + string name = 2; + google.protobuf.Timestamp timestamp = 3; +} + +service MyService { + rpc Foo (Producer) returns (Producer); +} \ No newline at end of file
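For reference, a minimal sketch (not part of the patch) of the in-memory overload exercised by the parameterized test above; the inline schema strings are condensed copies of the producer.proto, producerId.proto and version.proto resources added by this diff, while the class name and printed output are illustrative only:

import com.google.protobuf.Descriptors;
import io.apicurio.registry.utils.protobuf.schema.FileDescriptorUtils;
import io.apicurio.registry.utils.protobuf.schema.FileDescriptorUtils.ProtobufSchemaContent;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class InMemoryParseExample {
    public static void main(String[] args) throws Exception {
        // Condensed copies of the test resources; real callers would read these from their own sources.
        ProtobufSchemaContent producer = ProtobufSchemaContent.of("producer.proto",
                "syntax = \"proto3\"; import \"mypackage0/producerId.proto\"; import \"google/protobuf/timestamp.proto\"; "
                        + "package mypackage1; message Producer { mypackage0.ProducerId id = 1; string name = 2; google.protobuf.Timestamp timestamp = 3; }");
        ProtobufSchemaContent producerId = ProtobufSchemaContent.of("producerId.proto",
                "syntax = \"proto3\"; import \"mypackage2/version.proto\"; package mypackage0; "
                        + "message ProducerId { string name = 1; mypackage2.Version id = 2; }");
        ProtobufSchemaContent version = ProtobufSchemaContent.of("version.proto",
                "syntax = \"proto3\"; package mypackage2; message Version { string id = 1; }");

        // Keys are packageName/fileName, values are the schema definitions that were actually required.
        Map<String, String> requiredDeps = new HashMap<>();
        Descriptors.FileDescriptor fd = FileDescriptorUtils.parseProtoFileWithDependencies(
                producer, List.of(producerId, version), requiredDeps, true);

        // Prints mypackage1.Producer.
        System.out.println(fd.findMessageTypeByName("Producer").getFullName());
        // Contains mypackage0/producerId.proto and mypackage2/version.proto;
        // google/protobuf/timestamp.proto is a well-known type and is not included.
        System.out.println(requiredDeps.keySet());
    }
}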