From 8c1038dbe381483465b06ab22ae62484e8726f76 Mon Sep 17 00:00:00 2001
From: Michel Davit
Date: Tue, 12 Nov 2024 16:05:20 +0100
Subject: [PATCH] Stateful schema compiler (#216)

Allow schema files to depend on recompiled records
---
 .../github/sbt/avro/AvroCompilerBridge.java   | 120 +++---
 .../github/sbt/avro/AvscFilesCompiler.java    | 214 ----------
 .../com/github/sbt/avro/AvscFilesParser.java  |  91 +++++
 .../sbt/avro/test/TestSpecificRecord.java     | 308 ---------------
 .../avro/test/TestSpecificRecordParent.java   | 365 ------------------
 .../src/test/resources/avro/test_records.avsc |  35 +-
 .../sbt/avro/AvscFilesCompilerSpec.scala      | 107 -----
 .../github/sbt/avro/AvscFilesParserSpec.scala |  76 ++++
 .../scala/com/github/sbt/avro/SbtAvro.scala   |   2 +-
 .../github/sbt/avro/CustomAvroCompiler.java   |  24 +-
 10 files changed, 254 insertions(+), 1088 deletions(-)
 delete mode 100644 bridge/src/main/java/com/github/sbt/avro/AvscFilesCompiler.java
 create mode 100644 bridge/src/main/java/com/github/sbt/avro/AvscFilesParser.java
 delete mode 100644 bridge/src/test/java/com/github/sbt/avro/test/TestSpecificRecord.java
 delete mode 100644 bridge/src/test/java/com/github/sbt/avro/test/TestSpecificRecordParent.java
 delete mode 100644 bridge/src/test/scala/com/github/sbt/avro/AvscFilesCompilerSpec.scala
 create mode 100644 bridge/src/test/scala/com/github/sbt/avro/AvscFilesParserSpec.scala

diff --git a/bridge/src/main/java/com/github/sbt/avro/AvroCompilerBridge.java b/bridge/src/main/java/com/github/sbt/avro/AvroCompilerBridge.java
index a0d54b0..2df92d0 100644
--- a/bridge/src/main/java/com/github/sbt/avro/AvroCompilerBridge.java
+++ b/bridge/src/main/java/com/github/sbt/avro/AvroCompilerBridge.java
@@ -1,8 +1,7 @@
 package com.github.sbt.avro;

 import org.apache.avro.Schema;
-import org.apache.avro.specific.SpecificRecord;
-
+import org.apache.avro.specific.SpecificData;
 import org.apache.avro.Protocol;
 import org.apache.avro.compiler.idl.Idl;
 import org.apache.avro.compiler.specific.SpecificCompiler;
@@ -10,25 +9,26 @@
 import org.apache.avro.generic.GenericData.StringType;

 import java.io.File;
-import java.util.HashSet;
-import java.util.Set;
+import java.util.*;

 public class AvroCompilerBridge implements AvroCompiler {

   private static final AvroVersion AVRO_1_9_0 = new AvroVersion(1, 9, 0);
   private static final AvroVersion AVRO_1_10_0 = new AvroVersion(1, 10, 0);

-  private final AvroVersion avroVersion = AvroVersion.getRuntimeVersion();
+  private final AvroVersion avroVersion;
+  protected AvscFilesParser parser;

-  private StringType stringType;
-  private FieldVisibility fieldVisibility;
-  private boolean useNamespace;
-  private boolean enableDecimalLogicalType;
-  private boolean createSetters;
-  private boolean optionalGetters;
+  protected StringType stringType;
+  protected FieldVisibility fieldVisibility;
+  protected boolean useNamespace;
+  protected boolean enableDecimalLogicalType;
+  protected boolean createSetters;
+  protected boolean optionalGetters;

-  protected Schema.Parser createParser() {
-    return new Schema.Parser();
+  public AvroCompilerBridge() {
+    avroVersion = AvroVersion.getRuntimeVersion();
+    parser = new AvscFilesParser();
   }

   @Override
@@ -61,12 +61,9 @@ public void setOptionalGetters(boolean optionalGetters) {
     this.optionalGetters = optionalGetters;
   }

-  @Override
-  public void recompile(Class<?>[] records, File target) throws Exception {
-    AvscFilesCompiler compiler = new AvscFilesCompiler(this::createParser);
+  protected void configureCompiler(SpecificCompiler compiler) {
compiler.setStringType(stringType); compiler.setFieldVisibility(fieldVisibility); - compiler.setUseNamespace(useNamespace); compiler.setEnableDecimalLogicalType(enableDecimalLogicalType); compiler.setCreateSetters(createSetters); if (avroVersion.compareTo(AVRO_1_9_0) >= 0) { @@ -75,14 +72,20 @@ public void recompile(Class[] records, File target) throws Exception { if (avroVersion.compareTo(AVRO_1_10_0) >= 0) { compiler.setOptionalGettersForNullableFieldsOnly(optionalGetters); } - compiler.setTemplateDirectory("/org/apache/avro/compiler/specific/templates/java/classic/"); + } - Set> classes = new HashSet<>(); + @Override + public void recompile(Class[] records, File target) throws Exception { + List schemas = new ArrayList<>(records.length); for (Class record : records) { System.out.println("Recompiling Avro record: " + record.getName()); - classes.add((Class) record); + Schema schema = SpecificData.get().getSchema(record); + schemas.add(schema); + SpecificCompiler compiler = new SpecificCompiler(schema); + configureCompiler(compiler); + compiler.compileToDestination(null, target); } - compiler.compileClasses(classes, target); + parser.addTypes(schemas); } @Override @@ -92,42 +95,32 @@ public void compileIdls(File[] idls, File target) throws Exception { Idl parser = new Idl(idl); Protocol protocol = parser.CompilationUnit(); SpecificCompiler compiler = new SpecificCompiler(protocol); - compiler.setStringType(stringType); - compiler.setFieldVisibility(fieldVisibility); - compiler.setEnableDecimalLogicalType(enableDecimalLogicalType); - compiler.setCreateSetters(createSetters); - if (avroVersion.compareTo(AVRO_1_9_0) >= 0) { - compiler.setGettersReturnOptional(optionalGetters); - } - if (avroVersion.compareTo(AVRO_1_10_0) >= 0) { - compiler.setOptionalGettersForNullableFieldsOnly(optionalGetters); - } - compiler.compileToDestination(null, target); + configureCompiler(compiler); + compiler.compileToDestination(idl, target); } } @Override public void compileAvscs(AvroFileRef[] avscs, File target) throws Exception { - AvscFilesCompiler compiler = new AvscFilesCompiler(this::createParser); - compiler.setStringType(stringType); - compiler.setFieldVisibility(fieldVisibility); - compiler.setUseNamespace(useNamespace); - compiler.setEnableDecimalLogicalType(enableDecimalLogicalType); - compiler.setCreateSetters(createSetters); - if (avroVersion.compareTo(AVRO_1_9_0) >= 0) { - compiler.setGettersReturnOptional(optionalGetters); - } - if (avroVersion.compareTo(AVRO_1_10_0) >= 0) { - compiler.setOptionalGettersForNullableFieldsOnly(optionalGetters); - } - compiler.setTemplateDirectory("/org/apache/avro/compiler/specific/templates/java/classic/"); - - Set files = new HashSet<>(); + List files = new ArrayList<>(avscs.length); for (AvroFileRef ref : avscs) { System.out.println("Compiling Avro schema: " + ref.getFile()); files.add(ref); } - compiler.compileFiles(files, target); + Map schemas = parser.parseFiles(files); + if (useNamespace) { + for (Map.Entry s: schemas.entrySet()) { + validateParsedSchema(s.getKey(), s.getValue()); + } + } + + for (Map.Entry entry: schemas.entrySet()) { + File file = entry.getKey().getFile(); + Schema schema = entry.getValue(); + SpecificCompiler compiler = new SpecificCompiler(schema); + configureCompiler(compiler); + compiler.compileToDestination(file, target); + } } @Override @@ -136,17 +129,28 @@ public void compileAvprs(File[] avprs, File target) throws Exception { System.out.println("Compiling Avro protocol: " + avpr); Protocol protocol = Protocol.parse(avpr); 
SpecificCompiler compiler = new SpecificCompiler(protocol); - compiler.setStringType(stringType); - compiler.setFieldVisibility(fieldVisibility); - compiler.setEnableDecimalLogicalType(enableDecimalLogicalType); - compiler.setCreateSetters(createSetters); - if (avroVersion.compareTo(AVRO_1_9_0) >= 0) { - compiler.setGettersReturnOptional(optionalGetters); - } - if (avroVersion.compareTo(AVRO_1_10_0) >= 0) { - compiler.setOptionalGettersForNullableFieldsOnly(optionalGetters); - } + configureCompiler(compiler); compiler.compileToDestination(null, target); } } + + private void validateParsedSchema(AvroFileRef src, Schema schema) { + if (useNamespace) { + if (schema.getType() != Schema.Type.RECORD && schema.getType() != Schema.Type.ENUM) { + throw new SchemaGenerationException(String.format( + "Error compiling schema file %s. " + + "Only one root RECORD or ENUM type is allowed per file.", + src + )); + } else if (!src.pathToClassName().equals(schema.getFullName())) { + throw new SchemaGenerationException(String.format( + "Error compiling schema file %s. " + + "File class name %s does not match record class name %s", + src, + src.pathToClassName(), + schema.getFullName() + )); + } + } + } } diff --git a/bridge/src/main/java/com/github/sbt/avro/AvscFilesCompiler.java b/bridge/src/main/java/com/github/sbt/avro/AvscFilesCompiler.java deleted file mode 100644 index 4e6674a..0000000 --- a/bridge/src/main/java/com/github/sbt/avro/AvscFilesCompiler.java +++ /dev/null @@ -1,214 +0,0 @@ -package com.github.sbt.avro; - -import org.apache.avro.AvroRuntimeException; -import org.apache.avro.Schema; -import org.apache.avro.compiler.specific.SpecificCompiler; -import org.apache.avro.generic.GenericData; -import org.apache.avro.specific.SpecificData; -import org.apache.avro.specific.SpecificRecord; - -import java.io.File; -import java.io.IOException; -import java.util.*; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -public class AvscFilesCompiler { - - private final Supplier parserSupplier; - private Schema.Parser schemaParser; - - private String templateDirectory; - private GenericData.StringType stringType; - private SpecificCompiler.FieldVisibility fieldVisibility; - private boolean useNamespace; - private boolean enableDecimalLogicalType; - private boolean createSetters; - private Boolean gettersReturnOptional; - private Boolean optionalGettersForNullableFieldsOnly; - private Map compileExceptions; - - public AvscFilesCompiler(Supplier parserSupplier) { - this.parserSupplier = parserSupplier; - this.schemaParser = parserSupplier.get(); - } - - public void compileFiles(Set files, File outputDirectory) { - Set compiledFiles = new HashSet<>(); - Set uncompiledFiles = new HashSet<>(files); - - boolean progressed = true; - while (progressed && !uncompiledFiles.isEmpty()) { - progressed = false; - compileExceptions = new HashMap<>(); - - for (AvroFileRef file : uncompiledFiles) { - boolean success = tryCompile(file, outputDirectory); - if (success) { - compiledFiles.add(file); - progressed = true; - } - } - - uncompiledFiles.removeAll(compiledFiles); - } - - if (!uncompiledFiles.isEmpty()) { - String failedFiles = uncompiledFiles.stream() - .flatMap(f -> { - Exception e = compileExceptions.get(f); - if (e == null) { - return Stream.empty(); - } else { - return Stream.of(f.getFile().getName() + ": " + e.getMessage()); - } - }) - .collect(Collectors.joining(",\n")); - - throw new SchemaGenerationException("Can not compile schema files:\n" + 
failedFiles); - } - } - - public void compileClasses(Set> classes, File outputDirectory) { - Set> compiledClasses = new HashSet<>(); - Set> uncompiledClasses = new HashSet<>(classes); - - boolean progressed = true; - while (progressed && !uncompiledClasses.isEmpty()) { - progressed = false; - compileExceptions = new HashMap<>(); - - for (Class clazz : uncompiledClasses) { - Schema schema = SpecificData.get().getSchema(clazz); - boolean success = tryCompile(null, schema, outputDirectory); - if (success) { - compiledClasses.add(clazz); - progressed = true; - } - } - - uncompiledClasses.removeAll(compiledClasses); - } - - if (!uncompiledClasses.isEmpty()) { - String failedClasses = uncompiledClasses.stream() - .map(Class::toString) - .flatMap(c -> { - Exception e = compileExceptions.get(c); - if (e == null) { - return Stream.empty(); - } else { - return Stream.of(c + ": " + e.getMessage()); - } - }) - .collect(Collectors.joining(",\n")); - throw new SchemaGenerationException("Can not re-compile classes:\n" + failedClasses); - } - } - - private boolean tryCompile(AvroFileRef src, File outputDirectory) { - Schema.Parser successfulSchemaParser = stashParser(); - final Schema schema; - try { - schema = schemaParser.parse(src.getFile()); - validateParsedSchema(src, schema); - } catch (AvroRuntimeException e) { - schemaParser = successfulSchemaParser; - compileExceptions.put(src, e); - return false; - } catch (IOException e) { - throw new SchemaGenerationException(String.format("Error parsing schema file %s", src), e); - } - - return tryCompile(src.getFile(), schema, outputDirectory); - } - - private boolean tryCompile(File src, Schema schema, File outputDirectory) { - SpecificCompiler compiler = new SpecificCompiler(schema); - compiler.setTemplateDir(templateDirectory); - compiler.setStringType(stringType); - compiler.setFieldVisibility(fieldVisibility); - compiler.setEnableDecimalLogicalType(enableDecimalLogicalType); - compiler.setCreateSetters(createSetters); - - if (gettersReturnOptional != null) { - compiler.setGettersReturnOptional(gettersReturnOptional); - } - if (optionalGettersForNullableFieldsOnly != null) { - compiler.setOptionalGettersForNullableFieldsOnly(optionalGettersForNullableFieldsOnly); - } - - try { - compiler.compileToDestination(src, outputDirectory); - } catch (IOException e) { - throw new SchemaGenerationException( - String.format("Error compiling schema file %s to %s", src, outputDirectory), e); - } - - return true; - } - - private Schema.Parser stashParser() { - // on failure Schema.Parser changes cache state. - // We want last successful state. - Schema.Parser parser = parserSupplier.get(); - Set predefinedTypes = parser.getTypes().keySet(); - Map compiledTypes = schemaParser.getTypes(); - compiledTypes.keySet().removeAll(predefinedTypes); - parser.addTypes(compiledTypes); - return parser; - } - - private void validateParsedSchema(AvroFileRef src, Schema schema) { - if (useNamespace) { - if (schema.getType() != Schema.Type.RECORD && schema.getType() != Schema.Type.ENUM) { - throw new SchemaGenerationException(String.format( - "Error compiling schema file %s. " - + "Only one root RECORD or ENUM type is allowed per file.", - src - )); - } else if (!src.pathToClassName().equals(schema.getFullName())) { - throw new SchemaGenerationException(String.format( - "Error compiling schema file %s. 
" - + "File class name %s does not match record class name %s", - src, - src.pathToClassName(), - schema.getFullName() - )); - } - } - } - - public void setTemplateDirectory(String templateDirectory) { - this.templateDirectory = templateDirectory; - } - - public void setStringType(GenericData.StringType stringType) { - this.stringType = stringType; - } - - public void setFieldVisibility(SpecificCompiler.FieldVisibility fieldVisibility) { - this.fieldVisibility = fieldVisibility; - } - - public void setUseNamespace(boolean useNamespace) { - this.useNamespace = useNamespace; - } - - public void setEnableDecimalLogicalType(Boolean enableDecimalLogicalType) { - this.enableDecimalLogicalType = enableDecimalLogicalType; - } - - public void setCreateSetters(boolean createSetters) { - this.createSetters = createSetters; - } - - public void setGettersReturnOptional(final boolean gettersReturnOptional) { - this.gettersReturnOptional = gettersReturnOptional; - } - - public void setOptionalGettersForNullableFieldsOnly(final boolean optionalGettersForNullableFieldsOnly) { - this.optionalGettersForNullableFieldsOnly = optionalGettersForNullableFieldsOnly; - } -} diff --git a/bridge/src/main/java/com/github/sbt/avro/AvscFilesParser.java b/bridge/src/main/java/com/github/sbt/avro/AvscFilesParser.java new file mode 100644 index 0000000..f50b546 --- /dev/null +++ b/bridge/src/main/java/com/github/sbt/avro/AvscFilesParser.java @@ -0,0 +1,91 @@ +package com.github.sbt.avro; + +import org.apache.avro.AvroRuntimeException; +import org.apache.avro.Schema; + +import java.io.IOException; +import java.util.*; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +public class AvscFilesParser { + + private final Supplier parserSupplier; + // act as the ParseContext introduced in avro 1.12 + // contain all types known by the parser + private Map context; + + public AvscFilesParser() { + this(Schema.Parser::new); + } + + public AvscFilesParser(Supplier parserSupplier) { + this.parserSupplier = parserSupplier; + this.context = new HashMap<>(); + } + + public void addTypes(Iterable types) { + for (Schema schema : types) { + context.put(schema.getFullName(), schema); + } + } + + public Map parseFiles(Collection files) { + Set unparsedFiles = new HashSet<>(files); + Map parsedFiles = new HashMap<>(); + Map parseExceptions = new HashMap<>(); + + Schema.Parser parser = unstashParser(); + boolean progressed = true; + while (progressed && !unparsedFiles.isEmpty()) { + progressed = false; + parseExceptions.clear(); + + for (AvroFileRef file : unparsedFiles) { + try { + Schema schema = parser.parse(file.getFile()); + parsedFiles.put(file, schema); + progressed = true; + stashParser(parser); + } catch (AvroRuntimeException e) { + parseExceptions.put(file, e); + parser = unstashParser(); + } catch (IOException e) { + throw new SchemaGenerationException(String.format("Error parsing schema file %s", file), e); + } + } + + unparsedFiles.removeAll(parsedFiles.keySet()); + } + + if (!unparsedFiles.isEmpty()) { + String failedFiles = unparsedFiles.stream() + .map(f -> { + String message = Optional.ofNullable(parseExceptions.get(f)) + .map(Exception::getMessage) + .orElse("Unknown error"); + return f.getFile().getName() + ": " + message; + }) + .collect(Collectors.joining(",\n")); + + throw new SchemaGenerationException("Can not parse schema files:\n" + failedFiles); + } + + return parsedFiles; + } + + private void stashParser(Schema.Parser parser) { + this.context = parser.getTypes(); + } + + private Schema.Parser 
unstashParser() { + // on failure Schema.Parser changes cache state. + // We want last successful state. + Schema.Parser parser = parserSupplier.get(); + // filter-out known types + Set predefinedTypes = parser.getTypes().keySet(); + context.keySet().removeAll(predefinedTypes); + parser.addTypes(context); + return parser; + } +} diff --git a/bridge/src/test/java/com/github/sbt/avro/test/TestSpecificRecord.java b/bridge/src/test/java/com/github/sbt/avro/test/TestSpecificRecord.java deleted file mode 100644 index 39f6d7f..0000000 --- a/bridge/src/test/java/com/github/sbt/avro/test/TestSpecificRecord.java +++ /dev/null @@ -1,308 +0,0 @@ -/** - * Autogenerated by Avro - * - * DO NOT EDIT DIRECTLY - */ -package com.github.sbt.avro.test; - -import org.apache.avro.specific.SpecificData; -import org.apache.avro.util.Utf8; -import org.apache.avro.message.BinaryMessageEncoder; -import org.apache.avro.message.BinaryMessageDecoder; -import org.apache.avro.message.SchemaStore; - -@org.apache.avro.specific.AvroGenerated -public class TestSpecificRecord extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { - private static final long serialVersionUID = 8383833071851424655L; - - - public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"TestSpecificRecord\",\"namespace\":\"com.github.sbt.avro\",\"fields\":[{\"name\":\"value\",\"type\":\"string\"}]}"); - public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } - - private static final SpecificData MODEL$ = new SpecificData(); - - private static final BinaryMessageEncoder ENCODER = - new BinaryMessageEncoder<>(MODEL$, SCHEMA$); - - private static final BinaryMessageDecoder DECODER = - new BinaryMessageDecoder<>(MODEL$, SCHEMA$); - - /** - * Return the BinaryMessageEncoder instance used by this class. - * @return the message encoder used by this class - */ - public static BinaryMessageEncoder getEncoder() { - return ENCODER; - } - - /** - * Return the BinaryMessageDecoder instance used by this class. - * @return the message decoder used by this class - */ - public static BinaryMessageDecoder getDecoder() { - return DECODER; - } - - /** - * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. - * @param resolver a {@link SchemaStore} used to find schemas by fingerprint - * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore - */ - public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { - return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); - } - - /** - * Serializes this TestSpecificRecord to a ByteBuffer. - * @return a buffer holding the serialized data for this instance - * @throws java.io.IOException if this instance could not be serialized - */ - public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { - return ENCODER.encode(this); - } - - /** - * Deserializes a TestSpecificRecord from a ByteBuffer. - * @param b a byte buffer holding serialized data for an instance of this class - * @return a TestSpecificRecord instance decoded from the given buffer - * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class - */ - public static TestSpecificRecord fromByteBuffer( - java.nio.ByteBuffer b) throws java.io.IOException { - return DECODER.decode(b); - } - - private java.lang.CharSequence value; - - /** - * Default constructor. 
Note that this does not initialize fields - * to their default values from the schema. If that is desired then - * one should use newBuilder(). - */ - public TestSpecificRecord() {} - - /** - * All-args constructor. - * @param value The new value for value - */ - public TestSpecificRecord(java.lang.CharSequence value) { - this.value = value; - } - - @Override - public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } - - @Override - public org.apache.avro.Schema getSchema() { return SCHEMA$; } - - // Used by DatumWriter. Applications should not call. - @Override - public java.lang.Object get(int field$) { - switch (field$) { - case 0: return value; - default: throw new IndexOutOfBoundsException("Invalid index: " + field$); - } - } - - // Used by DatumReader. Applications should not call. - @Override - @SuppressWarnings(value="unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: value = (java.lang.CharSequence)value$; break; - default: throw new IndexOutOfBoundsException("Invalid index: " + field$); - } - } - - /** - * Gets the value of the 'value' field. - * @return The value of the 'value' field. - */ - public java.lang.CharSequence getValue() { - return value; - } - - - /** - * Sets the value of the 'value' field. - * @param value the value to set. - */ - public void setValue(java.lang.CharSequence value) { - this.value = value; - } - - /** - * Creates a new TestSpecificRecord RecordBuilder. - * @return A new TestSpecificRecord RecordBuilder - */ - public static TestSpecificRecord.Builder newBuilder() { - return new TestSpecificRecord.Builder(); - } - - /** - * Creates a new TestSpecificRecord RecordBuilder by copying an existing Builder. - * @param other The existing builder to copy. - * @return A new TestSpecificRecord RecordBuilder - */ - public static TestSpecificRecord.Builder newBuilder(TestSpecificRecord.Builder other) { - if (other == null) { - return new TestSpecificRecord.Builder(); - } else { - return new TestSpecificRecord.Builder(other); - } - } - - /** - * Creates a new TestSpecificRecord RecordBuilder by copying an existing TestSpecificRecord instance. - * @param other The existing instance to copy. - * @return A new TestSpecificRecord RecordBuilder - */ - public static TestSpecificRecord.Builder newBuilder(TestSpecificRecord other) { - if (other == null) { - return new TestSpecificRecord.Builder(); - } else { - return new TestSpecificRecord.Builder(other); - } - } - - /** - * RecordBuilder for TestSpecificRecord instances. - */ - @org.apache.avro.specific.AvroGenerated - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private java.lang.CharSequence value; - - /** Creates a new Builder */ - private Builder() { - super(SCHEMA$, MODEL$); - } - - /** - * Creates a Builder by copying an existing Builder. - * @param other The existing Builder to copy. - */ - private Builder(TestSpecificRecord.Builder other) { - super(other); - if (isValidValue(fields()[0], other.value)) { - this.value = data().deepCopy(fields()[0].schema(), other.value); - fieldSetFlags()[0] = other.fieldSetFlags()[0]; - } - } - - /** - * Creates a Builder by copying an existing TestSpecificRecord instance - * @param other The existing instance to copy. 
- */ - private Builder(TestSpecificRecord other) { - super(SCHEMA$, MODEL$); - if (isValidValue(fields()[0], other.value)) { - this.value = data().deepCopy(fields()[0].schema(), other.value); - fieldSetFlags()[0] = true; - } - } - - /** - * Gets the value of the 'value' field. - * @return The value. - */ - public java.lang.CharSequence getValue() { - return value; - } - - - /** - * Sets the value of the 'value' field. - * @param value The value of 'value'. - * @return This builder. - */ - public TestSpecificRecord.Builder setValue(java.lang.CharSequence value) { - validate(fields()[0], value); - this.value = value; - fieldSetFlags()[0] = true; - return this; - } - - /** - * Checks whether the 'value' field has been set. - * @return True if the 'value' field has been set, false otherwise. - */ - public boolean hasValue() { - return fieldSetFlags()[0]; - } - - - /** - * Clears the value of the 'value' field. - * @return This builder. - */ - public TestSpecificRecord.Builder clearValue() { - value = null; - fieldSetFlags()[0] = false; - return this; - } - - @Override - @SuppressWarnings("unchecked") - public TestSpecificRecord build() { - try { - TestSpecificRecord record = new TestSpecificRecord(); - record.value = fieldSetFlags()[0] ? this.value : (java.lang.CharSequence) defaultValue(fields()[0]); - return record; - } catch (org.apache.avro.AvroMissingFieldException e) { - throw e; - } catch (java.lang.Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } - } - } - - @SuppressWarnings("unchecked") - private static final org.apache.avro.io.DatumWriter - WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); - - @Override public void writeExternal(java.io.ObjectOutput out) - throws java.io.IOException { - WRITER$.write(this, SpecificData.getEncoder(out)); - } - - @SuppressWarnings("unchecked") - private static final org.apache.avro.io.DatumReader - READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); - - @Override public void readExternal(java.io.ObjectInput in) - throws java.io.IOException { - READER$.read(this, SpecificData.getDecoder(in)); - } - - @Override protected boolean hasCustomCoders() { return true; } - - @Override public void customEncode(org.apache.avro.io.Encoder out) - throws java.io.IOException - { - out.writeString(this.value); - - } - - @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) - throws java.io.IOException - { - org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); - if (fieldOrder == null) { - this.value = in.readString(this.value instanceof Utf8 ? (Utf8)this.value : null); - - } else { - for (int i = 0; i < 1; i++) { - switch (fieldOrder[i].pos()) { - case 0: - this.value = in.readString(this.value instanceof Utf8 ? 
(Utf8)this.value : null); - break; - - default: - throw new java.io.IOException("Corrupt ResolvingDecoder."); - } - } - } - } -} \ No newline at end of file diff --git a/bridge/src/test/java/com/github/sbt/avro/test/TestSpecificRecordParent.java b/bridge/src/test/java/com/github/sbt/avro/test/TestSpecificRecordParent.java deleted file mode 100644 index d3331e6..0000000 --- a/bridge/src/test/java/com/github/sbt/avro/test/TestSpecificRecordParent.java +++ /dev/null @@ -1,365 +0,0 @@ -/** - * Autogenerated by Avro - * - * DO NOT EDIT DIRECTLY - */ -package com.github.sbt.avro.test; - -import org.apache.avro.generic.GenericArray; -import org.apache.avro.specific.SpecificData; -import org.apache.avro.util.Utf8; -import org.apache.avro.message.BinaryMessageEncoder; -import org.apache.avro.message.BinaryMessageDecoder; -import org.apache.avro.message.SchemaStore; - -@org.apache.avro.specific.AvroGenerated -public class TestSpecificRecordParent extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { - private static final long serialVersionUID = 7223714509976921291L; - - - public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"TestSpecificRecordParent\",\"namespace\":\"com.github.sbt.avro\",\"fields\":[{\"name\":\"child\",\"type\":{\"type\":\"record\",\"name\":\"TestSpecificRecord\",\"fields\":[{\"name\":\"value\",\"type\":\"string\"}]}}]}"); - public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } - - private static final SpecificData MODEL$ = new SpecificData(); - - private static final BinaryMessageEncoder ENCODER = - new BinaryMessageEncoder<>(MODEL$, SCHEMA$); - - private static final BinaryMessageDecoder DECODER = - new BinaryMessageDecoder<>(MODEL$, SCHEMA$); - - /** - * Return the BinaryMessageEncoder instance used by this class. - * @return the message encoder used by this class - */ - public static BinaryMessageEncoder getEncoder() { - return ENCODER; - } - - /** - * Return the BinaryMessageDecoder instance used by this class. - * @return the message decoder used by this class - */ - public static BinaryMessageDecoder getDecoder() { - return DECODER; - } - - /** - * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. - * @param resolver a {@link SchemaStore} used to find schemas by fingerprint - * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore - */ - public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { - return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); - } - - /** - * Serializes this TestSpecificRecordParent to a ByteBuffer. - * @return a buffer holding the serialized data for this instance - * @throws java.io.IOException if this instance could not be serialized - */ - public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { - return ENCODER.encode(this); - } - - /** - * Deserializes a TestSpecificRecordParent from a ByteBuffer. 
- * @param b a byte buffer holding serialized data for an instance of this class - * @return a TestSpecificRecordParent instance decoded from the given buffer - * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class - */ - public static TestSpecificRecordParent fromByteBuffer( - java.nio.ByteBuffer b) throws java.io.IOException { - return DECODER.decode(b); - } - - private com.github.sbt.avro.test.TestSpecificRecord child; - - /** - * Default constructor. Note that this does not initialize fields - * to their default values from the schema. If that is desired then - * one should use newBuilder(). - */ - public TestSpecificRecordParent() {} - - /** - * All-args constructor. - * @param child The new value for child - */ - public TestSpecificRecordParent(com.github.sbt.avro.test.TestSpecificRecord child) { - this.child = child; - } - - @Override - public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } - - @Override - public org.apache.avro.Schema getSchema() { return SCHEMA$; } - - // Used by DatumWriter. Applications should not call. - @Override - public java.lang.Object get(int field$) { - switch (field$) { - case 0: return child; - default: throw new IndexOutOfBoundsException("Invalid index: " + field$); - } - } - - // Used by DatumReader. Applications should not call. - @Override - @SuppressWarnings(value="unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: child = (com.github.sbt.avro.test.TestSpecificRecord)value$; break; - default: throw new IndexOutOfBoundsException("Invalid index: " + field$); - } - } - - /** - * Gets the value of the 'child' field. - * @return The value of the 'child' field. - */ - public com.github.sbt.avro.test.TestSpecificRecord getChild() { - return child; - } - - - /** - * Sets the value of the 'child' field. - * @param value the value to set. - */ - public void setChild(com.github.sbt.avro.test.TestSpecificRecord value) { - this.child = value; - } - - /** - * Creates a new TestSpecificRecordParent RecordBuilder. - * @return A new TestSpecificRecordParent RecordBuilder - */ - public static com.github.sbt.avro.test.TestSpecificRecordParent.Builder newBuilder() { - return new com.github.sbt.avro.test.TestSpecificRecordParent.Builder(); - } - - /** - * Creates a new TestSpecificRecordParent RecordBuilder by copying an existing Builder. - * @param other The existing builder to copy. - * @return A new TestSpecificRecordParent RecordBuilder - */ - public static com.github.sbt.avro.test.TestSpecificRecordParent.Builder newBuilder(com.github.sbt.avro.test.TestSpecificRecordParent.Builder other) { - if (other == null) { - return new com.github.sbt.avro.test.TestSpecificRecordParent.Builder(); - } else { - return new com.github.sbt.avro.test.TestSpecificRecordParent.Builder(other); - } - } - - /** - * Creates a new TestSpecificRecordParent RecordBuilder by copying an existing TestSpecificRecordParent instance. - * @param other The existing instance to copy. - * @return A new TestSpecificRecordParent RecordBuilder - */ - public static com.github.sbt.avro.test.TestSpecificRecordParent.Builder newBuilder(com.github.sbt.avro.test.TestSpecificRecordParent other) { - if (other == null) { - return new com.github.sbt.avro.test.TestSpecificRecordParent.Builder(); - } else { - return new com.github.sbt.avro.test.TestSpecificRecordParent.Builder(other); - } - } - - /** - * RecordBuilder for TestSpecificRecordParent instances. 
- */ - @org.apache.avro.specific.AvroGenerated - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private com.github.sbt.avro.test.TestSpecificRecord child; - private com.github.sbt.avro.test.TestSpecificRecord.Builder childBuilder; - - /** Creates a new Builder */ - private Builder() { - super(SCHEMA$, MODEL$); - } - - /** - * Creates a Builder by copying an existing Builder. - * @param other The existing Builder to copy. - */ - private Builder(com.github.sbt.avro.test.TestSpecificRecordParent.Builder other) { - super(other); - if (isValidValue(fields()[0], other.child)) { - this.child = data().deepCopy(fields()[0].schema(), other.child); - fieldSetFlags()[0] = other.fieldSetFlags()[0]; - } - if (other.hasChildBuilder()) { - this.childBuilder = com.github.sbt.avro.test.TestSpecificRecord.newBuilder(other.getChildBuilder()); - } - } - - /** - * Creates a Builder by copying an existing TestSpecificRecordParent instance - * @param other The existing instance to copy. - */ - private Builder(com.github.sbt.avro.test.TestSpecificRecordParent other) { - super(SCHEMA$, MODEL$); - if (isValidValue(fields()[0], other.child)) { - this.child = data().deepCopy(fields()[0].schema(), other.child); - fieldSetFlags()[0] = true; - } - this.childBuilder = null; - } - - /** - * Gets the value of the 'child' field. - * @return The value. - */ - public com.github.sbt.avro.test.TestSpecificRecord getChild() { - return child; - } - - - /** - * Sets the value of the 'child' field. - * @param value The value of 'child'. - * @return This builder. - */ - public com.github.sbt.avro.test.TestSpecificRecordParent.Builder setChild(com.github.sbt.avro.test.TestSpecificRecord value) { - validate(fields()[0], value); - this.childBuilder = null; - this.child = value; - fieldSetFlags()[0] = true; - return this; - } - - /** - * Checks whether the 'child' field has been set. - * @return True if the 'child' field has been set, false otherwise. - */ - public boolean hasChild() { - return fieldSetFlags()[0]; - } - - /** - * Gets the Builder instance for the 'child' field and creates one if it doesn't exist yet. - * @return This builder. - */ - public com.github.sbt.avro.test.TestSpecificRecord.Builder getChildBuilder() { - if (childBuilder == null) { - if (hasChild()) { - setChildBuilder(com.github.sbt.avro.test.TestSpecificRecord.newBuilder(child)); - } else { - setChildBuilder(com.github.sbt.avro.test.TestSpecificRecord.newBuilder()); - } - } - return childBuilder; - } - - /** - * Sets the Builder instance for the 'child' field - * @param value The builder instance that must be set. - * @return This builder. - */ - - public com.github.sbt.avro.test.TestSpecificRecordParent.Builder setChildBuilder(com.github.sbt.avro.test.TestSpecificRecord.Builder value) { - clearChild(); - childBuilder = value; - return this; - } - - /** - * Checks whether the 'child' field has an active Builder instance - * @return True if the 'child' field has an active Builder instance - */ - public boolean hasChildBuilder() { - return childBuilder != null; - } - - /** - * Clears the value of the 'child' field. - * @return This builder. 
- */ - public com.github.sbt.avro.test.TestSpecificRecordParent.Builder clearChild() { - child = null; - childBuilder = null; - fieldSetFlags()[0] = false; - return this; - } - - @Override - @SuppressWarnings("unchecked") - public TestSpecificRecordParent build() { - try { - TestSpecificRecordParent record = new TestSpecificRecordParent(); - if (childBuilder != null) { - try { - record.child = this.childBuilder.build(); - } catch (org.apache.avro.AvroMissingFieldException e) { - e.addParentField(record.getSchema().getField("child")); - throw e; - } - } else { - record.child = fieldSetFlags()[0] ? this.child : (com.github.sbt.avro.test.TestSpecificRecord) defaultValue(fields()[0]); - } - return record; - } catch (org.apache.avro.AvroMissingFieldException e) { - throw e; - } catch (java.lang.Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } - } - } - - @SuppressWarnings("unchecked") - private static final org.apache.avro.io.DatumWriter - WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); - - @Override public void writeExternal(java.io.ObjectOutput out) - throws java.io.IOException { - WRITER$.write(this, SpecificData.getEncoder(out)); - } - - @SuppressWarnings("unchecked") - private static final org.apache.avro.io.DatumReader - READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); - - @Override public void readExternal(java.io.ObjectInput in) - throws java.io.IOException { - READER$.read(this, SpecificData.getDecoder(in)); - } - - @Override protected boolean hasCustomCoders() { return true; } - - @Override public void customEncode(org.apache.avro.io.Encoder out) - throws java.io.IOException - { - this.child.customEncode(out); - - } - - @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) - throws java.io.IOException - { - org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); - if (fieldOrder == null) { - if (this.child == null) { - this.child = new com.github.sbt.avro.test.TestSpecificRecord(); - } - this.child.customDecode(in); - - } else { - for (int i = 0; i < 1; i++) { - switch (fieldOrder[i].pos()) { - case 0: - if (this.child == null) { - this.child = new com.github.sbt.avro.test.TestSpecificRecord(); - } - this.child.customDecode(in); - break; - - default: - throw new java.io.IOException("Corrupt ResolvingDecoder."); - } - } - } - } -} \ No newline at end of file diff --git a/bridge/src/test/resources/avro/test_records.avsc b/bridge/src/test/resources/avro/test_records.avsc index b6a9c04..d1b654c 100644 --- a/bridge/src/test/resources/avro/test_records.avsc +++ b/bridge/src/test/resources/avro/test_records.avsc @@ -1,24 +1,11 @@ -[ - { - "name": "TestSpecificRecord", - "namespace": "com.github.sbt.avro", - "type": "record", - "fields": [ - { - "name": "value", - "type": "string" - } - ] - }, - { - "name": "TestSpecificRecordParent", - "namespace": "com.github.sbt.avro", - "type": "record", - "fields": [ - { - "name": "child", - "type": "TestSpecificRecord" - } - ] - } -] \ No newline at end of file +{ + "name": "TestSpecificRecordParent", + "namespace": "com.github.sbt.avro", + "type": "record", + "fields": [ + { + "name": "child", + "type": "TestSpecificRecord" + } + ] +} diff --git a/bridge/src/test/scala/com/github/sbt/avro/AvscFilesCompilerSpec.scala b/bridge/src/test/scala/com/github/sbt/avro/AvscFilesCompilerSpec.scala deleted file mode 100644 index 038930d..0000000 --- a/bridge/src/test/scala/com/github/sbt/avro/AvscFilesCompilerSpec.scala +++ /dev/null @@ -1,107 +0,0 
@@ -package com.github.sbt.avro - -import com.github.sbt.avro.test.{TestSpecificRecord, TestSpecificRecordParent} -import org.apache.avro.Schema -import org.apache.avro.compiler.specific.SpecificCompiler.FieldVisibility -import org.apache.avro.generic.GenericData.StringType -import org.apache.avro.specific.SpecificRecord -import org.specs2.mutable.Specification - -import java.io.File -import java.nio.file.Files -import scala.collection.JavaConverters._ - -class AvscFilesCompilerSpec extends Specification { - val sourceDir = new File(getClass.getClassLoader.getResource("avro").toURI) - - val targetDir = Files.createTempDirectory("sbt-avro-compiler-bridge").toFile - val packageDir = new File(targetDir, "com/github/sbt/avro/test") - - val compiler = new AvscFilesCompiler(() => new Schema.Parser()) - compiler.setUseNamespace(false) - compiler.setStringType(StringType.CharSequence) - compiler.setFieldVisibility(FieldVisibility.PRIVATE) - compiler.setEnableDecimalLogicalType(true) - compiler.setGettersReturnOptional(true) - compiler.setOptionalGettersForNullableFieldsOnly(true) - compiler.setCreateSetters(true) - compiler.setTemplateDirectory("/org/apache/avro/compiler/specific/templates/java/classic/") - - "It should be possible to compile types depending on others if source files are provided in right order" >> { - val fullyQualifiedNames = Seq( - new File(sourceDir, "a.avsc"), - new File(sourceDir, "b.avsc"), - new File(sourceDir, "c.avsc"), - new File(sourceDir, "d.avsc"), - new File(sourceDir, "e.avsc") - ) - - val simpleNames = Seq( - new File(sourceDir, "_a.avsc"), - new File(sourceDir, "_b.avsc"), - new File(sourceDir, "_c.avsc"), - new File(sourceDir, "_d.avsc"), - new File(sourceDir, "_e.avsc") - ) - - val sourceFiles = fullyQualifiedNames ++ simpleNames - - val aJavaFile = new File(packageDir, "A.java") - val bJavaFile = new File(packageDir, "B.java") - val cJavaFile = new File(packageDir, "C.java") - val dJavaFile = new File(packageDir, "D.java") - val eJavaFile = new File(packageDir, "E.java") - - val _aJavaFile = new File(packageDir, "_A.java") - val _bJavaFile = new File(packageDir, "_B.java") - val _cJavaFile = new File(packageDir, "_C.java") - val _dJavaFile = new File(packageDir, "_D.java") - val _eJavaFile = new File(packageDir, "_E.java") - - aJavaFile.delete() - bJavaFile.delete() - cJavaFile.delete() - dJavaFile.delete() - eJavaFile.delete() - - _aJavaFile.delete() - _bJavaFile.delete() - _cJavaFile.delete() - _dJavaFile.delete() - _eJavaFile.delete() - - val refs = sourceFiles.map(s => new AvroFileRef(sourceDir, s.getName)) - compiler.compileFiles(refs.toSet.asJava, targetDir) - - aJavaFile.isFile must beTrue - bJavaFile.isFile must beTrue - cJavaFile.isFile must beTrue - dJavaFile.isFile must beTrue - eJavaFile.isFile must beTrue - - _aJavaFile.isFile must beTrue - _bJavaFile.isFile must beTrue - _cJavaFile.isFile must beTrue - _dJavaFile.isFile must beTrue - _eJavaFile.isFile must beTrue - } - - "It should be possible to compile types depending on others if classes are provided in right order" >> { - // TestSpecificRecordParent and TestSpecificRecord were previously generated from test_records.avsc - compiler.compileClasses( - Set[Class[_ <: SpecificRecord]]( - // put parent 1st - classOf[TestSpecificRecordParent], - classOf[TestSpecificRecord] - ).asJava, - targetDir - ) - - val record = new File(packageDir, "TestSpecificRecord.java") - val recordParent = new File(packageDir, "TestSpecificRecordParent.java") - - record.isFile must beTrue - recordParent.isFile must 
beTrue - } - -} diff --git a/bridge/src/test/scala/com/github/sbt/avro/AvscFilesParserSpec.scala b/bridge/src/test/scala/com/github/sbt/avro/AvscFilesParserSpec.scala new file mode 100644 index 0000000..1b4924f --- /dev/null +++ b/bridge/src/test/scala/com/github/sbt/avro/AvscFilesParserSpec.scala @@ -0,0 +1,76 @@ +package com.github.sbt.avro + +import org.apache.avro.Schema +import org.specs2.mutable.Specification + +import java.io.File +import java.nio.file.Files +import scala.collection.JavaConverters._ + +class AvscFilesParserSpec extends Specification { + val sourceDir = new File(getClass.getClassLoader.getResource("avro").toURI) + + val targetDir = Files.createTempDirectory("sbt-avro-compiler-bridge").toFile + val packageDir = new File(targetDir, "com/github/sbt/avro/test") + + "It should be possible to compile types depending on others if source files are provided in right order" >> { + val parser = new AvscFilesParser() + val fullyQualifiedNames = Seq( + new AvroFileRef(sourceDir, "a.avsc"), + new AvroFileRef(sourceDir, "b.avsc"), + new AvroFileRef(sourceDir, "c.avsc"), + new AvroFileRef(sourceDir, "d.avsc"), + new AvroFileRef(sourceDir, "e.avsc") + ) + + val simpleNames = Seq( + new AvroFileRef(sourceDir, "_a.avsc"), + new AvroFileRef(sourceDir, "_b.avsc"), + new AvroFileRef(sourceDir, "_c.avsc"), + new AvroFileRef(sourceDir, "_d.avsc"), + new AvroFileRef(sourceDir, "_e.avsc") + ) + + val sourceFiles = fullyQualifiedNames ++ simpleNames + val schemas = parser.parseFiles(sourceFiles.asJava) + val names = schemas.asScala.values.map(_.getFullName) + names must contain( + exactly( + "com.github.sbt.avro.test.A", + "com.github.sbt.avro.test._A", + "com.github.sbt.avro.test.B", + "com.github.sbt.avro.test._B", + "com.github.sbt.avro.test.C", + "com.github.sbt.avro.test._C", + "com.github.sbt.avro.test.D", + "com.github.sbt.avro.test._D", + "com.github.sbt.avro.test.E", + "com.github.sbt.avro.test._E" + ) + ) + } + + "It should be possible to compile types depending on others if classes" >> { + val parser = new AvscFilesParser() + // TestSpecificRecordParent depends on TestSpecificRecord + val dependant = new Schema.Parser().parse( + """{ + | "name": "TestSpecificRecord", + | "namespace": "com.github.sbt.avro", + | "type": "record", + | "fields": [ + | { + | "name": "value", + | "type": "string" + | } + | ] + |}""".stripMargin + ) + val parent = new AvroFileRef(sourceDir, "test_records.avsc") + parser.addTypes(Seq(dependant).asJava) + val schemas = parser.parseFiles(Seq(parent).asJava) + val names = schemas.asScala.values.map(_.getFullName) + names must contain(exactly("com.github.sbt.avro.TestSpecificRecordParent")) + } + +} diff --git a/plugin/src/main/scala/com/github/sbt/avro/SbtAvro.scala b/plugin/src/main/scala/com/github/sbt/avro/SbtAvro.scala index e33d909..3dab52e 100644 --- a/plugin/src/main/scala/com/github/sbt/avro/SbtAvro.scala +++ b/plugin/src/main/scala/com/github/sbt/avro/SbtAvro.scala @@ -258,8 +258,8 @@ object SbtAvro extends AutoPlugin { ) try { compiler.recompile(recs.toArray, outDir) - compiler.compileIdls(avdls.toArray, outDir) compiler.compileAvscs(avscs.toArray, outDir) + compiler.compileIdls(avdls.toArray, outDir) compiler.compileAvprs(avprs.toArray, outDir) (outDir ** SbtAvro.JavaFileFilter).get().toSet diff --git a/plugin/src/sbt-test/sbt-avro/avscparser/parser/src/main/java/com/github/sbt/avro/CustomAvroCompiler.java b/plugin/src/sbt-test/sbt-avro/avscparser/parser/src/main/java/com/github/sbt/avro/CustomAvroCompiler.java index 4a8d578..d98c270 100644 --- 
a/plugin/src/sbt-test/sbt-avro/avscparser/parser/src/main/java/com/github/sbt/avro/CustomAvroCompiler.java
+++ b/plugin/src/sbt-test/sbt-avro/avscparser/parser/src/main/java/com/github/sbt/avro/CustomAvroCompiler.java
@@ -8,16 +8,18 @@
 public class CustomAvroCompiler extends AvroCompilerBridge {

-  @Override
-  protected Schema.Parser createParser() {
-    Schema.Parser parser = new Schema.Parser();
-    parser.setValidateDefaults(false);
-    Schema externalSchema = SchemaBuilder
-      .enumeration("B")
-      .namespace("com.github.sbt.avro.test")
-      .symbols("B1");
-    parser.addTypes(Collections.singletonList(externalSchema));
-    return parser;
-  }
+  private static final Schema EXTERNAL_SCHEMA = SchemaBuilder
+    .enumeration("B")
+    .namespace("com.github.sbt.avro.test")
+    .symbols("B1");
+
+  public CustomAvroCompiler() {
+    super();
+    this.parser = new AvscFilesParser(() -> {
+      Schema.Parser p = new Schema.Parser();
+      p.addTypes(Collections.singletonList(EXTERNAL_SCHEMA));
+      p.setValidateDefaults(false);
+      return p;
+    });
+  }
 }
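
Note (not part of the patch): a minimal usage sketch of the stateful behavior this change
introduces. AvroCompilerBridge now keeps a single AvscFilesParser across calls: recompile()
registers the schemas of already-generated SpecificRecord classes via parser.addTypes(), and
compileAvscs() parses .avsc files with that same parser, so schema files may reference
recompiled records by full name. The class com.example.ExistingRecord and the paths below are
hypothetical placeholders.

import java.io.File;

public class StatefulBridgeSketch {
  public static void main(String[] args) throws Exception {
    AvroCompilerBridge bridge = new AvroCompilerBridge();
    File out = new File("target/generated-sources/avro");

    // Step 1: recompiling a previously generated SpecificRecord class seeds the shared
    // AvscFilesParser context with its schema (parser.addTypes(...) inside recompile()).
    bridge.recompile(new Class<?>[] { com.example.ExistingRecord.class }, out);

    // Step 2: .avsc files compiled afterwards can reference that record type by full name,
    // because compileAvscs() parses them with the same stateful parser.
    bridge.compileAvscs(new AvroFileRef[] {
        new AvroFileRef(new File("src/main/avro"), "dependent_record.avsc")
    }, out);
  }
}

AvscFilesParser.parseFiles() also keeps retrying files that failed to parse until no further
progress is made, so .avsc files that depend on one another can be passed in any order.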