+package com.fasterxml.jackson.dataformat.avro.failing;
+
+import com.fasterxml.jackson.databind.SequenceWriter;
+import com.fasterxml.jackson.dataformat.avro.AvroFactory;
+import com.fasterxml.jackson.dataformat.avro.AvroGenerator;
+import com.fasterxml.jackson.dataformat.avro.AvroMapper;
+import com.fasterxml.jackson.dataformat.avro.AvroSchema;
+import com.fasterxml.jackson.dataformat.avro.AvroTestBase;
+import com.fasterxml.jackson.dataformat.avro.schema.AvroSchemaGenerator;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.io.DatumReader;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.nio.file.Files;
+
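+/**
+ * Reproduces a failure in Avro file (container) output: writing more than one
+ * record with {@link AvroGenerator.Feature#AVRO_FILE_OUTPUT} enabled produces a
+ * file that Apache Avro's {@code DataFileReader} rejects with
+ * "java.io.IOException: Invalid sync!" when reading the second record.
+ */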
+public class FileSerializationTest extends AvroTestBase {
+    public void testFileSerialization() throws Exception {
+        final Employee employee = new Employee();
+        employee.name = "Bobbee";
+        employee.age = 39;
+        employee.emails = new String[] { "[email protected]", "[email protected]" };
+        employee.boss = null;
+
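+        // Build an AvroFactory that uses the Apache decoder and writes Avro file (container) output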
+        final AvroFactory avroFactory = AvroFactory.builderWithApacheDecoder()
+                .enable(AvroGenerator.Feature.AVRO_FILE_OUTPUT)
+                .build();
+        final AvroSchemaGenerator generator = new AvroSchemaGenerator();
+
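+        // Generate an Avro schema from the Employee POJO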
+        final AvroMapper mapper = AvroMapper.builder(avroFactory).build();
+        mapper.acceptJsonFormatVisitor(Employee.class, generator);
+
+        final AvroSchema generatedSchema = generator.getGeneratedSchema();
+
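+        // Temporary file for the container output; deleted when the JVM exits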
+        final File file = Files.createTempFile("employees", ".avro").toFile();
+        file.deleteOnExit();
+
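+        // Serialize with Jackson into an in-memory buffer first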
+        final ByteArrayOutputStream out = new ByteArrayOutputStream();
+        final SequenceWriter writer = mapper.writer(generatedSchema).writeValues(out);
+
+        // Writing more than one entry is what seems to make the output invalid
+        writer.write(employee);
+        writer.write(employee);
+        writer.close();
+
+        // Write the bytes to a file
+        try (FileOutputStream outputStream = new FileOutputStream(file)) {
+            out.writeTo(outputStream);
+        }
+
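+        // Read the file back with plain Apache Avro to verify the container format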
+        final DatumReader<GenericRecord> datumReader = new GenericDatumReader<>(generatedSchema.getAvroSchema());
+
+        @SuppressWarnings("resource")
+        final DataFileReader<GenericRecord> dataFileReader = new DataFileReader<>(file, datumReader);
+
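+        // The first record reads back fine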
+        GenericRecord output = dataFileReader.next();
+        assertNotNull(output);
+        assertEquals(employee.name, output.get("name").toString());
+
+        // This second read currently throws:
+        // org.apache.avro.AvroRuntimeException: java.io.IOException: Invalid sync!
+        output = dataFileReader.next();
+        assertNotNull(output);
+        assertEquals(employee.name, output.get("name").toString());
+    }
+}