Use of org.apache.avro.specific.SpecificDatumWriter in the project crunch by Cloudera.
From the class AvroTypeSortTest, method writeAvroFile.
/**
 * Serializes the given people into {@code avroFile} as an Avro data file.
 *
 * @param people   records to write, in order
 * @param avroFile destination file (truncated if it already exists)
 * @throws IOException if the file cannot be created or a record cannot be written
 */
private void writeAvroFile(List<Person> people, File avroFile) throws IOException {
// try-with-resources closes both the writer and the underlying stream even
// when create/append throws; the original leaked them on any exception.
try (FileOutputStream outputStream = new FileOutputStream(avroFile);
    DataFileWriter<Person> dataFileWriter =
        new DataFileWriter<Person>(new SpecificDatumWriter<Person>(Person.class))) {
dataFileWriter.create(Person.SCHEMA$, outputStream);
for (Person person : people) {
dataFileWriter.append(person);
}
}
}
Use of org.apache.avro.specific.SpecificDatumWriter in the project crunch by Cloudera.
From the class MultiAvroSchemaJoinTest, method setUp.
/**
 * Creates two temp Avro files used by the join tests: one containing three
 * Person records and one containing a single Employee record.
 *
 * @throws Exception if a temp file cannot be created or a record cannot be written
 */
@Before
public void setUp() throws Exception {
this.personFile = File.createTempFile("person", ".avro");
this.employeeFile = File.createTempFile("employee", ".avro");
// try-with-resources closes each writer even if create/append throws;
// the original leaked the writers on any exception.
try (DataFileWriter<Person> pfw =
    new DataFileWriter<Person>(new SpecificDatumWriter<Person>())) {
pfw.create(Person.SCHEMA$, personFile);
pfw.append(newPerson("Josh", 19, "Kate", "Mike"));
pfw.append(newPerson("Kate", 17, "Josh", "Mike"));
pfw.append(newPerson("Mike", 12, "Josh", "Kate"));
}
try (DataFileWriter<Employee> efw =
    new DataFileWriter<Employee>(new SpecificDatumWriter<Employee>())) {
efw.create(Employee.SCHEMA$, employeeFile);
Employee e1 = new Employee();
e1.setName("Kate");
e1.setSalary(100000);
e1.setDepartment("Marketing");
efw.append(e1);
}
}

/** Builds a Person fixture with the given name, age, and sibling names. */
private static Person newPerson(String name, int age, CharSequence... siblingNames) {
Person p = new Person();
p.setName(name);
p.setAge(age);
p.setSiblingnames(ImmutableList.copyOf(siblingNames));
return p;
}
Use of org.apache.avro.specific.SpecificDatumWriter in the project camel by Apache.
From the class AvroDataFormat, method marshal.
/**
 * Serializes the payload {@code graph} onto {@code outputStream} as raw Avro
 * binary, using the configured schema when present or one derived from the
 * payload's class name otherwise.
 *
 * @param exchange     the current exchange (unused here, required by the data-format contract)
 * @param graph        the object to serialize
 * @param outputStream the stream receiving the Avro-encoded bytes
 * @throws Exception if the schema cannot be loaded or encoding fails
 */
public void marshal(Exchange exchange, Object graph, OutputStream outputStream) throws Exception {
// Prefer the explicitly configured schema; fall back to loading one by class name.
Schema schema = (actualSchema == null) ? loadSchema(graph.getClass().getName()) : actualSchema;
DatumWriter<Object> writer = new SpecificDatumWriter<Object>(schema);
Encoder binaryOut = EncoderFactory.get().binaryEncoder(outputStream, null);
writer.write(graph, binaryOut);
// binaryEncoder buffers internally; flush pushes the bytes to outputStream.
binaryOut.flush();
}
Use of org.apache.avro.specific.SpecificDatumWriter in the project gora by Apache.
From the class AvroSerializerUtil, method serializer.
/**
 * Serializes {@code value} to Avro binary bytes, caching one writer per
 * schema full name and reusing a thread-local encoder and output buffer.
 *
 * @param value  the datum to serialize
 * @param schema the Avro schema describing {@code value}
 * @return the Avro-encoded bytes
 * @throws IOException if encoding fails
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public static <T> byte[] serializer(T value, Schema schema) throws IOException {
// One writer per schema full name; constructing a SpecificDatumWriter is
// relatively expensive. NOTE(review): writerMap appears to be a shared map —
// confirm it is a concurrent implementation if this runs multi-threaded.
SpecificDatumWriter writer = writerMap.get(schema.getFullName());
if (writer == null) {
// ignore dirty bits
writer = new SpecificDatumWriter(schema);
writerMap.put(schema.getFullName(), writer);
}
ByteArrayOutputStream bos = new ByteArrayOutputStream();
outputStream.set(bos);
// Pass the thread-local encoder as the reuse argument so it is re-targeted
// at the new stream instead of allocating a fresh one. The original fetched
// the cached encoder but then always created a new encoder with null,
// defeating the cache entirely.
BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(bos, encoders.get());
encoders.set(encoder);
// reset the buffers
ByteArrayOutputStream os = outputStream.get();
os.reset();
writer.write(value, encoder);
// directBinaryEncoder writes through, but flush keeps the contract explicit.
encoder.flush();
byte[] byteValue = os.toByteArray();
return byteValue;
}
Use of org.apache.avro.specific.SpecificDatumWriter in the project sling by Apache.
From the class AvroContentSerializer, method exportToStream.
/**
 * Exports the resources selected by the distribution export filter onto the
 * given stream in Avro data-file format, one AvroShallowResource per node.
 *
 * @param resourceResolver resolver used to look up each filtered path
 * @param options          export options carrying the node/property filters
 * @param outputStream     destination stream for the Avro container file
 * @throws DistributionException if the file header cannot be written or any
 *                               resource fails to serialize
 */
@Override
public void exportToStream(ResourceResolver resourceResolver, DistributionExportOptions options, OutputStream outputStream) throws DistributionException {
DatumWriter<AvroShallowResource> datumWriter = new SpecificDatumWriter<AvroShallowResource>(AvroShallowResource.class);
DataFileWriter<AvroShallowResource> writer = new DataFileWriter<AvroShallowResource>(datumWriter);
try {
writer.create(schema, outputStream);
} catch (IOException e) {
throw new DistributionException(e);
}
try {
DistributionExportFilter filter = options.getFilter();
for (DistributionExportFilter.TreeFilter treeFilter : filter.getNodeFilters()) {
String path = treeFilter.getPath();
Resource resource = resourceResolver.getResource(path);
AvroShallowResource avroShallowResource = getAvroShallowResource(treeFilter, filter.getPropertyFilter(), resource);
writer.append(avroShallowResource);
}
// Flush the DataFileWriter's internal buffer BEFORE flushing the target
// stream. The original flushed outputStream while appended records could
// still sit unbuffered inside the writer (only pushed out by close(),
// which runs after the stream flush).
writer.flush();
outputStream.flush();
} catch (Exception e) {
throw new DistributionException(e);
} finally {
try {
writer.close();
} catch (IOException ignored) {
// best-effort close: data was already flushed above, and a failure
// here must not mask an exception from the export loop
}
}
}
Aggregations