Use of org.apache.avro.generic.GenericRecord in project druid by druid-io,
from the class InlineSchemaAvroBytesDecoderTest, method testParse.
// Round-trips a known Avro record through InlineSchemaAvroBytesDecoder and
// checks that the decoded record preserves the "id" field.
@Test
public void testParse() throws Exception {
    final GenericRecord expected = AvroStreamInputRowParserTest.buildSomeAvroDatum();
    final Schema schema = SomeAvroDatum.getClassSchema();

    // Serialize the record as raw Avro binary (no schema header).
    final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    datumWriter.write(expected, EncoderFactory.get().directBinaryEncoder(bytes, null));

    // Decode and compare the field under test.
    final GenericRecord decoded =
        new InlineSchemaAvroBytesDecoder(schema).parse(ByteBuffer.wrap(bytes.toByteArray()));
    Assert.assertEquals(expected.get("id"), decoded.get("id"));
}
Use of org.apache.avro.generic.GenericRecord in project druid by druid-io,
from the class SchemaRegistryBasedAvroBytesDecoderTest, method testParse.
// Frames an Avro payload in the schema-registry wire format — magic byte 0,
// 4-byte schema id, then the datum bytes — and verifies that
// SchemaRegistryBasedAvroBytesDecoder resolves the schema and decodes it.
@Test
public void testParse() throws Exception {
    // Given: the registry resolves id 1234 to the test schema.
    when(registry.getByID(eq(1234))).thenReturn(SomeAvroDatum.getClassSchema());
    final GenericRecord expected = AvroStreamInputRowParserTest.buildSomeAvroDatum();
    final Schema schema = SomeAvroDatum.getClassSchema();
    final byte[] payload = getAvroDatum(schema, expected);

    // When: build the framed buffer and rewind so parse() reads from the start.
    final ByteBuffer frame = ByteBuffer.allocate(payload.length + 5)
        .put((byte) 0)
        .putInt(1234)
        .put(payload);
    frame.rewind();
    final GenericRecord decoded = new SchemaRegistryBasedAvroBytesDecoder(registry).parse(frame);

    // Then
    Assert.assertEquals(expected.get("id"), decoded.get("id"));
}
Use of org.apache.avro.generic.GenericRecord in project druid by druid-io,
from the class SchemaRegistryBasedAvroBytesDecoderTest, method getAvroDatum.
// Serializes the given record to raw Avro binary bytes using the supplied schema.
// directBinaryEncoder writes straight through to the stream, so no flush is needed.
byte[] getAvroDatum(Schema schema, GenericRecord someAvroDatum) throws IOException {
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    datumWriter.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(buffer, null));
    return buffer.toByteArray();
}
Use of org.apache.avro.generic.GenericRecord in project cdk-examples by cloudera,
from the class CreateHCatalogUserDatasetGeneric, method run.
// Creates a "users" dataset in the Hive-backed repository and populates it
// with 100 generic records carrying a username, creation timestamp, and a
// randomly chosen favorite color.
@Override
public int run(String[] args) throws Exception {
    // Construct an HCatalog dataset repository using managed Hive tables
    DatasetRepository repo = DatasetRepositories.open("repo:hive");

    // Create a dataset of users with the Avro schema in the repository
    DatasetDescriptor descriptor =
        new DatasetDescriptor.Builder().schemaUri("resource:user.avsc").build();
    Dataset<GenericRecord> users = repo.create("users", descriptor);

    // Get a writer for the dataset and write some users to it
    DatasetWriter<GenericRecord> writer = users.newWriter();
    try {
        writer.open();
        final String[] colors = { "green", "blue", "pink", "brown", "yellow" };
        final Random random = new Random();
        final GenericRecordBuilder recordBuilder =
            new GenericRecordBuilder(descriptor.getSchema());
        for (int i = 0; i < 100; i++) {
            writer.write(recordBuilder
                .set("username", "user-" + i)
                .set("creationDate", System.currentTimeMillis())
                .set("favoriteColor", colors[random.nextInt(colors.length)])
                .build());
        }
    } finally {
        // Always release the writer, even if a write fails mid-loop.
        writer.close();
    }
    return 0;
}
Use of org.apache.avro.generic.GenericRecord in project cdk-examples by cloudera,
from the class ReadHCatalogUserDatasetGeneric, method run.
// Loads the "users" dataset from the Hive-backed repository and prints every
// record it contains to standard output.
@Override
public int run(String[] args) throws Exception {
    // Construct an HCatalog dataset repository using managed Hive tables
    DatasetRepository repository = DatasetRepositories.open("repo:hive");

    // Load the users dataset
    Dataset<GenericRecord> userDataset = repository.load("users");

    // Get a reader for the dataset and read all the users
    DatasetReader<GenericRecord> userReader = userDataset.newReader();
    try {
        userReader.open();
        for (GenericRecord user : userReader) {
            System.out.println(user);
        }
    } finally {
        // Always release the reader, even if iteration throws.
        userReader.close();
    }
    return 0;
}
Aggregations