Use of org.apache.druid.data.input.impl.NestedInputFormat in the druid project by druid-io.
Class ProtobufInputFormatTest, method testSerde.
@Test
public void testSerde() throws IOException {
  // A ProtobufInputFormat written out as JSON and read back via the
  // NestedInputFormat base type must compare equal to the original.
  final ProtobufInputFormat original = new ProtobufInputFormat(flattenSpec, decoder);
  final String json = jsonMapper.writeValueAsString(original);
  final NestedInputFormat deserialized = jsonMapper.readValue(json, NestedInputFormat.class);
  Assert.assertEquals(original, deserialized);
}
Use of org.apache.druid.data.input.impl.NestedInputFormat in the druid project by druid-io.
Class AvroStreamInputFormatTest, method testSerdeNonDefault.
@Test
public void testSerdeNonDefault() throws IOException {
  // Round-trip through JSON with both boolean options set to true
  // (the non-default configuration) and verify equality.
  final Repository schemaRepo = new Avro1124RESTRepositoryClientWrapper("http://github.io");
  final AvroStreamInputFormat original = new AvroStreamInputFormat(
      flattenSpec,
      new SchemaRepoBasedAvroBytesDecoder<>(new Avro1124SubjectAndIdConverter(TOPIC), schemaRepo),
      true,
      true
  );
  final String json = jsonMapper.writeValueAsString(original);
  final NestedInputFormat deserialized = jsonMapper.readValue(json, NestedInputFormat.class);
  Assert.assertEquals(original, deserialized);
}
Use of org.apache.druid.data.input.impl.NestedInputFormat in the druid project by druid-io.
Class AvroStreamInputFormatTest, method testSerde.
@Test
public void testSerde() throws IOException {
  // Round-trip through JSON with both boolean options left false
  // (the default configuration) and verify equality.
  final Repository schemaRepo = new Avro1124RESTRepositoryClientWrapper("http://github.io");
  final AvroStreamInputFormat original = new AvroStreamInputFormat(
      flattenSpec,
      new SchemaRepoBasedAvroBytesDecoder<>(new Avro1124SubjectAndIdConverter(TOPIC), schemaRepo),
      false,
      false
  );
  final String json = jsonMapper.writeValueAsString(original);
  final NestedInputFormat deserialized = jsonMapper.readValue(json, NestedInputFormat.class);
  Assert.assertEquals(original, deserialized);
}
Use of org.apache.druid.data.input.impl.NestedInputFormat in the druid project by druid-io.
Class AvroStreamInputFormatTest, method testParse.
@Test
// Round-trips the input format through JSON, then feeds the DESERIALIZED format a
// schema-repo-encoded Avro record and verifies the parsed InputRow. Statement order
// matters: the repository used for encoding is re-fetched from the deserialized
// decoder so that writer and reader share the same schema registry.
public void testParse() throws SchemaValidationException, IOException {
Repository repository = new InMemoryRepository(null);
AvroStreamInputFormat inputFormat = new AvroStreamInputFormat(flattenSpec, new SchemaRepoBasedAvroBytesDecoder<>(new Avro1124SubjectAndIdConverter(TOPIC), repository), false, false);
// Serde round trip; inputFormat2 (not inputFormat) is used for reading below.
NestedInputFormat inputFormat2 = jsonMapper.readValue(jsonMapper.writeValueAsString(inputFormat), NestedInputFormat.class);
// Swap in the repository held by the deserialized decoder.
// NOTE(review): raw-type cast on SchemaRepoBasedAvroBytesDecoder — could be
// parameterized; type arguments not visible here, confirm before changing.
repository = ((SchemaRepoBasedAvroBytesDecoder) ((AvroStreamInputFormat) inputFormat2).getAvroBytesDecoder()).getSchemaRepository();
// prepare data
GenericRecord someAvroDatum = buildSomeAvroDatum();
// encode schema id
Avro1124SubjectAndIdConverter converter = new Avro1124SubjectAndIdConverter(TOPIC);
TypedSchemaRepository<Integer, Schema, String> repositoryClient = new TypedSchemaRepository<>(repository, new IntegerConverter(), new AvroSchemaConverter(), new IdentityConverter());
Integer id = repositoryClient.registerSchema(TOPIC, SomeAvroDatum.getClassSchema());
// 4-byte header — presumably the registered schema id written by the converter;
// it precedes the Avro payload in the byte stream. TODO confirm header layout.
ByteBuffer byteBuffer = ByteBuffer.allocate(4);
converter.putSubjectAndId(id, byteBuffer);
ByteArrayOutputStream out = new ByteArrayOutputStream();
out.write(byteBuffer.array());
// encode data
DatumWriter<GenericRecord> writer = new SpecificDatumWriter<>(someAvroDatum.getSchema());
// write avro datum to bytes
writer.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(out, null));
// header + datum together form the entity the reader consumes
final ByteEntity entity = new ByteEntity(ByteBuffer.wrap(out.toByteArray()));
InputRow inputRow = inputFormat2.createReader(new InputRowSchema(timestampSpec, dimensionsSpec, null), entity, null).read().next();
assertInputRowCorrect(inputRow, DIMENSIONS, false);
}
Use of org.apache.druid.data.input.impl.NestedInputFormat in the druid project by druid-io.
Class AvroOCFInputFormatTest, method testSerdeNonDefaults.
@Test
public void testSerdeNonDefaults() throws Exception {
  // Reader schema supplied as a JSON string; note the aliased field
  // ("eventClass" with alias "eventType"). Parsed into a generic map below.
  String schemaStr = "{\n" + " \"namespace\": \"org.apache.druid.data.input\",\n" + " \"name\": \"SomeAvroDatum\",\n" + " \"type\": \"record\",\n" + " \"fields\" : [\n" + " {\"name\":\"timestamp\",\"type\":\"long\"},\n" + " {\"name\":\"someLong\",\"type\":\"long\"}\n," + " {\"name\":\"eventClass\",\"type\":\"string\", \"aliases\": [\"eventType\"]}\n" + " ]\n" + "}";
  final TypeReference<Map<String, Object>> mapTypeRef = new TypeReference<Map<String, Object>>() {
  };
  final Map<String, Object> readerSchema = jsonMapper.readValue(schemaStr, mapTypeRef);
  // Both boolean options set to true (non-default); the format must survive
  // a JSON serialization round trip unchanged.
  final AvroOCFInputFormat original = new AvroOCFInputFormat(jsonMapper, flattenSpec, readerSchema, true, true);
  final String json = jsonMapper.writeValueAsString(original);
  final NestedInputFormat deserialized = jsonMapper.readValue(json, NestedInputFormat.class);
  Assert.assertEquals(original, deserialized);
}
Aggregations