Use of org.apache.avro.util.Utf8 in project flink by apache.
The class DataInputDecoder, method readObject.
// --------------------------------------------------------------------------------------------
// serialization
// --------------------------------------------------------------------------------------------
private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
    // Restore all non-transient fields first.
    s.defaultReadObject();
    // Rebuild transient state: the input stream must be re-attached by the
    // caller before use, and the reusable Utf8 scratch object is recreated.
    this.in = null;
    this.stringDecoder = new Utf8();
}
Use of org.apache.avro.util.Utf8 in project crunch by cloudera.
The class AvrosTest, method testTableOf.
@Test
@SuppressWarnings("rawtypes")
public void testTableOf() throws Exception {
    AvroType at = Avros.tableOf(Avros.strings(), Avros.strings());
    Pair<String, String> expected = Pair.of("a", "b");
    org.apache.avro.mapred.Pair avroPair = new org.apache.avro.mapred.Pair(at.getSchema());
    avroPair.put(0, new Utf8("a"));
    avroPair.put(1, new Utf8("b"));
    initialize(at);
    // Avro pair -> Crunch pair on the input side.
    assertEquals(expected, at.getInputMapFn().map(avroPair));
    // Crunch pair -> Avro pair on the output side. Key and value are compared
    // individually; TODO switch to comparing whole pairs once the
    // o.a.a.m.Pair.equals issue is resolved.
    org.apache.avro.mapred.Pair roundTripped = (org.apache.avro.mapred.Pair) at.getOutputMapFn().map(expected);
    assertEquals(avroPair.key(), roundTripped.key());
    assertEquals(avroPair.value(), roundTripped.value());
}
Use of org.apache.avro.util.Utf8 in project crunch by cloudera.
The class AvrosTest, method testTupleN.
@Test
@SuppressWarnings("rawtypes")
public void testTupleN() throws Exception {
    AvroType at = Avros.tuples(Avros.strings(), Avros.strings(), Avros.strings(), Avros.strings(), Avros.strings());
    TupleN tuple = new TupleN("a", "b", "c", "d", "e");
    GenericData.Record record = new GenericData.Record(at.getSchema());
    // Populate the Avro record with the same five string fields, wrapped
    // as Utf8 the way Avro decodes them.
    String[] fields = {"a", "b", "c", "d", "e"};
    for (int i = 0; i < fields.length; i++) {
        record.put(i, new Utf8(fields[i]));
    }
    testInputOutputFn(at, tuple, record);
}
Use of org.apache.avro.util.Utf8 in project druid by druid-io.
The class DruidParquetInputTest, method test.
@Test
public void test() throws IOException, InterruptedException {
    // Build the indexer config from the example spec and push it into a Hadoop job.
    HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromFile(new File("example/wikipedia_hadoop_parquet_job.json"));
    Job job = Job.getInstance(new Configuration());
    config.intoConfiguration(job);
    GenericRecord data = getFirstRecord(job, "example/wikipedia_list.parquet");
    // JUnit's assertEquals takes (expected, actual); the original calls had the
    // arguments swapped, which yields misleading failure messages.
    // "added" is not in the read schema, so the field comes back null.
    assertEquals(null, data.get("added"));
    // Avro decodes strings as Utf8, so compare against a Utf8 wrapper.
    assertEquals(new Utf8("Gypsy Danger"), data.get("page"));
    // The parser should surface the same value as a plain String dimension.
    assertEquals("Gypsy Danger", config.getParser().parse(data).getDimension("page").get(0));
}
Use of org.apache.avro.util.Utf8 in project haivvreo by jghoman.
The class AvroDeserializer, method deserializeMap.
/**
 * Converts an Avro map datum into a {@code Map<String, Object>} whose values
 * have been deserialized according to the map's value schema.
 *
 * @param datum      the Avro-decoded map; keys are {@link Utf8} instances
 * @param mapSchema  Avro schema of the map, used for its value type
 * @param columnType Hive type info for the map column
 * @return a map keyed by plain Strings with deserialized values
 * @throws HaivvreoException if value deserialization fails
 */
private Object deserializeMap(Object datum, Schema mapSchema, MapTypeInfo columnType) throws HaivvreoException {
    // Avro only allows maps with Strings for keys, so we only have to worry
    // about deserializing the values.
    // NOTE(review): Hashtable rejects null values; if worker() can return null
    // for a nullable value schema this will NPE — confirm, and consider HashMap.
    Map<String, Object> map = new Hashtable<String, Object>();
    @SuppressWarnings("unchecked")
    Map<Utf8, Object> mapDatum = (Map<Utf8, Object>) datum;
    Schema valueSchema = mapSchema.getValueType();
    TypeInfo valueTypeInfo = columnType.getMapValueTypeInfo();
    // Iterate entries directly instead of keySet() + get(): one lookup per
    // entry and no reliance on key hashing twice.
    for (Map.Entry<Utf8, Object> entry : mapDatum.entrySet()) {
        map.put(entry.getKey().toString(), worker(entry.getValue(), valueSchema, valueTypeInfo));
    }
    return map;
}
Aggregations