Use of org.apache.avro.util.Utf8 in project voldemort (by voldemort):
method writeVersion0 of class AvroBackwardsCompatibilityTest.
/**
 * Serializes one record under schema {@code s0} (schema version 0), with the
 * "original" field populated, and returns the resulting bytes.
 */
private static byte[] writeVersion0(Schema s0) {
    AvroVersionedGenericSerializer serializer = new AvroVersionedGenericSerializer(s0.toString());
    GenericData.Record rec = new GenericData.Record(s0);
    rec.put("original", new Utf8("Abhinay"));
    return serializer.toBytes(rec);
}
Use of org.apache.avro.util.Utf8 in project voldemort (by voldemort):
method readMultipleClientConfigAvro of class ClientConfigUtil.
/**
 * Parses a string that contains multiple fat client configs in Avro JSON format.
 *
 * @param configAvro input string in Avro JSON format containing the config for
 *        multiple stores
 * @return map of store names to store config properties; on a parse/validation
 *         failure the error is printed and the map built so far is returned
 */
@SuppressWarnings("unchecked")
public static Map<String, Properties> readMultipleClientConfigAvro(String configAvro) {
    Map<String, Properties> mapStoreToProps = Maps.newHashMap();
    try {
        JsonDecoder decoder = new JsonDecoder(CLIENT_CONFIGS_AVRO_SCHEMA, configAvro);
        GenericDatumReader<Object> datumReader = new GenericDatumReader<Object>(CLIENT_CONFIGS_AVRO_SCHEMA);
        Map<Utf8, Map<Utf8, Utf8>> storeConfigs = (Map<Utf8, Map<Utf8, Utf8>>) datumReader.read(null, decoder);
        // Iterate entries directly instead of keySet()+get() to avoid a
        // second lookup per store.
        for (Map.Entry<Utf8, Map<Utf8, Utf8>> storeConfig : storeConfigs.entrySet()) {
            Utf8 storeName = storeConfig.getKey();
            // Fail fast on an invalid store name, before doing per-store work
            // (the original validated only after building the Properties).
            if (storeName == null || storeName.length() == 0) {
                throw new IllegalArgumentException("Invalid store name found!");
            }
            Properties props = new Properties();
            for (Map.Entry<Utf8, Utf8> entry : storeConfig.getValue().entrySet()) {
                props.put(entry.getKey().toString(), entry.getValue().toString());
            }
            mapStoreToProps.put(storeName.toString(), props);
        }
    } catch (Exception e) {
        // NOTE(review): errors are deliberately swallowed here, matching the
        // original best-effort contract — callers receive a partial/empty map
        // with no failure signal. Consider propagating or logging properly.
        e.printStackTrace();
    }
    return mapStoreToProps;
}
Use of org.apache.avro.util.Utf8 in project voldemort (by voldemort):
method testRecordStringSchema of class AvroVersionedGenericSerializertest.
/**
 * Verifies that bytes written under a bare {@code "string"} schema can be read
 * back by a serializer whose schema is a record with one string field — i.e.
 * the two schemas produce interchangeable wire bytes.
 */
public void testRecordStringSchema() {
    String plainSchema = "\"string\"";
    // A record wrapping a single string field.
    String wrappedSchema = "{\"type\": \"record\", \"name\": \"myrec\",\"fields\": [{ \"name\": \"original\", \"type\": \"string\" }]}";
    AvroVersionedGenericSerializer plainSerializer = new AvroVersionedGenericSerializer(plainSchema);
    AvroVersionedGenericSerializer wrappedSerializer = new AvroVersionedGenericSerializer(wrappedSchema);
    // Write with the plain string schema, then read with the record schema.
    byte[] written = plainSerializer.toBytes(new Utf8("abc"));
    Object readBack = wrappedSerializer.toObject(written);
    assertEquals(" should serialize to same value", Record.class, readBack.getClass());
}
Use of org.apache.avro.util.Utf8 in project voldemort (by voldemort):
method testStringSerializer of class JsonTypeSerializerTest.
/**
 * Checks round-tripping of the JSON "string" type: valid strings (and null)
 * survive serialize/deserialize, the deserialized type is {@link String}, and
 * every non-String value is rejected by toBytes.
 */
public void testStringSerializer() {
    String schemaJson = quote("string");
    // Round-trip a plain string and null.
    assertInverse(schemaJson, "asdfasdf d");
    JsonTypeSerializer stringTypeSerializer = getSerializer(schemaJson);
    Object roundTripped = doubleInvert(stringTypeSerializer, "asdf");
    assertEquals("Return type should be string", String.class, roundTripped.getClass());
    assertInverse(schemaJson, null);
    // Non-String inputs — including Avro's Utf8 — must all be rejected.
    assertToBytesFails(schemaJson, 123);
    assertToBytesFails(schemaJson, new Utf8("avro utf8"));
    assertToBytesFails(schemaJson, 'C');
    assertToBytesFails(schemaJson, Arrays.asList(new String[] { "a", "b" }));
    assertToBytesFails(schemaJson, Arrays.asList(new char[] { 'a', 'b' }));
    assertToBytesFails(schemaJson, "abc".getBytes());
    assertToBytesFails(schemaJson, "abc".toCharArray());
}
Use of org.apache.avro.util.Utf8 in project eiger (by wlloyd):
method testThriftToAvroConversion of class CFMetaDataTest.
@Test
public void testThriftToAvroConversion() throws Exception {
    // Build the Thrift column-family definition that will be converted.
    CfDef cfDef = new CfDef();
    cfDef.setDefault_validation_class(AsciiType.class.getCanonicalName());
    cfDef.setComment("Test comment");
    cfDef.setColumn_metadata(columnDefs);
    cfDef.setKeyspace(KEYSPACE);
    cfDef.setName(COLUMN_FAMILY);
    // Convert Thrift to CFMetaData.
    CFMetaData cfMetaData = CFMetaData.fromThrift(cfDef);
    // Hand-build the Avro object we expect the conversion to produce.
    org.apache.cassandra.db.migration.avro.CfDef expected = new org.apache.cassandra.db.migration.avro.CfDef();
    expected.keyspace = new Utf8(KEYSPACE);
    expected.name = new Utf8(COLUMN_FAMILY);
    expected.default_validation_class = new Utf8(cfDef.default_validation_class);
    expected.comment = new Utf8(cfDef.comment);
    expected.column_metadata = new ArrayList<org.apache.cassandra.db.migration.avro.ColumnDef>();
    for (ColumnDef thriftCol : columnDefs) {
        org.apache.cassandra.db.migration.avro.ColumnDef avroCol = new org.apache.cassandra.db.migration.avro.ColumnDef();
        avroCol.name = ByteBufferUtil.clone(thriftCol.name);
        avroCol.validation_class = new Utf8(thriftCol.getValidation_class());
        avroCol.index_name = new Utf8(thriftCol.getIndex_name());
        avroCol.index_type = org.apache.cassandra.db.migration.avro.IndexType.KEYS;
        expected.column_metadata.add(avroCol);
    }
    // Convert and compare field by field against the hand-built expectation.
    org.apache.cassandra.db.migration.avro.CfDef converted = cfMetaData.toAvro();
    assertEquals(expected.keyspace, converted.keyspace);
    assertEquals(expected.name, converted.name);
    assertEquals(expected.default_validation_class, converted.default_validation_class);
    assertEquals(expected.comment, converted.comment);
    assertEquals(expected.column_metadata, converted.column_metadata);
}
Aggregations (end of collected usages).