Use of org.apache.avro.util.Utf8 in project eiger by wlloyd:
the class CompressionParameters, method asAvroOptions.
/**
 * Converts these compression settings into an Avro-compatible option map.
 * Free-form options are always copied; the compressor class name and chunk
 * length are added only when an sstable compressor is configured.
 */
public Map<CharSequence, CharSequence> asAvroOptions() {
    Map<CharSequence, CharSequence> avroOptions = new HashMap<CharSequence, CharSequence>();
    for (Map.Entry<String, String> option : otherOptions.entrySet()) {
        avroOptions.put(new Utf8(option.getKey()), new Utf8(option.getValue()));
    }
    if (sstableCompressor != null) {
        avroOptions.put(new Utf8(SSTABLE_COMPRESSION), new Utf8(sstableCompressor.getClass().getName()));
        // Chunk length is optional; emit it only when explicitly set.
        if (chunkLength != null) {
            avroOptions.put(new Utf8(CHUNK_LENGTH_KB), new Utf8(chunkLengthInKB()));
        }
    }
    return avroOptions;
}
Use of org.apache.avro.util.Utf8 in project eiger by wlloyd:
the class DefsTest, method testCFMetaDataApply.
/**
 * Applies an Avro CfDef (with one column added and one removed) to a CFMetaData
 * instance and verifies the resulting column metadata reflects both changes.
 */
@Test
public void testCFMetaDataApply() throws ConfigurationException {
    // Build five indexed column definitions keyed by single-byte names 0..4.
    Map<ByteBuffer, ColumnDefinition> indexes = new HashMap<ByteBuffer, ColumnDefinition>();
    for (int i = 0; i < 5; i++) {
        ByteBuffer name = ByteBuffer.wrap(new byte[] { (byte) i });
        indexes.put(name, new ColumnDefinition(name, BytesType.instance, IndexType.KEYS, null, Integer.toString(i)));
    }
    CFMetaData cfm = new CFMetaData("Keyspace1", "TestApplyCFM_CF", ColumnFamilyType.Standard, BytesType.instance, null);
    cfm.comment("No comment").readRepairChance(0.5).replicateOnWrite(false).gcGraceSeconds(100000).defaultValidator(null).minCompactionThreshold(500).maxCompactionThreshold(500).mergeShardsChance(0.0).columnMetadata(indexes);
    // we'll be adding this one later. make sure it's not already there.
    assert cfm.getColumn_metadata().get(ByteBuffer.wrap(new byte[] { 5 })) == null;
    org.apache.cassandra.db.migration.avro.CfDef cfDef = cfm.toAvro();
    // add one.
    org.apache.cassandra.db.migration.avro.ColumnDef addIndexDef = new org.apache.cassandra.db.migration.avro.ColumnDef();
    addIndexDef.index_name = "5";
    addIndexDef.index_type = org.apache.cassandra.db.migration.avro.IndexType.KEYS;
    addIndexDef.name = ByteBuffer.wrap(new byte[] { 5 });
    addIndexDef.validation_class = BytesType.class.getName();
    cfDef.column_metadata.add(addIndexDef);
    // remove one.
    org.apache.cassandra.db.migration.avro.ColumnDef removeIndexDef = new org.apache.cassandra.db.migration.avro.ColumnDef();
    removeIndexDef.index_name = new Utf8("0");
    removeIndexDef.index_type = org.apache.cassandra.db.migration.avro.IndexType.KEYS;
    removeIndexDef.name = ByteBuffer.wrap(new byte[] { 0 });
    removeIndexDef.validation_class = new Utf8(BytesType.class.getName());
    assert cfDef.column_metadata.remove(removeIndexDef);
    cfm.apply(cfDef);
    // Each surviving original column (1..4) must still be present.
    // FIX: the loop previously checked column 1 on every iteration instead of column i.
    for (int i = 1; i < indexes.size(); i++)
        assert cfm.getColumn_metadata().get(ByteBuffer.wrap(new byte[] { (byte) i })) != null;
    // Column 0 was removed; column 5 was added.
    assert cfm.getColumn_metadata().get(ByteBuffer.wrap(new byte[] { 0 })) == null;
    assert cfm.getColumn_metadata().get(ByteBuffer.wrap(new byte[] { 5 })) != null;
}
Use of org.apache.avro.util.Utf8 in project eiger by wlloyd:
the class DefsTest, method testZeroInjection.
/**
 * Round-trips a minimally populated CfDef through Avro serialization and
 * verifies that optional fields stay null instead of picking up zero values.
 */
@Test
public void testZeroInjection() throws IOException {
    org.apache.cassandra.db.migration.avro.CfDef original = new org.apache.cassandra.db.migration.avro.CfDef();
    // Only the mandatory (non-null) fields are populated.
    original.keyspace = new Utf8("Lest Ks");
    original.name = new Utf8("Mest Cf");
    org.apache.cassandra.db.migration.avro.CfDef roundTripped = SerDeUtils.deserializeWithSchema(SerDeUtils.serializeWithSchema(original), new org.apache.cassandra.db.migration.avro.CfDef());
    assert original.equals(roundTripped);
    // The [de]serialize cycle must not inject zeros into unset optional fields.
    assert original.min_compaction_threshold == null;
    assert roundTripped.min_compaction_threshold == null;
    assert original.compaction_strategy == null;
}
Use of org.apache.avro.util.Utf8 in project flink by apache:
the class AvroRecordInputFormatTest, method doTestDeserializationGenericRecord.
/**
 * Helper method to test GenericRecord deserialization.
 *
 * @param format
 *            the format to test
 * @param parameters
 *            the configuration to use
 * @throws IOException
 *             thrown if there is an issue
 */
@SuppressWarnings("unchecked")
private void doTestDeserializationGenericRecord(final AvroInputFormat<GenericRecord> format, final Configuration parameters) throws IOException {
    try {
        format.configure(parameters);
        FileInputSplit[] splits = format.createInputSplits(1);
        // JUnit convention: expected value first (was reversed, which garbles failure messages).
        assertEquals(1, splits.length);
        format.open(splits[0]);
        GenericRecord u = format.nextRecord(null);
        assertNotNull(u);
        assertEquals("The schemas should be equal", userSchema, u.getSchema());
        String name = u.get("name").toString();
        assertNotNull("empty record", name);
        assertEquals("name not equal", TEST_NAME, name);
        // check arrays
        List<CharSequence> sl = (List<CharSequence>) u.get("type_array_string");
        assertEquals("element 0 not equal", TEST_ARRAY_STRING_1, sl.get(0).toString());
        assertEquals("element 1 not equal", TEST_ARRAY_STRING_2, sl.get(1).toString());
        List<Boolean> bl = (List<Boolean>) u.get("type_array_boolean");
        assertEquals("element 0 not equal", TEST_ARRAY_BOOLEAN_1, bl.get(0));
        assertEquals("element 1 not equal", TEST_ARRAY_BOOLEAN_2, bl.get(1));
        // check enums
        GenericData.EnumSymbol enumValue = (GenericData.EnumSymbol) u.get("type_enum");
        assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), enumValue.toString());
        // check maps (Avro map keys deserialize as Utf8, so look up with Utf8 keys)
        Map<CharSequence, Long> lm = (Map<CharSequence, Long>) u.get("type_map");
        assertEquals("map value of key 1 not equal", TEST_MAP_VALUE1, lm.get(new Utf8(TEST_MAP_KEY1)).longValue());
        assertEquals("map value of key 2 not equal", TEST_MAP_VALUE2, lm.get(new Utf8(TEST_MAP_KEY2)).longValue());
        assertFalse("expecting second element", format.reachedEnd());
        assertNotNull("expecting second element", format.nextRecord(u));
        assertNull(format.nextRecord(u));
        assertTrue(format.reachedEnd());
    } finally {
        // Always release the underlying file handle, even on assertion failure.
        format.close();
    }
}
Use of org.apache.avro.util.Utf8 in project flink by apache:
the class AvroRecordInputFormatTest, method testDeserialisationReuseAvroRecordFalse.
/**
 * Test if the AvroInputFormat is able to properly read data from an avro file
 * when value reuse is disabled.
 *
 * @throws IOException
 *             thrown if there is an issue
 */
@Test
public void testDeserialisationReuseAvroRecordFalse() throws IOException {
    Configuration parameters = new Configuration();
    AvroInputFormat<User> format = new AvroInputFormat<User>(new Path(testFile.getAbsolutePath()), User.class);
    format.setReuseAvroValue(false);
    try {
        format.configure(parameters);
        FileInputSplit[] splits = format.createInputSplits(1);
        // JUnit convention: expected value first (was reversed, which garbles failure messages).
        assertEquals(1, splits.length);
        format.open(splits[0]);
        User u = format.nextRecord(null);
        assertNotNull(u);
        String name = u.getName().toString();
        assertNotNull("empty record", name);
        assertEquals("name not equal", TEST_NAME, name);
        // check arrays
        List<CharSequence> sl = u.getTypeArrayString();
        assertEquals("element 0 not equal", TEST_ARRAY_STRING_1, sl.get(0).toString());
        assertEquals("element 1 not equal", TEST_ARRAY_STRING_2, sl.get(1).toString());
        List<Boolean> bl = u.getTypeArrayBoolean();
        assertEquals("element 0 not equal", TEST_ARRAY_BOOLEAN_1, bl.get(0));
        assertEquals("element 1 not equal", TEST_ARRAY_BOOLEAN_2, bl.get(1));
        // check enums
        Colors enumValue = u.getTypeEnum();
        assertEquals("enum not equal", TEST_ENUM_COLOR, enumValue);
        // check maps (Avro map keys deserialize as Utf8, so look up with Utf8 keys)
        Map<CharSequence, Long> lm = u.getTypeMap();
        assertEquals("map value of key 1 not equal", TEST_MAP_VALUE1, lm.get(new Utf8(TEST_MAP_KEY1)).longValue());
        assertEquals("map value of key 2 not equal", TEST_MAP_VALUE2, lm.get(new Utf8(TEST_MAP_KEY2)).longValue());
        assertFalse("expecting second element", format.reachedEnd());
        assertNotNull("expecting second element", format.nextRecord(u));
        assertNull(format.nextRecord(u));
        assertTrue(format.reachedEnd());
    } finally {
        // FIX: close() was unconditional but not in a finally, leaking the open
        // file when an assertion failed (matches doTestDeserializationGenericRecord).
        format.close();
    }
}
Aggregations