Use of com.datatorrent.netlet.util.Slice in the apex-malhar project (Apache).
From class HDFSStorageTest, method testCleanUnflushedData.
/**
 * Verifies that a clean issued before any flush does not lose unflushed data:
 * after storing five entries, cleaning, and flushing, the entries are still
 * retrievable in insertion order.
 *
 * @throws IOException
 */
@Test
public void testCleanUnflushedData() throws IOException {
  for (int i = 0; i < 5; i++) {
    // Store the decimal string form of each index as the payload.
    final byte[] payload = String.valueOf(i).getBytes();
    storage.store(new Slice(payload, 0, payload.length));
  }
  // Clean before the first flush — nothing has been persisted yet.
  storage.clean(new byte[8]);
  storage.flush();
  // The unflushed entries must survive the clean call.
  match(storage.retrieve(new byte[8]), "0");
  match(storage.retrieveNext(), "1");
}
Use of com.datatorrent.netlet.util.Slice in the apex-malhar project (Apache).
From class CollectionSerdeTest, method testSerdeSet.
@Test
public void testSerdeSet() {
  // Round-trip a HashSet of strings through CollectionSerde and verify equality.
  Set<String> expected = Sets.newHashSet("a", "b", "c");
  CollectionSerde<String, Set<String>> serde =
      new CollectionSerde<>(new StringSerde(), (Class) HashSet.class);
  SerializationBuffer out = new SerializationBuffer(new WindowedBlockStream());
  serde.serialize(expected, out);
  Slice encoded = out.toSlice();
  Set<String> actual =
      serde.deserialize(new Input(encoded.buffer, encoded.offset, encoded.length));
  Assert.assertEquals(expected, actual);
}
Use of com.datatorrent.netlet.util.Slice in the apex-malhar project (Apache).
From class PairSerdeTest, method simpleSerdeTest.
@Test
public void simpleSerdeTest() {
  // Round-trip a (String, Integer) pair through PairSerde and verify equality.
  Pair<String, Integer> original = new ImmutablePair<>("abc", 123);
  PairSerde<String, Integer> serde = new PairSerde<>(new StringSerde(), new IntSerde());
  SerializationBuffer out = new SerializationBuffer(new WindowedBlockStream());
  serde.serialize(original, out);
  Slice encoded = out.toSlice();
  Pair<String, Integer> roundTripped =
      serde.deserialize(new Input(encoded.buffer, encoded.offset, encoded.length));
  Assert.assertEquals(original, roundTripped);
}
Use of com.datatorrent.netlet.util.Slice in the apex-malhar project (Apache).
From class BucketsFileSystemTest, method testUpdateBucketMetaDataFile.
@Test
public void testUpdateBucketMetaDataFile() throws IOException {
  testMeta.bucketsFileSystem.setup(testMeta.managedStateContext);
  // Write time-bucket metadata (window 10, 100 bytes, first key "1") for
  // bucket 1 / time bucket 1, then persist it to the bucket meta file.
  BucketsFileSystem.MutableTimeBucketMeta written =
      new BucketsFileSystem.MutableTimeBucketMeta(1, 1);
  written.updateTimeBucketMeta(10, 100, new Slice("1".getBytes()));
  testMeta.bucketsFileSystem.updateTimeBuckets(written);
  testMeta.bucketsFileSystem.updateBucketMetaFile(1);
  // Read the metadata back and confirm every persisted field survived.
  BucketsFileSystem.TimeBucketMeta readBack =
      testMeta.bucketsFileSystem.getTimeBucketMeta(1, 1);
  Assert.assertNotNull(readBack);
  Assert.assertEquals("last transferred window", 10, readBack.getLastTransferredWindowId());
  Assert.assertEquals("size in bytes", 100, readBack.getSizeInBytes());
  Assert.assertEquals("first key", "1", readBack.getFirstKey().stringValue());
  testMeta.bucketsFileSystem.teardown();
}
Use of com.datatorrent.netlet.util.Slice in the apex-malhar project (Apache).
From class ManagedStateTestUtils, method getTestBucketData.
/**
 * Builds a fixture map of five consecutive entries for managed-state tests.
 * Keys are the decimal string forms of keyStart..keyStart+4 wrapped in Slices;
 * each value reuses its key as the payload, tagged with time buckets
 * timeBucketStart..timeBucketStart+4 respectively.
 */
public static Map<Slice, Bucket.BucketedValue> getTestBucketData(int keyStart, long timeBucketStart) {
  Map<Slice, Bucket.BucketedValue> fixture = Maps.newHashMap();
  for (int offset = 0; offset < 5; offset++) {
    Slice key = new Slice(Integer.toString(keyStart + offset).getBytes());
    fixture.put(key, new Bucket.BucketedValue(timeBucketStart + offset, key));
  }
  return fixture;
}
Aggregations