Search in sources :

Example 81 with Slice

Use of com.datatorrent.netlet.util.Slice in the apex-malhar project by Apache.

The class HDFSStoragePerformance, method main.

/**
 * Micro-benchmark entry point for {@code HDFSStorage}: writes three flushed
 * batches of 1,000,000 slices of 1 KiB each, then reads everything back and
 * verifies each retrieved slice against the stored payload. Timings are
 * emitted via the class logger before/after the write and read phases.
 *
 * @param args unused
 */
public static void main(String[] args) {
    HDFSStorage storage = new HDFSStorage();
    storage.setBaseDir(".");
    storage.setId("gaurav_flume_1");
    storage.setRestore(true);
    storage.setup(null);
    int count = 1000000;
    logger.debug(" start time {}", System.currentTimeMillis());
    byte[] b = new byte[1024];
    // Three flushed batches of 'count' slices each. This replaces the three
    // copy-pasted store/flush loops of the original with one nested loop;
    // the sequence of store() and flush() calls is unchanged.
    for (int batch = 0; batch < 3; batch++) {
        for (int i = 0; i < count; i++) {
            storage.store(new Slice(b, 0, b.length));
        }
        storage.flush();
    }
    logger.debug(" end time {}", System.currentTimeMillis());
    logger.debug(" start time for retrieve {}", System.currentTimeMillis());
    storage.retrieve(new byte[8]);
    // NOTE(review): new String(byte[]) uses the platform default charset; it
    // only feeds match() against bytes produced the same way, so this is
    // internally consistent.
    String inputData = new String(b);
    // 1-based position of the slice currently being verified (the original
    // dead-stored 10000 into this variable before resetting it to 1 here).
    int index = 1;
    while (true) {
        b = storage.retrieveNext();
        if (b == null) {
            logger.debug(" end time for retrieve {}", System.currentTimeMillis());
            return;
        } else if (!match(b, inputData)) {
            throw new RuntimeException("failed : " + index);
        }
        index++;
    }
}
Also used : Slice(com.datatorrent.netlet.util.Slice)

Example 82 with Slice

Use of com.datatorrent.netlet.util.Slice in the apex-malhar project by Apache.

The class HDFSStorageTest, method testCleanForFlushedData.

/**
 * Stores several flushed rounds of data, then cleans up to the address of
 * the last flushed slice and verifies that the clean-offset file on disk
 * records exactly that address.
 */
@Test
public void testCleanForFlushedData() throws IOException {
    byte[] payload = new byte[200];
    storage.retrieve(new byte[8]);
    // Five flushed rounds of two slices each.
    for (int round = 0; round < 5; round++) {
        storage.store(new Slice(payload, 0, payload.length));
        storage.store(new Slice(payload, 0, payload.length));
        storage.flush();
        // storage.clean(address);
    }
    // Five more pairs, remembering the address returned for the final store.
    byte[] lastAddress = null;
    for (int round = 0; round < 5; round++) {
        storage.store(new Slice(payload, 0, payload.length));
        lastAddress = storage.store(new Slice(payload, 0, payload.length));
    }
    storage.flush();
    storage.clean(lastAddress);
    // The clean-offset file should hold the address we cleaned up to.
    byte[] recordedOffset = storage.readData(new Path(STORAGE_DIRECTORY + "/1/cleanoffsetFile"));
    Assert.assertArrayEquals(lastAddress, recordedOffset);
}
Also used : Path(org.apache.hadoop.fs.Path) Slice(com.datatorrent.netlet.util.Slice) Test(org.junit.Test)

Example 83 with Slice

Use of com.datatorrent.netlet.util.Slice in the apex-malhar project by Apache.

The class HDFSStorageTest, method testPartialFlush.

/**
 * Exercises partial-flush behavior: (1) some data is stored, (2) the file
 * is flushed but not closed, (3) more data is stored without a roll-over,
 * (4) retrieve on the last returned address yields null, and (5) a further
 * store also returns null because of that prior retrieve call.
 *
 * @throws Exception
 */
@Test
public void testPartialFlush() throws Exception {
    Assert.assertNull(storage.retrieve(new byte[8]));
    byte[] data = "ab".getBytes();
    byte[] firstAddress = storage.store(new Slice(data, 0, data.length));
    Assert.assertNotNull(firstAddress);
    storage.flush();
    data = "cb".getBytes();
    byte[] secondAddress = storage.store(new Slice(data, 0, data.length));
    match(storage.retrieve(new byte[8]), "ab");
    // The unflushed address is not yet retrievable...
    Assert.assertNull(storage.retrieve(secondAddress));
    // ...and a store after that retrieve call returns null too.
    Assert.assertNull(storage.store(new Slice(data, 0, data.length)));
    storage.flush();
    match(storage.retrieve(firstAddress), "cb");
    Assert.assertNotNull(storage.store(new Slice(data, 0, data.length)));
}
Also used : Slice(com.datatorrent.netlet.util.Slice) Test(org.junit.Test)

Example 84 with Slice

Use of com.datatorrent.netlet.util.Slice in the apex-malhar project by Apache.

The class HDFSStorageTest, method testCleanForPartialFlushedData.

/**
 * Verifies that after cleaning flushed data, addresses handed out for
 * subsequently stored but unflushed slices cannot be retrieved — not even
 * after a later flush.
 */
@Test
public void testCleanForPartialFlushedData() throws IOException {
    byte[] zeros = new byte[8];
    storage.retrieve(new byte[8]);
    storage.store(new Slice(zeros, 0, zeros.length));
    byte[] marker = "1a".getBytes();
    byte[] markerAddress = storage.store(new Slice(marker, 0, marker.length));
    storage.flush();
    storage.clean(markerAddress);
    // Store five unflushed pairs, keeping the address of the final store.
    byte[] lastAddress = null;
    for (int i = 0; i < 5; i++) {
        byte[] counter = String.valueOf(i).getBytes();
        storage.store(new Slice(counter, 0, counter.length));
        lastAddress = storage.store(new Slice(zeros, 0, zeros.length));
    }
    Assert.assertNull(storage.retrieve(new byte[8]));
    Assert.assertNull(storage.retrieve(lastAddress));
    storage.store(new Slice(zeros, 0, zeros.length));
    storage.flush();
    // Even after flushing, the stale address stays unreadable.
    Assert.assertNull(storage.retrieve(lastAddress));
}
Also used : Slice(com.datatorrent.netlet.util.Slice) Test(org.junit.Test)

Example 85 with Slice

Use of com.datatorrent.netlet.util.Slice in the apex-malhar project by Apache.

The class HDFSStorageTest, method testFailure.

/**
 * Simulates a failure/restart cycle: writes, flushes and cleans several
 * rounds of data, tears the storage down, recreates it in restore mode,
 * and verifies that a freshly stored slice can be read back intact.
 */
@Test
public void testFailure() throws IOException {
    byte[] b = new byte[200];
    storage.retrieve(new byte[8]);
    for (int i = 0; i < 5; i++) {
        storage.store(new Slice(b, 0, b.length));
        // 'address' is only needed within a single iteration, so it is
        // scoped here rather than at method level as in the original.
        byte[] address = storage.store(new Slice(b, 0, b.length));
        storage.flush();
        storage.clean(address);
    }
    // Tear down and recreate the storage in restore mode ("failure").
    storage.teardown();
    byte[] identifier = new byte[8];
    storage = getStorage("1", true);
    storage.retrieve(identifier);
    storage.store(new Slice(b, 0, b.length));
    storage.store(new Slice(b, 0, b.length));
    storage.store(new Slice(b, 0, b.length));
    storage.flush();
    byte[] data = storage.retrieve(identifier);
    // The test skips the first 8 bytes of the returned buffer — presumably
    // an address header prepended by retrieve(); TODO confirm in HDFSStorage.
    byte[] tempData = new byte[data.length - 8];
    System.arraycopy(data, 8, tempData, 0, tempData.length);
    // NOTE(review): new String(byte[]) uses the platform default charset;
    // both sides decode identically-produced bytes, so the comparison holds.
    Assert.assertEquals("matched the stored value with retrieved value", new String(b), new String(tempData));
}
Also used : Slice(com.datatorrent.netlet.util.Slice) Test(org.junit.Test)

Aggregations

Slice (com.datatorrent.netlet.util.Slice)114 Test (org.junit.Test)65 ByteArrayOutputStream (java.io.ByteArrayOutputStream)10 Input (com.esotericsoftware.kryo.io.Input)9 IOException (java.io.IOException)6 Map (java.util.Map)5 ArrayList (java.util.ArrayList)4 HashMap (java.util.HashMap)4 CountDownLatch (java.util.concurrent.CountDownLatch)4 BufferSlice (org.apache.apex.malhar.lib.utils.serde.BufferSlice)4 Path (org.apache.hadoop.fs.Path)4 ObjectMapperString (com.datatorrent.common.util.ObjectMapperString)3 SerializationBuffer (org.apache.apex.malhar.lib.utils.serde.SerializationBuffer)3 StringSerde (org.apache.apex.malhar.lib.utils.serde.StringSerde)3 Attribute (com.datatorrent.api.Attribute)2 OperatorContext (com.datatorrent.api.Context.OperatorContext)2 Output (com.esotericsoftware.kryo.io.Output)2 RandomAccessFile (java.io.RandomAccessFile)2 Serializable (java.io.Serializable)2 HashSet (java.util.HashSet)2