Example 96 with Random

Use of java.util.Random in project hadoop by apache.

From the class TestPread, method datanodeRestartTest.

// test pread can survive datanode restarts
private void datanodeRestartTest(MiniDFSCluster cluster, FileSystem fileSys, Path name) throws IOException {
    // skip this test for simulated storage: simulated blocks
    // don't survive datanode restarts.
    if (simulatedStorage) {
        return;
    }
    int numBlocks = 1;
    assertTrue(numBlocks <= HdfsClientConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT);
    byte[] expected = new byte[numBlocks * blockSize];
    Random rand = new Random(seed);
    rand.nextBytes(expected);
    byte[] actual = new byte[numBlocks * blockSize];
    FSDataInputStream stm = fileSys.open(name);
    // read a block and get block locations cached as a result
    stm.readFully(0, actual);
    checkAndEraseData(actual, 0, expected, "Pread Datanode Restart Setup");
    // restart all datanodes. it is expected that they will
    // restart on different ports, hence, cached block locations
    // will no longer work.
    assertTrue(cluster.restartDataNodes());
    cluster.waitActive();
    // verify the block can be read again using the same InputStream 
    // (via re-fetching of block locations from namenode). there is a 
    // 3 sec sleep in chooseDataNode(), which can be shortened for 
    // this test if configurable.
    stm.readFully(0, actual);
    checkAndEraseData(actual, 0, expected, "Pread Datanode Restart Test");
}
Also used: Random (java.util.Random), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)
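
The checkAndEraseData helper is defined elsewhere in TestPread and is not shown on this page. A minimal sketch of what such a helper might look like, assuming it compares the bytes read against the expected region and then zeroes the buffer for reuse (a hypothetical reconstruction, not the actual Hadoop source):

import static org.junit.Assert.assertEquals;

// Hypothetical reconstruction of TestPread's checkAndEraseData helper:
// verify each byte read against the expected data starting at 'from',
// then erase the buffer so a later read cannot pass by accident.
private void checkAndEraseData(byte[] actual, int from, byte[] expected, String message) {
    for (int i = 0; i < actual.length; i++) {
        assertEquals(message + ": byte " + (from + i) + " differs",
                expected[from + i], actual[i]);
        actual[i] = 0;
    }
}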

Example 97 with Random

Use of java.util.Random in project hadoop by apache.

From the class TestCachedBlocksList, method testMultipleLists.

@Test(timeout = 60000)
public void testMultipleLists() {
    DatanodeDescriptor[] datanodes = new DatanodeDescriptor[] {
        new DatanodeDescriptor(new DatanodeID("127.0.0.1", "localhost", "abcd", 5000, 5001, 5002, 5003)),
        new DatanodeDescriptor(new DatanodeID("127.0.1.1", "localhost", "efgh", 6000, 6001, 6002, 6003))
    };
    CachedBlocksList[] lists = new CachedBlocksList[] {
        datanodes[0].getPendingCached(),
        datanodes[0].getCached(),
        datanodes[1].getPendingCached(),
        datanodes[1].getCached(),
        datanodes[1].getPendingUncached()
    };
    final int NUM_BLOCKS = 8000;
    CachedBlock[] blocks = new CachedBlock[NUM_BLOCKS];
    for (int i = 0; i < NUM_BLOCKS; i++) {
        blocks[i] = new CachedBlock(i, (short) i, true);
    }
    Random r = new Random(654);
    for (CachedBlocksList list : lists) {
        testAddElementsToList(list, blocks);
    }
    for (CachedBlocksList list : lists) {
        testRemoveElementsFromList(r, list, blocks);
    }
}
Also used: DatanodeDescriptor (org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor), DatanodeID (org.apache.hadoop.hdfs.protocol.DatanodeID), CachedBlocksList (org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.CachedBlocksList), Random (java.util.Random), CachedBlock (org.apache.hadoop.hdfs.server.namenode.CachedBlock), Test (org.junit.Test)
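
The testAddElementsToList and testRemoveElementsFromList helpers live elsewhere in TestCachedBlocksList. A plausible sketch of the pattern, assuming CachedBlocksList behaves as a java.util.Collection (it extends Hadoop's IntrusiveCollection); the bodies below are illustrative, not the actual test code:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

// Illustrative sketch: add every block to the list, in order.
private void testAddElementsToList(CachedBlocksList list, CachedBlock[] blocks) {
    for (CachedBlock block : blocks) {
        assertTrue("failed to add " + block, list.add(block));
    }
}

// Illustrative sketch: remove the blocks in an order shuffled by the seeded
// Random, so the run is randomized yet reproducible across machines.
private void testRemoveElementsFromList(Random r, CachedBlocksList list, CachedBlock[] blocks) {
    List<CachedBlock> order = new ArrayList<>(Arrays.asList(blocks));
    Collections.shuffle(order, r);
    for (CachedBlock block : order) {
        assertTrue("failed to remove " + block, list.remove(block));
    }
    assertFalse("expected the list to end up empty", list.iterator().hasNext());
}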

Example 98 with Random

Use of java.util.Random in project hadoop by apache.

From the class TestBlockTokenWithDFS, method generateBytes.

public static byte[] generateBytes(int fileSize) {
    Random r = new Random();
    byte[] rawData = new byte[fileSize];
    r.nextBytes(rawData);
    return rawData;
}
Also used: Random (java.util.Random)
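
A typical use of such a helper is to materialize a file whose contents can later be verified byte-for-byte. A short usage sketch (the path, size, and method name below are made up for illustration):

import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative usage: write 4 KB of random data to a hypothetical test path.
static void writeRandomFile(FileSystem fs) throws IOException {
    byte[] data = generateBytes(4096);
    try (FSDataOutputStream out = fs.create(new Path("/test/randomBytes.dat"))) {
        out.write(data);
    }
}

Note that generateBytes uses an unseeded Random, so the caller must keep a reference to the returned array in order to verify later reads against it.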

Example 99 with Random

Use of java.util.Random in project hadoop by apache.

From the class TestEditLog, method testFuzzSequences.

/**
   * "Fuzz" test for the edit log.
   *
   * This tests that we can read random garbage from the edit log without
   * crashing the JVM or throwing an unchecked exception.
   */
@Test
public void testFuzzSequences() throws IOException {
    final int MAX_GARBAGE_LENGTH = 512;
    final int MAX_INVALID_SEQ = 5000;
    // The seed to use for our random number generator.  When given the same
    // seed, java.util.Random will always produce the same sequence of values.
    // This is important because it means that the test is deterministic and
    // repeatable on any machine.
    final int RANDOM_SEED = 123;
    Random r = new Random(RANDOM_SEED);
    for (int i = 0; i < MAX_INVALID_SEQ; i++) {
        byte[] garbage = new byte[r.nextInt(MAX_GARBAGE_LENGTH)];
        r.nextBytes(garbage);
        validateNoCrash(garbage);
    }
}
Also used: Random (java.util.Random), Test (org.junit.Test)
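
validateNoCrash is defined elsewhere in TestEditLog; the property the fuzz loop relies on is that feeding arbitrary bytes to the edit log reader may raise a checked IOException but must never throw an unchecked exception or crash the JVM. A self-contained sketch of that pattern, using a plain DataInputStream as a stand-in for Hadoop's actual edit log loader:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

// Illustrative stand-in for validateNoCrash: decode untrusted bytes and
// require that failures surface only as IOException (EOFException included).
// Any RuntimeException or Error would propagate and fail the test.
static void validateNoCrashSketch(byte[] garbage) {
    try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(garbage))) {
        while (true) {
            in.readInt(); // stand-in for decoding one edit log record
        }
    } catch (IOException expected) {
        // garbage input is allowed to fail, but only with a checked exception
    }
}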

Example 100 with Random

Use of java.util.Random in project hadoop by apache.

From the class TestCodec, method testSplitableCodec.

private void testSplitableCodec(Class<? extends SplittableCompressionCodec> codecClass) throws IOException {
    final long DEFLBYTES = 2 * 1024 * 1024;
    final Configuration conf = new Configuration();
    final Random rand = new Random();
    final long seed = rand.nextLong();
    LOG.info("seed: " + seed);
    rand.setSeed(seed);
    SplittableCompressionCodec codec = ReflectionUtils.newInstance(codecClass, conf);
    final FileSystem fs = FileSystem.getLocal(conf);
    final FileStatus infile = fs.getFileStatus(writeSplitTestFile(fs, rand, codec, DEFLBYTES));
    if (infile.getLen() > Integer.MAX_VALUE) {
        fail("Unexpected compression: " + DEFLBYTES + " -> " + infile.getLen());
    }
    final int flen = (int) infile.getLen();
    final Text line = new Text();
    final Decompressor dcmp = CodecPool.getDecompressor(codec);
    try {
        for (int pos = 0; pos < infile.getLen(); pos += rand.nextInt(flen / 8)) {
            // read from random positions, verifying that there exist two sequential
            // lines as written in writeSplitTestFile
            final SplitCompressionInputStream in = codec.createInputStream(
                fs.open(infile.getPath()), dcmp, pos, flen,
                SplittableCompressionCodec.READ_MODE.BYBLOCK);
            if (in.getAdjustedStart() >= flen) {
                break;
            }
            LOG.info("SAMPLE " + in.getAdjustedStart() + "," + in.getAdjustedEnd());
            final LineReader lreader = new LineReader(in);
            // the first line read after seeking to a random position is likely partial; ignore it
            lreader.readLine(line);
            if (in.getPos() >= flen) {
                break;
            }
            lreader.readLine(line);
            final int seq1 = readLeadingInt(line);
            lreader.readLine(line);
            if (in.getPos() >= flen) {
                break;
            }
            final int seq2 = readLeadingInt(line);
            assertEquals("Mismatched lines", seq1 + 1, seq2);
        }
    } finally {
        CodecPool.returnDecompressor(dcmp);
    }
    // remove on success
    fs.delete(infile.getPath().getParent(), true);
}
Also used: FileStatus (org.apache.hadoop.fs.FileStatus), BuiltInGzipDecompressor (org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor), Configuration (org.apache.hadoop.conf.Configuration), Random (java.util.Random), FileSystem (org.apache.hadoop.fs.FileSystem), LineReader (org.apache.hadoop.util.LineReader), Text (org.apache.hadoop.io.Text)
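
The writeSplitTestFile and readLeadingInt helpers are defined elsewhere in TestCodec. The assertion above only needs readLeadingInt to return the sequence number that writeSplitTestFile is assumed to write at the start of each line; a hypothetical sketch:

import org.apache.hadoop.io.Text;

// Hypothetical sketch (not Hadoop's actual helper): parse the decimal integer
// prefix of a line, assuming every full line starts with its sequence number.
private static int readLeadingInt(Text line) {
    String s = line.toString();
    int end = 0;
    while (end < s.length() && Character.isDigit(s.charAt(end))) {
        end++;
    }
    return Integer.parseInt(s.substring(0, end));
}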

Aggregations

Types most frequently used together with Random (java.util.Random) across the indexed sources, with occurrence counts:

Random (java.util.Random): 4728
Test (org.junit.Test): 1273
ArrayList (java.util.ArrayList): 602
IOException (java.io.IOException): 313
HashMap (java.util.HashMap): 242
File (java.io.File): 209
List (java.util.List): 154
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 151
ByteArrayInputStream (java.io.ByteArrayInputStream): 134
HashSet (java.util.HashSet): 129
ByteBuffer (java.nio.ByteBuffer): 123
Test (org.testng.annotations.Test): 121
Path (org.apache.hadoop.fs.Path): 116
Map (java.util.Map): 106
QuickTest (com.hazelcast.test.annotation.QuickTest): 99
ParallelTest (com.hazelcast.test.annotation.ParallelTest): 94
CountDownLatch (java.util.concurrent.CountDownLatch): 93
Configuration (org.apache.hadoop.conf.Configuration): 88
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 79
Before (org.junit.Before): 78