Search in sources :

Example 86 with Random

use of java.util.Random in project hadoop by apache.

The class TestFsck, method testCorruptBlock.

/**
 * Verifies that fsck reports a file as corrupt after its only replica's
 * on-disk block file has been overwritten with garbage.
 *
 * Flow: create a single-replica file, confirm fsck says HEALTHY, scribble
 * bytes into the replica's block file, read the file to trigger
 * reportBadBlocks, wait for the namenode to register the corruption, then
 * assert fsck reports CORRUPT and names the file.
 */
@Test
public void testCorruptBlock() throws Exception {
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000);
    // Set short retry timeouts so this test runs faster
    conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 10);
    FileSystem fs = null;
    DFSClient dfsClient = null;
    LocatedBlocks blocks = null;
    int replicaCount = 0;
    Random random = new Random();
    String outStr = null;
    // Single replica so corrupting one block file corrupts the whole block.
    short factor = 1;
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    cluster.waitActive();
    fs = cluster.getFileSystem();
    Path file1 = new Path("/testCorruptBlock");
    DFSTestUtil.createFile(fs, file1, 1024, factor, 0);
    // Wait until file replication has completed
    DFSTestUtil.waitReplication(fs, file1, factor);
    ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, file1);
    // Make sure filesystem is in healthy state
    outStr = runFsck(conf, 0, true, "/");
    System.out.println(outStr);
    assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
    // corrupt replicas
    File blockFile = cluster.getBlockFile(0, block);
    if (blockFile != null && blockFile.exists()) {
        // try-with-resources: the original leaked raFile if seek/write threw.
        try (RandomAccessFile raFile = new RandomAccessFile(blockFile, "rw")) {
            FileChannel channel = raFile.getChannel();
            String badString = "BADBAD";
            // Overwrite at a random offset in the first half of the block.
            int rand = random.nextInt((int) channel.size() / 2);
            raFile.seek(rand);
            raFile.write(badString.getBytes());
        }
    }
    // Read the file to trigger reportBadBlocks
    try {
        IOUtils.copyBytes(fs.open(file1), new IOUtils.NullOutputStream(), conf, true);
    } catch (IOException ie) {
        // A checksum mismatch is the expected outcome of the corruption.
        assertTrue(ie instanceof ChecksumException);
    }
    dfsClient = new DFSClient(new InetSocketAddress("localhost", cluster.getNameNodePort()), conf);
    blocks = dfsClient.getNamenode().getBlockLocations(file1.toString(), 0, Long.MAX_VALUE);
    replicaCount = blocks.get(0).getLocations().length;
    // Poll until the namenode reflects the expected replica count.
    while (replicaCount != factor) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
        }
        blocks = dfsClient.getNamenode().getBlockLocations(file1.toString(), 0, Long.MAX_VALUE);
        replicaCount = blocks.get(0).getLocations().length;
    }
    assertTrue(blocks.get(0).isCorrupt());
    // Check if fsck reports the same
    outStr = runFsck(conf, 1, true, "/");
    System.out.println(outStr);
    assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS));
    assertTrue(outStr.contains("testCorruptBlock"));
}
Also used : DFSClient(org.apache.hadoop.hdfs.DFSClient) Path(org.apache.hadoop.fs.Path) MiniDFSCluster(org.apache.hadoop.hdfs.MiniDFSCluster) FileChannel(java.nio.channels.FileChannel) ChecksumException(org.apache.hadoop.fs.ChecksumException) InetSocketAddress(java.net.InetSocketAddress) LocatedBlocks(org.apache.hadoop.hdfs.protocol.LocatedBlocks) ExtendedBlock(org.apache.hadoop.hdfs.protocol.ExtendedBlock) Matchers.anyString(org.mockito.Matchers.anyString) IOException(java.io.IOException) IOUtils(org.apache.hadoop.io.IOUtils) Random(java.util.Random) RandomAccessFile(java.io.RandomAccessFile) FileSystem(org.apache.hadoop.fs.FileSystem) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) RandomAccessFile(java.io.RandomAccessFile) File(java.io.File) Test(org.junit.Test)

Example 87 with Random

use of java.util.Random in project hadoop by apache.

The class TestLargeDirectoryDelete, method createFiles.

/**
 * Creates a deep directory tree under /root/ with one file for every
 * 100 blocks; each file sits at a randomly chosen nesting depth of 0-9.
 */
private void createFiles() throws IOException {
    Random random = new Random();
    for (int i = 0; i < TOTAL_BLOCKS; i += 100) {
        StringBuilder path = new StringBuilder("/root/");
        // Directory depth for this file, in [0, 9].
        int depth = random.nextInt(10);
        for (int j = i; j >= i - depth; j--) {
            path.append(j).append('/');
        }
        path.append("file").append(i);
        createFile(path.toString(), 100);
    }
}
Also used : Random(java.util.Random)

Example 88 with Random

use of java.util.Random in project hadoop by apache.

The class TestCoderBase, method corruptSomeChunk.

/**
 * Corrupts one randomly selected chunk by advancing its buffer's read
 * position by a single byte, so subsequent decoding no longer lines up.
 *
 * @param chunks chunks to pick a corruption victim from
 */
protected void corruptSomeChunk(ECChunk[] chunks) {
    int target = new Random().nextInt(chunks.length);
    ByteBuffer victim = chunks[target].getBuffer();
    // Only shift when at least one byte remains; otherwise leave untouched.
    if (victim.hasRemaining()) {
        victim.position(victim.position() + 1);
    }
}
Also used : Random(java.util.Random) ByteBuffer(java.nio.ByteBuffer)

Example 89 with Random

use of java.util.Random in project hadoop by apache.

The class TestVLong, method testVLongRandom.

/**
 * Round-trips a million random longs through Utils.writeVLong /
 * Utils.readVLong and asserts each value survives intact. Values are
 * masked to random bit widths of 1-64 so all encoding lengths are hit.
 */
@Test
public void testVLongRandom() throws IOException {
    int count = 1024 * 1024;
    long[] data = new long[count];
    Random rng = new Random();
    for (int i = 0; i < data.length; ++i) {
        // shift is in [1, 64]. Java's << uses the count mod 64, so the
        // original (1L << 64) - 1 silently produced mask 0 and the
        // full-width case was never exercised; handle 64 explicitly.
        int shift = rng.nextInt(Long.SIZE) + 1;
        long mask = (shift == Long.SIZE) ? ~0L : (1L << shift) - 1;
        long a = ((long) rng.nextInt()) << 32;
        long b = ((long) rng.nextInt()) & 0xffffffffL;
        data[i] = (a + b) & mask;
    }
    FSDataOutputStream out = fs.create(path);
    for (int i = 0; i < data.length; ++i) {
        Utils.writeVLong(out, data[i]);
    }
    out.close();
    FSDataInputStream in = fs.open(path);
    for (int i = 0; i < data.length; ++i) {
        // assertEquals takes (expected, actual); the original had them
        // reversed, which garbles failure messages.
        Assert.assertEquals(data[i], Utils.readVLong(in));
    }
    in.close();
    fs.delete(path, false);
}
Also used : Random(java.util.Random) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) Test(org.junit.Test)

Example 90 with Random

use of java.util.Random in project hadoop by apache.

The class TestTFileSeqFileComparison, method setUpDictionary.

/**
 * Fills the dictionary with {@code options.dictSize} random byte words.
 * Word lengths are drawn from [minWordLen, maxWordLen) — note the upper
 * bound is exclusive, and maxWordLen must exceed minWordLen or
 * {@code Random.nextInt(0)} throws (presumably guaranteed by the options;
 * TODO confirm).
 */
private void setUpDictionary() {
    Random random = new Random();
    dictionary = new byte[options.dictSize][];
    for (int i = 0; i < options.dictSize; ++i) {
        int wordLen = options.minWordLen + random.nextInt(options.maxWordLen - options.minWordLen);
        byte[] word = new byte[wordLen];
        random.nextBytes(word);
        dictionary[i] = word;
    }
}
Also used : Random(java.util.Random)

Aggregations

Random (java.util.Random)4728 Test (org.junit.Test)1273 ArrayList (java.util.ArrayList)602 IOException (java.io.IOException)313 HashMap (java.util.HashMap)242 File (java.io.File)209 List (java.util.List)154 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)151 ByteArrayInputStream (java.io.ByteArrayInputStream)134 HashSet (java.util.HashSet)129 ByteBuffer (java.nio.ByteBuffer)123 Test (org.testng.annotations.Test)121 Path (org.apache.hadoop.fs.Path)116 Map (java.util.Map)106 QuickTest (com.hazelcast.test.annotation.QuickTest)99 ParallelTest (com.hazelcast.test.annotation.ParallelTest)94 CountDownLatch (java.util.concurrent.CountDownLatch)93 Configuration (org.apache.hadoop.conf.Configuration)88 ByteArrayOutputStream (java.io.ByteArrayOutputStream)79 Before (org.junit.Before)78