Search in sources :

Example 76 with Random

use of java.util.Random in project hadoop by apache.

From the class FoldedTreeSetTest, the method setUpClass:

@BeforeClass
public static void setUpClass() {
    // Seed the shared RNG from the wall clock and print the seed so that
    // any random failure in this test class can be reproduced exactly.
    final long randomSeed = System.nanoTime();
    System.out.println("This run uses the random seed " + randomSeed);
    srand = new Random(randomSeed);
}
Also used : Random(java.util.Random) BeforeClass(org.junit.BeforeClass)

Example 77 with Random

use of java.util.Random in project hadoop by apache.

From the class TestBestEffortLongFile, the method testGetSet:

@Test
public void testGetSet() throws IOException {
    BestEffortLongFile f = new BestEffortLongFile(FILE, 12345L);
    try {
        // With no file on disk yet, get() must fall back to the default.
        assertEquals(12345L, f.get());
        // That first get() should have created/opened the backing file.
        assertTrue(FILE.exists());
        final Random rng = new Random();
        for (int iteration = 0; iteration < 100; iteration++) {
            final long value = rng.nextLong();
            // A set() must be visible through the same instance...
            f.set(value);
            assertEquals(value, f.get());
            // ...and through a freshly opened instance, proving the value
            // was actually persisted to disk rather than merely cached.
            BestEffortLongFile reopened = new BestEffortLongFile(FILE, 999L);
            try {
                assertEquals(value, reopened.get());
            } finally {
                IOUtils.closeStream(reopened);
            }
        }
    } finally {
        IOUtils.closeStream(f);
    }
}
Also used : Random(java.util.Random) Test(org.junit.Test)

Example 78 with Random

use of java.util.Random in project hadoop by apache.

From the class TestWebHdfsFileSystemContract, the method testSeek:

public void testSeek() throws IOException {
    final Path dir = new Path("/test/testSeek");
    assertTrue(fs.mkdirs(dir));
    {
        // A zero-length file should hit EOF on the very first read().
        final Path zero = new Path(dir, "zero");
        fs.create(zero).close();
        final FSDataInputStream in = fs.open(zero);
        int bytesRead = 0;
        while (in.read() != -1) {
            bytesRead++;
        }
        in.close();
        assertEquals(0, bytesRead);
    }
    // Write 1 MB of random bytes to serve as the seek/pread fixture.
    final byte[] mydata = new byte[1 << 20];
    new Random().nextBytes(mydata);
    final Path p = new Path(dir, "file");
    final FSDataOutputStream out = fs.create(p, false, 4096, (short) 3, 1L << 17);
    out.write(mydata, 0, mydata.length);
    out.close();
    final int oneThird = mydata.length / 3;
    final int twoThirds = oneThird * 2;
    {
        // seek() forward one third, then readFully(): the remaining
        // two thirds must match the source data byte for byte.
        final int offset = oneThird;
        final int len = mydata.length - offset;
        final byte[] buf = new byte[len];
        final FSDataInputStream in = fs.open(p);
        in.seek(offset);
        in.readFully(buf);
        in.close();
        for (int i = 0; i < buf.length; i++) {
            assertEquals("Position " + i + ", offset=" + offset + ", length=" + len, mydata[i + offset], buf[i]);
        }
    }
    {
        // Positional read (pread) of the final third without seeking;
        // it must also match the source data byte for byte.
        final int offset = twoThirds;
        final int len = mydata.length - offset;
        final byte[] buf = new byte[len];
        final FSDataInputStream in = fs.open(p);
        in.readFully(offset, buf);
        in.close();
        for (int i = 0; i < buf.length; i++) {
            assertEquals("Position " + i + ", offset=" + offset + ", length=" + len, mydata[i + offset], buf[i]);
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Random(java.util.Random) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream)

Example 79 with Random

use of java.util.Random in project hadoop by apache.

From the class TestChildReaper, the method testSomeNodes:

@Test
public void testSomeNodes() throws Exception {
    // Verifies that ChildReaper removes only the EMPTY children of /test:
    // children that were given a "/foo" grandchild must survive the reap,
    // so afterwards /test holds exactly nonEmptyNodes children.
    Timing timing = new Timing();
    ChildReaper reaper = null;
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), timing.session(), timing.connection(), new RetryOneTime(1));
    try {
        client.start();
        Random r = new Random();
        int nonEmptyNodes = 0;
        // Create 10 children under /test; randomly make some non-empty.
        for (int i = 0; i < 10; ++i) {
            client.create().creatingParentsIfNeeded().forPath("/test/" + Integer.toString(i));
            if (r.nextBoolean()) {
                client.create().forPath("/test/" + Integer.toString(i) + "/foo");
                ++nonEmptyNodes;
            }
        }
        reaper = new ChildReaper(client, "/test", Reaper.Mode.REAP_UNTIL_DELETE, 1);
        reaper.start();
        timing.forWaiting().sleepABit();
        Stat stat = client.checkExists().forPath("/test");
        // checkExists() returns null when the node is gone; fail with a
        // clear message instead of an NPE on getNumChildren().
        Assert.assertNotNull("/test should still exist after reaping", stat);
        // JUnit's Assert.assertEquals takes (expected, actual); the original
        // call had them swapped, producing a misleading failure message.
        Assert.assertEquals(nonEmptyNodes, stat.getNumChildren());
    } finally {
        CloseableUtils.closeQuietly(reaper);
        CloseableUtils.closeQuietly(client);
    }
}
Also used : CuratorFramework(org.apache.curator.framework.CuratorFramework) RetryOneTime(org.apache.curator.retry.RetryOneTime) Stat(org.apache.zookeeper.data.Stat) Random(java.util.Random) Timing(org.apache.curator.test.Timing) Test(org.junit.Test)

Example 80 with Random

use of java.util.Random in project hadoop by apache.

From the class TestGSet, the method testComputeCapacity:

/** 
   * Test for {@link LightWeightGSet#computeCapacity(double, String)}
   */
@Test
public void testComputeCapacity() {
    // Boundary cases: zero memory and/or zero percentage must be accepted.
    testCapacity(0, 0.0);
    testCapacity(100, 0.0);
    testCapacity(0, 100.0);
    // Then probe 100 random (maxMemory, percent) combinations.
    final Random random = new Random();
    for (int trial = 0; trial < 100; trial++) {
        final long maxMemory = random.nextInt(Integer.MAX_VALUE);
        final double percent = random.nextInt(101);
        testCapacity(maxMemory, percent);
    }
}
Also used : Random(java.util.Random) Test(org.junit.Test)

Aggregations

Random (java.util.Random)4728 Test (org.junit.Test)1273 ArrayList (java.util.ArrayList)602 IOException (java.io.IOException)313 HashMap (java.util.HashMap)242 File (java.io.File)209 List (java.util.List)154 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)151 ByteArrayInputStream (java.io.ByteArrayInputStream)134 HashSet (java.util.HashSet)129 ByteBuffer (java.nio.ByteBuffer)123 Test (org.testng.annotations.Test)121 Path (org.apache.hadoop.fs.Path)116 Map (java.util.Map)106 QuickTest (com.hazelcast.test.annotation.QuickTest)99 ParallelTest (com.hazelcast.test.annotation.ParallelTest)94 CountDownLatch (java.util.concurrent.CountDownLatch)93 Configuration (org.apache.hadoop.conf.Configuration)88 ByteArrayOutputStream (java.io.ByteArrayOutputStream)79 Before (org.junit.Before)78