Use of java.util.Random in project hadoop by apache.
The class FoldedTreeSetTest, method setUpClass:
@BeforeClass
public static void setUpClass() {
  long seed = System.nanoTime();
  System.out.println("This run uses the random seed " + seed);
  srand = new Random(seed);
}
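Printing the seed makes a failing run reproducible: rerun with the logged value instead of a fresh System.nanoTime(). A minimal standalone sketch of that replay hook, where the "test.seed" system property name is hypothetical rather than a Hadoop convention:

import java.util.Random;

public class SeedReplayExample {
  static Random srand;

  // Minimal sketch: allow a fixed seed to be injected to replay a failure.
  // The "test.seed" property name is hypothetical, not a Hadoop convention.
  public static void setUpClass() {
    String fixed = System.getProperty("test.seed");
    long seed = (fixed != null) ? Long.parseLong(fixed) : System.nanoTime();
    System.out.println("This run uses the random seed " + seed);
    srand = new Random(seed);
  }

  public static void main(String[] args) {
    setUpClass();
    System.out.println("first value: " + srand.nextInt());
  }
}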
Use of java.util.Random in project hadoop by apache.
The class TestBestEffortLongFile, method testGetSet:
@Test
public void testGetSet() throws IOException {
  BestEffortLongFile f = new BestEffortLongFile(FILE, 12345L);
  try {
    // Before the file exists, get() should return the default.
    assertEquals(12345L, f.get());
    // And the first access should open (create) it.
    assertTrue(FILE.exists());

    Random r = new Random();
    for (int i = 0; i < 100; i++) {
      long newVal = r.nextLong();
      // Changing the value should be reflected in the next get() call.
      f.set(newVal);
      assertEquals(newVal, f.get());
      // And should be reflected in a new instance (i.e. it actually got
      // written to the file).
      BestEffortLongFile f2 = new BestEffortLongFile(FILE, 999L);
      try {
        assertEquals(newVal, f2.get());
      } finally {
        IOUtils.closeStream(f2);
      }
    }
  } finally {
    IOUtils.closeStream(f);
  }
}
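Outside the test, the same round-trip pattern looks like the sketch below; the org.apache.hadoop.hdfs.util import path is an assumption about where the class lives, and the file path is illustrative:

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.hdfs.util.BestEffortLongFile; // assumed package
import org.apache.hadoop.io.IOUtils;

public class BestEffortLongFileDemo {
  public static void main(String[] args) throws IOException {
    // The second constructor argument is the default returned before the
    // backing file exists; the first get() opens (and creates) the file.
    BestEffortLongFile counter =
        new BestEffortLongFile(new File("/tmp/counter"), 0L);
    try {
      long current = counter.get();
      counter.set(current + 1); // persisted at once, as the test verifies
    } finally {
      IOUtils.closeStream(counter);
    }
  }
}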
Use of java.util.Random in project hadoop by apache.
The class TestWebHdfsFileSystemContract, method testSeek:
public void testSeek() throws IOException {
  final Path dir = new Path("/test/testSeek");
  assertTrue(fs.mkdirs(dir));

  {
    // Test zero file size.
    final Path zero = new Path(dir, "zero");
    fs.create(zero).close();

    int count = 0;
    final FSDataInputStream in = fs.open(zero);
    for (; in.read() != -1; count++);
    in.close();
    assertEquals(0, count);
  }

  final byte[] mydata = new byte[1 << 20];
  new Random().nextBytes(mydata);

  final Path p = new Path(dir, "file");
  FSDataOutputStream out = fs.create(p, false, 4096, (short) 3, 1L << 17);
  out.write(mydata, 0, mydata.length);
  out.close();

  final int one_third = mydata.length / 3;
  final int two_third = one_third * 2;

  {
    // Test seek.
    final int offset = one_third;
    final int len = mydata.length - offset;
    final byte[] buf = new byte[len];
    final FSDataInputStream in = fs.open(p);
    in.seek(offset);
    // Read all remaining data.
    in.readFully(buf);
    in.close();

    for (int i = 0; i < buf.length; i++) {
      assertEquals("Position " + i + ", offset=" + offset + ", length=" + len,
          mydata[i + offset], buf[i]);
    }
  }

  {
    // Test positioned read (read the data after the two_third location).
    final int offset = two_third;
    final int len = mydata.length - offset;
    final byte[] buf = new byte[len];
    final FSDataInputStream in = fs.open(p);
    in.readFully(offset, buf);
    in.close();

    for (int i = 0; i < buf.length; i++) {
      assertEquals("Position " + i + ", offset=" + offset + ", length=" + len,
          mydata[i + offset], buf[i]);
    }
  }
}
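The two blocks above exercise the same data through different contracts: seek() moves the stream position before a sequential readFully(buf), while the positioned readFully(offset, buf) reads at an absolute offset without disturbing the stream position. A minimal sketch of that distinction against the local filesystem (paths and sizes are illustrative, not from the test):

import java.util.Arrays;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SeekVsPread {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path p = new Path("/tmp/seek-demo");

    byte[] data = new byte[1 << 16];
    new Random().nextBytes(data);
    FSDataOutputStream out = fs.create(p, true);
    out.write(data);
    out.close();

    FSDataInputStream in = fs.open(p);
    byte[] a = new byte[100];
    byte[] b = new byte[100];
    in.readFully(1000, a);  // positioned read: stream position stays at 0
    in.seek(1000);          // explicit seek moves the stream position
    in.readFully(b);        // sequential read from the new position
    in.close();
    System.out.println("match = " + Arrays.equals(a, b)); // expect true
  }
}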
Use of java.util.Random in project hadoop by apache.
The class TestChildReaper, method testSomeNodes:
@Test
public void testSomeNodes() throws Exception {
  Timing timing = new Timing();
  ChildReaper reaper = null;
  CuratorFramework client = CuratorFrameworkFactory.newClient(
      server.getConnectString(), timing.session(), timing.connection(),
      new RetryOneTime(1));
  try {
    client.start();

    Random r = new Random();
    int nonEmptyNodes = 0;
    for (int i = 0; i < 10; ++i) {
      client.create().creatingParentsIfNeeded().forPath("/test/" + Integer.toString(i));
      if (r.nextBoolean()) {
        client.create().forPath("/test/" + Integer.toString(i) + "/foo");
        ++nonEmptyNodes;
      }
    }

    reaper = new ChildReaper(client, "/test", Reaper.Mode.REAP_UNTIL_DELETE, 1);
    reaper.start();
    timing.forWaiting().sleepABit();

    Stat stat = client.checkExists().forPath("/test");
    Assert.assertEquals(stat.getNumChildren(), nonEmptyNodes);
  } finally {
    CloseableUtils.closeQuietly(reaper);
    CloseableUtils.closeQuietly(client);
  }
}
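Note that nonEmptyNodes varies between runs because r.nextBoolean() is unseeded, so a failure here is hard to replay; the seed-logging idiom from FoldedTreeSetTest above would fix that. A minimal standalone sketch of that fixture shape (the ZooKeeper calls are omitted, only the coin flips are modeled):

import java.util.Random;

public class ReproducibleFixtureSketch {
  public static void main(String[] args) {
    // Log the seed so a failing mix of empty/non-empty nodes can be replayed.
    long seed = System.nanoTime();
    System.out.println("This run uses the random seed " + seed);
    Random r = new Random(seed);

    int nonEmptyNodes = 0;
    for (int i = 0; i < 10; ++i) {
      if (r.nextBoolean()) { // same coin flip as the test fixture
        ++nonEmptyNodes;
      }
    }
    System.out.println("nonEmptyNodes = " + nonEmptyNodes);
  }
}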
Use of java.util.Random in project hadoop by apache.
The class TestGSet, method testComputeCapacity:
/**
 * Test for {@link LightWeightGSet#computeCapacity(double, String)}.
 */
@Test
public void testComputeCapacity() {
  // Test the boundary conditions where the percentage or the memory is zero.
  testCapacity(0, 0.0);
  testCapacity(100, 0.0);
  testCapacity(0, 100.0);

  // Compute the capacity for 100 random (max memory, percentage) pairs.
  Random r = new Random();
  for (int i = 0; i < 100; i++) {
    long maxMemory = r.nextInt(Integer.MAX_VALUE);
    double percent = r.nextInt(101);
    testCapacity(maxMemory, percent);
  }
}
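The random inputs are bounded: r.nextInt(Integer.MAX_VALUE) yields a max memory in [0, Integer.MAX_VALUE) and r.nextInt(101) a whole-number percentage in [0, 100]. A rough, self-contained sketch of the kind of invariant testCapacity can check against such inputs; the reference size and the power-of-two rounding below are assumptions for illustration, not LightWeightGSet's exact rules:

public class CapacitySketch {
  // Largest power of two not exceeding maxMemory * percent/100 / refSize.
  // The refSize value and the rounding rule are assumptions for illustration.
  static int expectedCapacity(long maxMemory, double percent, int refSize) {
    long entries = (long) (maxMemory * percent / 100.0 / refSize);
    if (entries <= 0) {
      return 0;
    }
    int exponent = 0;
    while ((1L << (exponent + 1)) <= entries) {
      exponent++;
    }
    return 1 << exponent;
  }

  public static void main(String[] args) {
    // 2% of 1 GiB with 8-byte references -> 2^21 = 2097152 entries.
    System.out.println(expectedCapacity(1L << 30, 2.0, 8));
  }
}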