Use of java.util.Random in project hadoop by apache.
The class TestTFileByteArrays, method testFailureOpenRandomFile.
@Test
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create a random file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  Random rand = new Random();
  byte[] buf = new byte[K];
  // fill with > 1MB of random data
  for (int nx = 0; nx < K + 2; nx++) {
    rand.nextBytes(buf);
    out.write(buf);
  }
  out.close();
  try {
    // opening a file that is not a valid TFile should fail
    new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.fail("Error on handling random files.");
  } catch (IOException e) {
    // noop, expecting exceptions
  }
}
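The try/fail/catch idiom above is the classic pre-JUnit-4.13 way to assert that an exception is thrown; presumably the Reader rejects the file because random bytes do not form a valid TFile. On JUnit 4.13 or later the same check could be written with assertThrows (a sketch, not the project's code):

Assert.assertThrows(IOException.class,
    () -> new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf));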
Use of java.util.Random in project hadoop by apache.
The class AppendTestUtil, method randomBytes.
public static byte[] randomBytes(long seed, int size) {
  LOG.info("seed=" + seed + ", size=" + size);
  final byte[] b = new byte[size];
  final Random rand = new Random(seed);
  rand.nextBytes(b);
  return b;
}
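The seed parameter makes the output reproducible: two Random instances constructed with the same seed emit identical byte sequences, so a test can store just the seed instead of a copy of the data. A minimal self-contained illustration (hypothetical demo class, not Hadoop code):

import java.util.Arrays;
import java.util.Random;

public class SeededBytesDemo {
  public static void main(String[] args) {
    byte[] a = new byte[16];
    byte[] b = new byte[16];
    new Random(42L).nextBytes(a);
    new Random(42L).nextBytes(b);
    // Same seed, same sequence: prints true.
    System.out.println(Arrays.equals(a, b));
  }
}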
Use of java.util.Random in project hadoop by apache.
The class DFSTestUtil, method checkFiles.
/** Check if the files have been copied correctly. */
public boolean checkFiles(FileSystem fs, String topdir) throws IOException {
  Path root = new Path(topdir);
  for (int idx = 0; idx < nFiles; idx++) {
    Path fPath = new Path(root, files[idx].getName());
    try (FSDataInputStream in = fs.open(fPath)) {
      byte[] toRead = new byte[files[idx].getSize()];
      byte[] toCompare = new byte[files[idx].getSize()];
      // regenerate the expected contents from the file's stored seed
      Random rb = new Random(files[idx].getSeed());
      rb.nextBytes(toCompare);
      in.readFully(0, toRead);
      for (int i = 0; i < toRead.length; i++) {
        if (toRead[i] != toCompare[i]) {
          return false;
        }
      }
    }
  }
  return true;
}
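This verification works only because the writer recorded each file's seed: regenerating the stream with new Random(seed) reproduces the exact bytes that were written, so no reference copy needs to be kept. A sketch of what the write side could look like (hypothetical writeFile helper; the actual creation code in DFSTestUtil differs):

static void writeFile(FileSystem fs, Path p, int size, long seed) throws IOException {
  byte[] data = new byte[size];
  // same seed => same bytes that checkFiles later regenerates
  new Random(seed).nextBytes(data);
  try (FSDataOutputStream out = fs.create(p)) {
    out.write(data);
  }
}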
Use of java.util.Random in project hadoop by apache.
The class TestEnhancedByteBufferAccess, method testIndirectFallbackReads.
/**
 * Test fallback reads on a stream which does not support the
 * ByteBufferReadable interface.
 */
@Test
public void testIndirectFallbackReads() throws Exception {
  final String testPath =
      GenericTestUtils.getTestDir("indirectFallbackTestFile").getAbsolutePath();
  final int TEST_FILE_LENGTH = 16385;
  final int RANDOM_SEED = 23453;
  FileOutputStream fos = null;
  FileInputStream fis = null;
  try {
    fos = new FileOutputStream(testPath);
    Random random = new Random(RANDOM_SEED);
    byte[] original = new byte[TEST_FILE_LENGTH];
    random.nextBytes(original);
    fos.write(original);
    fos.close();
    fos = null;
    fis = new FileInputStream(testPath);
    testFallbackImpl(fis, original);
  } finally {
    IOUtils.cleanup(LOG, fos, fis);
    new File(testPath).delete();
  }
}
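FileInputStream does not implement ByteBufferReadable, which is exactly what forces the code under test onto its fallback path: reading through a temporary byte[] and copying into the ByteBuffer. A minimal sketch of that fallback idea (an assumption about the general technique, not the actual testFallbackImpl internals):

static int fallbackRead(InputStream in, ByteBuffer buf) throws IOException {
  // Read into a heap array first, since the stream cannot fill a ByteBuffer directly.
  byte[] tmp = new byte[buf.remaining()];
  int n = in.read(tmp);
  if (n > 0) {
    buf.put(tmp, 0, n);
  }
  return n;
}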
Use of java.util.Random in project hadoop by apache.
The class DFSTestUtil, method appendFile.
/**
 * Append the specified number of bytes to a given file.
 * @param fs The file system
 * @param p Path of the file to append to
 * @param length Number of bytes to append to the file
 * @throws IOException
 */
public static void appendFile(FileSystem fs, Path p, int length) throws IOException {
  assert fs.exists(p);
  assert length >= 0;
  byte[] toAppend = new byte[length];
  Random random = new Random();
  random.nextBytes(toAppend);
  try (FSDataOutputStream out = fs.append(p)) {
    out.write(toAppend);
  }
}
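Because the appended bytes come from an unseeded Random, they are not reproducible; the helper suits tests that care about file length rather than content. A typical call (the path is illustrative):

// Grow an existing file by 4 KB of arbitrary data.
DFSTestUtil.appendFile(fs, new Path("/test/appendDemo"), 4096);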