Example: usage of java.io.RandomAccessFile in the Vert.x project (Eclipse), class FileSystemImpl, method truncateInternal.
/**
 * Builds a blocking action that truncates the file at path {@code p} to {@code len} bytes.
 * The work runs when the returned action's {@code perform()} is executed (presumably on a
 * worker thread — confirm against BlockingAction's contract).
 *
 * @param p       path of the file to truncate; must not be null
 * @param len     target length in bytes; must be >= 0
 * @param handler completion handler passed through to the BlockingAction
 * @return the blocking action encapsulating the truncation
 * @throws FileSystemException (from perform) if len is negative, the file does not exist,
 *         or an I/O error occurs
 */
private BlockingAction<Void> truncateInternal(String p, long len, Handler<AsyncResult<Void>> handler) {
Objects.requireNonNull(p);
return new BlockingAction<Void>(handler) {
public Void perform() {
try {
String path = vertx.resolveFile(p).getAbsolutePath();
if (len < 0) {
throw new FileSystemException("Cannot truncate file to size < 0");
}
if (!Files.exists(Paths.get(path))) {
throw new FileSystemException("Cannot truncate file " + path + ". Does not exist");
}
// try-with-resources replaces the manual null-check/close-in-finally:
// the handle is closed even if setLength fails, and a close() failure
// still surfaces as IOException and is wrapped below.
try (RandomAccessFile raf = new RandomAccessFile(path, "rw")) {
raf.setLength(len);
}
} catch (IOException e) {
throw new FileSystemException(e);
}
return null;
}
};
}
Example: usage of java.io.RandomAccessFile in the Druid project (druid-io), class FileUtilsTest, method testMap.
@Test
public void testMap() throws IOException {
File dataFile = folder.newFile("data");
long mappedMemoryBefore = BufferUtils.totalMemoryUsedByDirectAndMappedBuffers();
// Seed the first byte with a sentinel, then grow the file to 1 MB of zeros.
try (RandomAccessFile file = new RandomAccessFile(dataFile, "rw")) {
file.write(42);
file.setLength(1 << 20);
}
// Mapping the file must expose the sentinel byte at offset 0...
try (MappedByteBufferHandler mappedByteBufferHandler = FileUtils.map(dataFile)) {
Assert.assertEquals(42, mappedByteBufferHandler.get().get(0));
}
// ...and closing the handler must release the mapping, leaving mapped-memory
// accounting exactly where it started.
long mappedMemoryAfter = BufferUtils.totalMemoryUsedByDirectAndMappedBuffers();
Assert.assertEquals(mappedMemoryBefore, mappedMemoryAfter);
}
Example: usage of java.io.RandomAccessFile in the Druid project (druid-io), class SmooshedFileMapperTest, method testDeterministicFileUnmapping.
@Test
public void testDeterministicFileUnmapping() throws IOException {
File baseDir = folder.newFolder("base");
long mappedMemoryBefore = BufferUtils.totalMemoryUsedByDirectAndMappedBuffers();
try (FileSmoosher smoosher = new FileSmoosher(baseDir)) {
File dataFile = folder.newFile("data.bin");
// Create a 1 MB file of zero bytes to feed the smoosher.
try (RandomAccessFile file = new RandomAccessFile(dataFile, "rw")) {
file.setLength(1 << 20);
}
smoosher.add(dataFile);
}
long mappedMemoryAfter = BufferUtils.totalMemoryUsedByDirectAndMappedBuffers();
// Neither the smoosher's lifecycle nor add(file) should leave a lingering file mapping.
Assert.assertEquals(mappedMemoryBefore, mappedMemoryAfter);
}
Example: usage of java.io.RandomAccessFile in the Druid project (druid-io), class GenericIndexedWriter, method bagSizePower.
/**
 * Finds the largest power-of-two split size (number of elements per value file)
 * such that every resulting value file fits within {@code fileSizeLimit}.
 *
 * @return the exponent {@code p} such that each value file holds at most 2^p elements
 * @throws IOException if the header file cannot be read
 */
private int bagSizePower() throws IOException {
// Ceiling of total value bytes over element count: average serialized object size.
long avgObjectSize = (valuesOut.getCount() + numWritten - 1) / numWritten;
File headerLongFile = ioPeon.getFile(makeFilename("headerLong"));
Preconditions.checkNotNull(headerLongFile, "header file missing.");
try (RandomAccessFile headerFile = new RandomAccessFile(headerLongFile, "r")) {
// Try the largest split first and walk down; the size estimate is a cheap
// pre-filter before the exact check in actuallyFits.
for (int power = 31; power >= 0; power--) {
if ((1L << power) * avgObjectSize <= fileSizeLimit && actuallyFits(power, headerFile)) {
return power;
}
}
}
throw new ISE("no value split found with fileSizeLimit [%d], avgObjectSize [%d] while serializing [%s]", fileSizeLimit, avgObjectSize, filenameBase);
}
Example: usage of java.io.RandomAccessFile in the Druid project (druid-io), class GenericIndexedWriter, method writeToChannelVersionTwo.
/**
 * Serializes this writer in GenericIndexed "version two" (multi-file) layout:
 * a metadata header written to {@code channel}, the values split across
 * several smooshed files of up to 2^bagSizePower elements each, and finally
 * the header longs re-serialized through the smoosher.
 *
 * @param channel  destination for the version-2 metadata header
 * @param smoosher receives the split value files and the header; must not be null
 * @throws IOException on any read/write failure
 */
private void writeToChannelVersionTwo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException {
// Version 2 cannot be written standalone; the split value files must go through a smoosher.
if (smoosher == null) {
throw new IAE("version 2 GenericIndexedWriter requires FileSmoosher.");
}
// Largest power-of-two element count per value file that fits fileSizeLimit.
int bagSizePower = bagSizePower();
OutputStream metaOut = Channels.newOutputStream(channel);
// Metadata header: version byte, sorted flag, bag-size power, element count,
// and the file-name bytes (length-prefixed).
metaOut.write(GenericIndexed.VERSION_TWO);
metaOut.write(objectsSorted ? 0x1 : 0x0);
metaOut.write(Ints.toByteArray(bagSizePower));
metaOut.write(Ints.toByteArray(Ints.checkedCast(numWritten)));
metaOut.write(Ints.toByteArray(fileNameByteArray.length));
metaOut.write(fileNameByteArray);
try (RandomAccessFile headerFile = new RandomAccessFile(ioPeon.getFile(makeFilename("headerLong")), "r")) {
// NOTE(review): the RandomAccessFile constructor never returns null, so this
// check is vestigial; a missing file would have thrown FileNotFoundException above.
Preconditions.checkNotNull(headerFile, "header file missing.");
long previousValuePosition = 0;
int bagSize = 1 << bagSizePower;
int numberOfFilesRequired = GenericIndexed.getNumberOfFilesRequired(bagSize, numWritten);
// 64 KiB copy buffer, reused for every value file and for writeHeaderLong below.
byte[] buffer = new byte[1 << 16];
try (InputStream is = new FileInputStream(ioPeon.getFile(makeFilename("values")))) {
int counter = -1;
for (int i = 0; i < numberOfFilesRequired; i++) {
if (i != numberOfFilesRequired - 1) {
// Seek to the header entry of this bag's last element;
// entries are 8-byte longs, hence the Longs.BYTES stride.
headerFile.seek((bagSize + counter) * Longs.BYTES);
counter = counter + bagSize;
} else {
// Final file takes whatever elements remain; its end offset is the last header entry.
headerFile.seek((numWritten - 1) * Longs.BYTES);
}
// Header longs are stored little-endian on disk; readLong reads big-endian,
// so the bytes must be reversed.
long valuePosition = Long.reverseBytes(headerFile.readLong());
long numBytesToPutInFile = valuePosition - previousValuePosition;
// Stream exactly this bag's byte range from the values file into its own smooshed file;
// the shared InputStream position advances across iterations, so order matters.
try (SmooshedWriter smooshChannel = smoosher.addWithSmooshedWriter(generateValueFileName(filenameBase, i), numBytesToPutInFile)) {
writeBytesIntoSmooshedChannel(numBytesToPutInFile, buffer, smooshChannel, is);
previousValuePosition = valuePosition;
}
}
}
// Re-serialize the per-element offset header through the smoosher as well.
writeHeaderLong(smoosher, headerFile, bagSizePower, buffer);
}
}
Aggregations