Use of org.apache.hyracks.api.io.FileReference in project asterixdb by apache.
From class BufferCacheRegressionTest, method flushBehaviorTest:
private void flushBehaviorTest(boolean deleteFile) throws IOException {
TestStorageManagerComponentHolder.init(PAGE_SIZE, 10, 1);
IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx.getJobletContext().getServiceContext());
IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider();
IOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
FileReference firstFileRef = ioManager.resolve(fileName);
bufferCache.createFile(firstFileRef);
int firstFileId = fmp.lookupFileId(firstFileRef);
bufferCache.openFile(firstFileId);
// Fill the first page with known data and make it dirty by write
// latching it.
ICachedPage writePage = bufferCache.pin(BufferedFileHandle.getDiskPageId(firstFileId, 0), true);
writePage.acquireWriteLatch();
try {
ByteBuffer buf = writePage.getBuffer();
for (int i = 0; i < buf.capacity(); i++) {
buf.put(Byte.MAX_VALUE);
}
} finally {
writePage.releaseWriteLatch(true);
bufferCache.unpin(writePage);
}
bufferCache.closeFile(firstFileId);
if (deleteFile) {
bufferCache.deleteFile(firstFileId, false);
}
// Create a file with the same name.
FileReference secondFileRef = ioManager.resolve(fileName);
bufferCache.createFile(secondFileRef);
int secondFileId = fmp.lookupFileId(secondFileRef);
// This open will replace the firstFileRef's slot in the BufferCache,
// causing its pages to be cleaned up. We want to make sure that those
// dirty pages are not flushed to the disk, because the file was declared
// as deleted, and somebody might already be using the same filename again
// (having been assigned a different fileId).
bufferCache.openFile(secondFileId);
// Manually open the file and inspect its contents. We cannot simply
// ask the BufferCache to pin the page, because it would return the same
// physical memory again, and for performance reasons pages are never
// reset with 0's.
FileReference testFileRef = ioManager.resolve(fileName);
IFileHandle testFileHandle = ioManager.open(testFileRef, FileReadWriteMode.READ_ONLY, FileSyncMode.METADATA_SYNC_DATA_SYNC);
ByteBuffer testBuffer = ByteBuffer.allocate(PAGE_SIZE + BufferCache.RESERVED_HEADER_BYTES);
ioManager.syncRead(testFileHandle, 0, testBuffer);
for (int i = BufferCache.RESERVED_HEADER_BYTES; i < testBuffer.capacity(); i++) {
if (deleteFile) {
// We deleted the file. We expect to see a clean buffer.
if (testBuffer.get(i) == Byte.MAX_VALUE) {
fail("Page 0 of deleted file was fazily flushed in openFile(), " + "corrupting the data of a newly created file with the same name.");
}
} else {
// We did not delete the file. We expect to see a buffer full of
// Byte.MAX_VALUE.
if (testBuffer.get(i) != Byte.MAX_VALUE) {
fail("Page 0 of closed file was not flushed when properly, when reclaiming the file slot of fileId 0 in the BufferCache.");
}
}
}
ioManager.close(testFileHandle);
bufferCache.closeFile(secondFileId);
if (deleteFile) {
bufferCache.deleteFile(secondFileId, false);
}
bufferCache.close();
}
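Since the helper takes the deleteFile flag as a parameter, a single JUnit test can drive both branches. A minimal sketch of such a driver, assuming JUnit 4; the method name is illustrative and not taken from the original class:
@Test
public void testFlushBehaviorOnFileEviction() throws IOException {
    // Evicting pages of a deleted file must not write them back to disk.
    flushBehaviorTest(true);
    // Evicting pages of a merely closed file must be flushed to disk.
    flushBehaviorTest(false);
}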
Use of org.apache.hyracks.api.io.FileReference in project asterixdb by apache.
From class BufferCacheTest, method simpleMaxOpenFilesTest:
@Test
public void simpleMaxOpenFilesTest() throws HyracksException {
TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx.getJobletContext().getServiceContext());
IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider();
IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
List<Integer> fileIds = new ArrayList<>();
for (int i = 0; i < MAX_OPEN_FILES; i++) {
String fileName = getFileName();
FileReference file = ioManager.resolve(fileName);
bufferCache.createFile(file);
int fileId = fmp.lookupFileId(file);
bufferCache.openFile(fileId);
fileIds.add(fileId);
}
boolean exceptionThrown = false;
// since all files are open, next open should fail
try {
String fileName = getFileName();
FileReference file = ioManager.resolve(fileName);
bufferCache.createFile(file);
int fileId = fmp.lookupFileId(file);
bufferCache.openFile(fileId);
} catch (HyracksDataException e) {
exceptionThrown = true;
}
Assert.assertTrue(exceptionThrown);
// close a random file
int ix = Math.abs(rnd.nextInt()) % fileIds.size();
bufferCache.closeFile(fileIds.get(ix));
fileIds.remove(ix);
// now open should succeed again
exceptionThrown = false;
try {
String fileName = getFileName();
FileReference file = ioManager.resolve(fileName);
bufferCache.createFile(file);
int fileId = fmp.lookupFileId(file);
bufferCache.openFile(fileId);
fileIds.add(fileId);
} catch (HyracksDataException e) {
exceptionThrown = true;
}
Assert.assertFalse(exceptionThrown);
for (Integer i : fileIds) {
bufferCache.closeFile(i.intValue());
}
bufferCache.close();
}
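Both of these tests repeat the same resolve/create/lookup/open sequence; the pattern can be read as a small helper. A hedged sketch (the helper is hypothetical and not part of the original BufferCacheTest):
private int createAndOpenFile(IIOManager ioManager, IBufferCache bufferCache, IFileMapProvider fmp)
        throws HyracksDataException {
    // Resolve a relative file name to a FileReference on a configured IO device.
    String fileName = getFileName();
    FileReference file = ioManager.resolve(fileName);
    // Register the file, look up its id, and claim one of the MAX_OPEN_FILES slots;
    // openFile throws HyracksDataException when no slot can be freed.
    bufferCache.createFile(file);
    int fileId = fmp.lookupFileId(file);
    bufferCache.openFile(fileId);
    return fileId;
}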
Use of org.apache.hyracks.api.io.FileReference in project asterixdb by apache.
From class BufferCacheTest, method contentCheckingMaxOpenFilesTest:
@Test
public void contentCheckingMaxOpenFilesTest() throws HyracksException {
TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx.getJobletContext().getServiceContext());
IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider();
IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
List<Integer> fileIds = new ArrayList<>();
Map<Integer, ArrayList<Integer>> pageContents = new HashMap<>();
int num = 10;
int testPageId = 0;
// open max number of files and write some stuff into their first page
for (int i = 0; i < MAX_OPEN_FILES; i++) {
String fileName = getFileName();
FileReference file = ioManager.resolve(fileName);
bufferCache.createFile(file);
int fileId = fmp.lookupFileId(file);
bufferCache.openFile(fileId);
fileIds.add(fileId);
ICachedPage page = null;
page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, testPageId), true);
page.acquireWriteLatch();
try {
ArrayList<Integer> values = new ArrayList<>();
for (int j = 0; j < num; j++) {
int x = Math.abs(rnd.nextInt());
page.getBuffer().putInt(j * 4, x);
values.add(x);
}
pageContents.put(fileId, values);
} finally {
page.releaseWriteLatch(true);
bufferCache.unpin(page);
}
}
boolean exceptionThrown = false;
// since all files are open, next open should fail
try {
String fileName = getFileName();
FileReference file = ioManager.resolve(fileName);
bufferCache.createFile(file);
int fileId = fmp.lookupFileId(file);
bufferCache.openFile(fileId);
} catch (HyracksDataException e) {
exceptionThrown = true;
}
Assert.assertTrue(exceptionThrown);
// close a few random files
ArrayList<Integer> closedFileIds = new ArrayList<>();
int filesToClose = 5;
for (int i = 0; i < filesToClose; i++) {
int ix = Math.abs(rnd.nextInt()) % fileIds.size();
bufferCache.closeFile(fileIds.get(ix));
closedFileIds.add(fileIds.get(ix));
fileIds.remove(ix);
}
// now open a few new files
for (int i = 0; i < filesToClose; i++) {
String fileName = getFileName();
FileReference file = ioManager.resolve(fileName);
bufferCache.createFile(file);
int fileId = fmp.lookupFileId(file);
bufferCache.openFile(fileId);
fileIds.add(fileId);
}
// since all files are open, next open should fail
exceptionThrown = false;
try {
String fileName = getFileName();
FileReference file = ioManager.resolve(fileName);
bufferCache.createFile(file);
int fileId = fmp.lookupFileId(file);
bufferCache.openFile(fileId);
} catch (HyracksDataException e) {
exceptionThrown = true;
}
Assert.assertTrue(exceptionThrown);
// close a few random files again
for (int i = 0; i < filesToClose; i++) {
int ix = Math.abs(rnd.nextInt()) % fileIds.size();
bufferCache.closeFile(fileIds.get(ix));
closedFileIds.add(fileIds.get(ix));
fileIds.remove(ix);
}
// now open those closed files again and verify their contents
for (int i = 0; i < filesToClose; i++) {
int closedFileId = closedFileIds.get(i);
bufferCache.openFile(closedFileId);
fileIds.add(closedFileId);
// pin first page and verify contents
ICachedPage page = null;
page = bufferCache.pin(BufferedFileHandle.getDiskPageId(closedFileId, testPageId), false);
page.acquireReadLatch();
try {
ArrayList<Integer> values = pageContents.get(closedFileId);
for (int j = 0; j < values.size(); j++) {
Assert.assertEquals(values.get(j).intValue(), page.getBuffer().getInt(j * 4));
}
} finally {
page.releaseReadLatch();
bufferCache.unpin(page);
}
}
for (Integer i : fileIds) {
bufferCache.closeFile(i.intValue());
}
bufferCache.close();
}
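The read-back loop above mirrors the earlier write loop, and the verification half can be factored out as shown below. A hedged sketch with an illustrative name, not part of the original test:
private void verifyFirstPage(IBufferCache bufferCache, int fileId, List<Integer> expected)
        throws HyracksDataException {
    // Pin page 0 of the file without creating it; the page must already exist.
    ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, 0), false);
    page.acquireReadLatch();
    try {
        for (int j = 0; j < expected.size(); j++) {
            // Each value was written as a 4-byte int at offset j * 4.
            Assert.assertEquals(expected.get(j).intValue(), page.getBuffer().getInt(j * 4));
        }
    } finally {
        page.releaseReadLatch();
        bufferCache.unpin(page);
    }
}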
Use of org.apache.hyracks.api.io.FileReference in project asterixdb by apache.
From class AbstractIntegrationTest, method createFile:
protected FileSplit createFile(NodeControllerService ncs) throws IOException {
String fileName = "f" + aInteger.getAndIncrement() + ".tmp";
FileReference fileRef = ncs.getIoManager().getFileReference(0, fileName);
FileUtils.deleteQuietly(fileRef.getFile());
fileRef.getFile().createNewFile();
outputFiles.add(fileRef.getFile());
return new ManagedFileSplit(ncs.getId(), fileName);
}
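Test job specifications typically consume such splits through an IFileSplitProvider; a minimal sketch, assuming the ConstantFileSplitProvider from hyracks-dataflow-std and a NodeControllerService nc1 from the surrounding test:
// Wrap a freshly created output file in a constant split provider for a test operator.
FileSplit[] outputSplits = new FileSplit[] { createFile(nc1) };
IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(outputSplits);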
Use of org.apache.hyracks.api.io.FileReference in project asterixdb by apache.
From class ExternalDatasetIndexesAbortOperatorDescriptor, method performOpOnIndex:
@Override
protected void performOpOnIndex(IIndexDataflowHelper indexDataflowHelper, IHyracksTaskContext ctx) throws HyracksDataException {
String path = indexDataflowHelper.getResource().getPath();
IIOManager ioManager = ctx.getIoManager();
FileReference file = ioManager.resolve(path);
AbortRecoverLSMIndexFileManager fileManager = new AbortRecoverLSMIndexFileManager(ctx.getIoManager(), file);
fileManager.deleteTransactionFiles();
}
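The resolve call above turns a resource-relative path into a FileReference on the node's IO devices; the same idiom can be used to probe a resource before acting on it. A minimal sketch using only the APIs already shown (the existence check is illustrative, not part of the original operator):
FileReference indexDir = ioManager.resolve(path);
// FileReference exposes the underlying java.io.File, so plain file checks work.
if (indexDir.getFile().exists()) {
    // safe to construct an index file manager and clean up transaction files here
}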