Use of org.apache.carbondata.core.mutate.DeleteDeltaVo in project carbondata by apache.
The class CarbonDeleteFilesDataReader, method getDeletedRowsDataVo.
/**
 * Reads the given delete delta files and builds a map from the
 * blockletid_pageid key to the rows deleted in that page.
 *
 * @param deltaFiles delete delta files array
 * @return map of blockletid_pageid to deleted rows
 */
public Map<String, DeleteDeltaVo> getDeletedRowsDataVo(String[] deltaFiles) {
  List<Future<DeleteDeltaBlockDetails>> taskSubmitList = new ArrayList<>();
  ExecutorService executorService = Executors.newFixedThreadPool(thread_pool_size);
  for (final String deltaFile : deltaFiles) {
    taskSubmitList.add(executorService.submit(new DeleteDeltaFileReaderCallable(deltaFile)));
  }
  try {
    executorService.shutdown();
    executorService.awaitTermination(30, TimeUnit.MINUTES);
  } catch (InterruptedException e) {
    LOGGER.error("Error while reading the delete delta files : " + e.getMessage());
  }
  Map<String, DeleteDeltaVo> pageIdToBlockLetVo = new HashMap<>();
  List<DeleteDeltaBlockletDetails> blockletDetails = null;
  for (int i = 0; i < taskSubmitList.size(); i++) {
    try {
      blockletDetails = taskSubmitList.get(i).get().getBlockletDetails();
    } catch (InterruptedException | ExecutionException e) {
      throw new RuntimeException(e);
    }
    for (DeleteDeltaBlockletDetails blockletDetail : blockletDetails) {
      DeleteDeltaVo deleteDeltaVo = pageIdToBlockLetVo.get(blockletDetail.getBlockletKey());
      if (null == deleteDeltaVo) {
        deleteDeltaVo = new DeleteDeltaVo();
        pageIdToBlockLetVo.put(blockletDetail.getBlockletKey(), deleteDeltaVo);
      }
      deleteDeltaVo.insertData(blockletDetail.getDeletedRows());
    }
  }
  return pageIdToBlockLetVo;
}
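A minimal usage sketch of this reader is shown below. It assumes the same imports as the class above; the delete delta file path is a hypothetical example, not an actual segment layout.

// Hedged usage sketch; the delta file path below is illustrative only.
String[] deltaFiles = new String[] {
    "/store/default/sample_table/Fact/Part0/Segment_0/0_1700000000000.deletedelta"
};
CarbonDeleteFilesDataReader reader = new CarbonDeleteFilesDataReader();
Map<String, DeleteDeltaVo> deletedRowsByBlockletPage = reader.getDeletedRowsDataVo(deltaFiles);
// each key is a blockletid_pageid string; each value holds the deleted row ids for that page
for (Map.Entry<String, DeleteDeltaVo> entry : deletedRowsByBlockletPage.entrySet()) {
  System.out.println("deleted rows recorded for blocklet/page key: " + entry.getKey());
}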
Use of org.apache.carbondata.core.mutate.DeleteDeltaVo in project carbondata by apache.
The class AbstractDetailQueryResultIterator, method intialiseInfos.
private void intialiseInfos() {
  for (BlockExecutionInfo blockInfo : blockExecutionInfos) {
    Map<String, DeleteDeltaVo> deletedRowsMap = null;
    DataRefNodeFinder finder = new BTreeDataRefNodeFinder(blockInfo.getEachColumnValueSize(),
        blockInfo.getDataBlock().getSegmentProperties().getNumberOfSortColumns(),
        blockInfo.getDataBlock().getSegmentProperties().getNumberOfNoDictSortColumns());
    // if delete delta files are present for this block
    if (null != blockInfo.getDeleteDeltaFilePath()
        && 0 != blockInfo.getDeleteDeltaFilePath().length) {
      DeleteDeltaInfo deleteDeltaInfo = new DeleteDeltaInfo(blockInfo.getDeleteDeltaFilePath());
      // read and get the delete delta block details
      deletedRowsMap = getDeleteDeltaDetails(blockInfo.getDataBlock(), deleteDeltaInfo);
      // set the deleted rows map on the block execution info
      blockInfo.setDeletedRecordsMap(deletedRowsMap);
    }
    DataRefNode dataRefNode = blockInfo.getDataBlock().getDataRefNode();
    if (dataRefNode instanceof BlockletDataRefNode) {
      BlockletDataRefNode node = (BlockletDataRefNode) dataRefNode;
      blockInfo.setFirstDataBlock(node);
      blockInfo.setNumberOfBlockToScan(node.numberOfNodes());
    } else {
      DataRefNode startDataBlock = finder.findFirstDataBlock(dataRefNode, blockInfo.getStartKey());
      while (startDataBlock.nodeIndex() < blockInfo.getStartBlockletIndex()) {
        startDataBlock = startDataBlock.getNextDataRefNode();
      }
      long numberOfBlockToScan = blockInfo.getNumberOfBlockletToScan();
      // if the number of blocklets to scan is not set (<= 0), derive it from the end block
      if (numberOfBlockToScan <= 0) {
        DataRefNode endDataBlock = finder.findLastDataBlock(dataRefNode, blockInfo.getEndKey());
        numberOfBlockToScan = endDataBlock.nodeIndex() - startDataBlock.nodeIndex() + 1;
      }
      blockInfo.setFirstDataBlock(startDataBlock);
      blockInfo.setNumberOfBlockToScan(numberOfBlockToScan);
    }
  }
}
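For the non-BlockletDataRefNode branch, the scan range runs from the first blocklet at or after the start blocklet index through either the configured blocklet count or, when that count is not set, the blocklet found for the end key. A tiny sketch of that fallback with plain indices (the values are illustrative only, not taken from a real run):

// Illustrative values; actual nodeIndex() results depend on the btree layout.
long startIndex = 3;              // nodeIndex() of the first blocklet to scan
long endIndex = 9;                // nodeIndex() of the last blocklet for the end key
long numberOfBlockToScan = 0;     // not provided by the block execution info
if (numberOfBlockToScan <= 0) {
  numberOfBlockToScan = endIndex - startIndex + 1;  // 7 blocklets, indices 3..9 inclusive
}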
Use of org.apache.carbondata.core.mutate.DeleteDeltaVo in project carbondata by apache.
The class AbstractDetailQueryResultIterator, method getDeleteDeltaDetails.
/**
 * Returns the deleted rows for a block, reading the delete delta files
 * only when the block's cached delete delta data is stale.
 *
 * @param dataBlock data block
 * @param deleteDeltaInfo delete delta info
 * @return map of blockletid_pageid to deleted rows
 */
private Map<String, DeleteDeltaVo> getDeleteDeltaDetails(AbstractIndex dataBlock,
    DeleteDeltaInfo deleteDeltaInfo) {
  // if the data block is already up to date with the latest delete delta file timestamp
  // then return the currently cached deleted rows
  if (dataBlock.getDeleteDeltaTimestamp() >= deleteDeltaInfo.getLatestDeleteDeltaFileTimestamp()) {
    return dataBlock.getDeletedRowsMap();
  }
  CarbonDeleteFilesDataReader carbonDeleteDeltaFileReader = null;
  // get the lock object so in case of concurrent query only one task will read the delete delta
  // files while the other tasks wait
  Object lockObject = deleteDeltaToLockObjectMap.get(deleteDeltaInfo);
  // if the lock object is null then add one
  if (null == lockObject) {
    synchronized (deleteDeltaToLockObjectMap) {
      // double checking
      lockObject = deleteDeltaToLockObjectMap.get(deleteDeltaInfo);
      if (null == lockObject) {
        lockObject = new Object();
        deleteDeltaToLockObjectMap.put(deleteDeltaInfo, lockObject);
      }
    }
  }
  // double checking whether the deleted rows are already present
  if (dataBlock.getDeleteDeltaTimestamp() < deleteDeltaInfo.getLatestDeleteDeltaFileTimestamp()) {
    // if not then acquire the lock
    synchronized (lockObject) {
      // check the timestamp again
      if (dataBlock.getDeleteDeltaTimestamp() < deleteDeltaInfo.getLatestDeleteDeltaFileTimestamp()) {
        // read the delete delta files
        carbonDeleteDeltaFileReader = new CarbonDeleteFilesDataReader();
        Map<String, DeleteDeltaVo> deletedRowsMap =
            carbonDeleteDeltaFileReader.getDeletedRowsDataVo(deleteDeltaInfo.getDeleteDeltaFile());
        setDeltedDeltaBoToDataBlock(deleteDeltaInfo, deletedRowsMap, dataBlock);
        // remove the lock
        deleteDeltaToLockObjectMap.remove(deleteDeltaInfo);
        return deletedRowsMap;
      } else {
        return dataBlock.getDeletedRowsMap();
      }
    }
  } else {
    return dataBlock.getDeletedRowsMap();
  }
}
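The locking above is a per-key double-checked locking scheme: one lock object per DeleteDeltaInfo, so that under concurrent queries the delta files for a given block are read only once while unrelated blocks proceed in parallel. The sketch below shows the same pattern in isolation; the PerKeyLoader class and its method names are illustrative and not part of carbondata.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Illustrative per-key double-checked locking; not a carbondata class.
public class PerKeyLoader<K, V> {

  private final Map<K, Object> lockMap = new ConcurrentHashMap<>();
  private final Map<K, V> cache = new ConcurrentHashMap<>();

  public V load(K key, Function<K, V> expensiveRead) {
    V cached = cache.get(key);
    if (cached != null) {
      // fast path: another thread already loaded this key
      return cached;
    }
    // one lock object per key so unrelated keys never block each other
    Object lock = lockMap.computeIfAbsent(key, k -> new Object());
    synchronized (lock) {
      // re-check under the lock: the value may have been loaded while we waited
      cached = cache.get(key);
      if (cached == null) {
        cached = expensiveRead.apply(key);
        cache.put(key, cached);
        // the lock object is no longer needed once the value is cached
        lockMap.remove(key);
      }
      return cached;
    }
  }
}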