Usage of org.apache.carbondata.core.datamap.dev.DataMap in the Apache CarbonData project — class BlockletDataMapFactory, method clear:
/**
 * Drops every cached DataMap entry registered for the given segment: the segment's
 * index identifiers are removed from {@code segmentMap}, and each identifier still
 * present in the cache is invalidated and its DataMap cleared.
 */
@Override
public void clear(Segment segment) {
  List<TableBlockIndexUniqueIdentifier> identifiers = segmentMap.remove(segment.getSegmentNo());
  if (identifiers == null) {
    // Nothing was registered for this segment; no cache entries to release.
    return;
  }
  for (TableBlockIndexUniqueIdentifier identifier : identifiers) {
    DataMap cachedDataMap = cache.getIfPresent(identifier);
    if (cachedDataMap == null) {
      continue;
    }
    cache.invalidate(identifier);
    cachedDataMap.clear();
  }
}
Usage of org.apache.carbondata.core.datamap.dev.DataMap in the Apache CarbonData project — class TableDataMap, method prune (driver side):
/**
 * Prunes the given valid segments with the supplied filter expression.
 *
 * @param segments   segments to prune
 * @param filterExp  filter resolver; when {@code null} all blocklets are returned
 * @param partitions partition specs used to narrow the scan
 * @return extended blocklets surviving the prune, tagged with their segment id
 * @throws IOException if blocklet details or segment properties cannot be read
 */
public List<ExtendedBlocklet> prune(List<Segment> segments, FilterResolverIntf filterExp, List<PartitionSpec> partitions) throws IOException {
  List<ExtendedBlocklet> result = new ArrayList<>();
  for (Segment segment : segments) {
    List<Blocklet> segmentBlocklets = new ArrayList<>();
    if (filterExp == null) {
      // No filter supplied: every blocklet of the segment qualifies.
      segmentBlocklets = blockletDetailsFetcher.getAllBlocklets(segment, partitions);
    } else {
      SegmentProperties properties = segmentPropertiesFetcher.getSegmentProperties(segment);
      for (DataMap dataMap : dataMapFactory.getDataMaps(segment)) {
        segmentBlocklets.addAll(dataMap.prune(filterExp, properties, partitions));
      }
    }
    // Resolve to extended blocklets and stamp each with its owning segment id.
    result.addAll(addSegmentId(blockletDetailsFetcher.getExtendedBlocklets(segmentBlocklets, segment), segment.getSegmentNo()));
  }
  return result;
}
Usage of org.apache.carbondata.core.datamap.dev.DataMap in the Apache CarbonData project — class TableDataMap, method prune (distributed/executor side):
/**
 * Prunes blocklets on an executor after distribution. Takes the distributable
 * object, runs the filter against every DataMap it covers, and resolves the
 * surviving blocklets into extended blocklets tagged with the segment id.
 * For fine-grained (FG) datamaps, each pruned blocklet is additionally
 * serialized to a unique file under the datamap's write path so the detailed
 * blocklet information can be read back later.
 *
 * @param distributable distributable describing the segment/datamaps to prune
 * @param filterExp     filter resolver to apply
 * @param partitions    partition specs used to narrow the scan
 * @return extended blocklets surviving the prune
 * @throws IOException if segment properties, blocklet details, or FG
 *                     serialization fail
 */
public List<ExtendedBlocklet> prune(DataMapDistributable distributable, FilterResolverIntf filterExp, List<PartitionSpec> partitions) throws IOException {
  List<ExtendedBlocklet> detailedBlocklets = new ArrayList<>();
  List<Blocklet> blocklets = new ArrayList<>();
  // Hoisted out of the loop: the segment (and hence its properties) is the
  // same for every DataMap of this distributable.
  SegmentProperties segmentProperties = segmentPropertiesFetcher.getSegmentProperties(distributable.getSegment());
  for (DataMap dataMap : dataMapFactory.getDataMaps(distributable)) {
    blocklets.addAll(dataMap.prune(filterExp, segmentProperties, partitions));
  }
  BlockletSerializer serializer = new BlockletSerializer();
  String writePath = identifier.getTablePath() + CarbonCommonConstants.FILE_SEPARATOR + dataMapSchema.getDataMapName();
  // Evaluate the datamap level once instead of per blocklet.
  boolean isFineGrain = dataMapFactory.getDataMapType() == DataMapLevel.FG;
  if (isFineGrain) {
    FileFactory.mkdirs(writePath, FileFactory.getFileType(writePath));
  }
  for (Blocklet blocklet : blocklets) {
    ExtendedBlocklet detailedBlocklet = blockletDetailsFetcher.getExtendedBlocklet(blocklet, distributable.getSegment());
    if (isFineGrain) {
      // nanoTime() gives each blocklet a unique file name under the write path.
      String blockletWritePath = writePath + CarbonCommonConstants.FILE_SEPARATOR + System.nanoTime();
      detailedBlocklet.setDataMapWriterPath(blockletWritePath);
      serializer.serializeBlocklet((FineGrainBlocklet) blocklet, blockletWritePath);
    }
    detailedBlocklet.setSegmentId(distributable.getSegment().getSegmentNo());
    detailedBlocklets.add(detailedBlocklet);
  }
  return detailedBlocklets;
}
End of aggregated usage examples.