Usage example of org.apache.carbondata.core.indexstore.ExtendedBlocklet in the Apache CarbonData project:
the prune method of the AndDataMapExprWrapper class.
/**
 * Prunes by intersecting the results of both child wrappers (AND semantics):
 * a blocklet is kept only if both the left and the right child select it.
 *
 * @param segments          segments to prune
 * @param partitionsToPrune partition specs used to restrict the prune
 * @return blocklets selected by both children
 * @throws IOException if either child prune fails
 */
@Override
public List<ExtendedBlocklet> prune(List<Segment> segments, List<PartitionSpec> partitionsToPrune) throws IOException {
  List<ExtendedBlocklet> leftPrune = left.prune(segments, partitionsToPrune);
  List<ExtendedBlocklet> rightPrune = right.prune(segments, partitionsToPrune);
  // Hash-set membership gives an O(left + right) intersection instead of the
  // O(left * right) List.contains scan. NOTE(review): relies on
  // ExtendedBlocklet#equals/hashCode being consistent — confirm both are overridden.
  java.util.Set<ExtendedBlocklet> rightSet = new java.util.HashSet<>(rightPrune);
  List<ExtendedBlocklet> andBlocklets = new ArrayList<>();
  for (ExtendedBlocklet blocklet : leftPrune) {
    if (rightSet.contains(blocklet)) {
      andBlocklets.add(blocklet);
    }
  }
  return andBlocklets;
}
Usage example of org.apache.carbondata.core.indexstore.ExtendedBlocklet in the Apache CarbonData project:
the segment-based prune method of the TableDataMap class.
/**
 * Prunes the given valid segments with the supplied filter expression and
 * partition specs, returning the surviving blocklets tagged with their
 * segment id. When {@code filterExp} is null every blocklet qualifies.
 *
 * @param segments   valid segments to prune
 * @param filterExp  resolved filter expression; null means "no filter"
 * @param partitions partition specs used to restrict the prune
 * @return extended blocklets that survive the prune, with segment ids set
 * @throws IOException if blocklet details cannot be fetched
 */
public List<ExtendedBlocklet> prune(List<Segment> segments, FilterResolverIntf filterExp, List<PartitionSpec> partitions) throws IOException {
  List<ExtendedBlocklet> blocklets = new ArrayList<>();
  for (Segment segment : segments) {
    List<Blocklet> pruneBlocklets = new ArrayList<>();
    if (filterExp == null) {
      // No filter was passed: every blocklet of the segment is returned.
      pruneBlocklets = blockletDetailsFetcher.getAllBlocklets(segment, partitions);
    } else {
      // Let each data map of the segment prune against the filter.
      SegmentProperties segmentProperties = segmentPropertiesFetcher.getSegmentProperties(segment);
      for (DataMap dataMap : dataMapFactory.getDataMaps(segment)) {
        pruneBlocklets.addAll(dataMap.prune(filterExp, segmentProperties, partitions));
      }
    }
    // Resolve detail info and stamp the owning segment id onto each blocklet.
    blocklets.addAll(addSegmentId(blockletDetailsFetcher.getExtendedBlocklets(pruneBlocklets, segment), segment.getSegmentNo()));
  }
  return blocklets;
}
Usage example of org.apache.carbondata.core.indexstore.ExtendedBlocklet in the Apache CarbonData project:
the distributable-based prune method of the TableDataMap class.
/**
 * Prunes using a distributable object; invoked on any machine after the work
 * has been distributed. Resolves the distributable's data maps, prunes each
 * against the filter, and converts the results to detailed blocklets. For
 * fine-grain (FG) data maps the row-level blocklet detail is additionally
 * serialized to a per-blocklet file under the data map's write path.
 *
 * @param distributable distributable unit identifying the segment and data maps
 * @param filterExp     resolved filter expression
 * @param partitions    partition specs used to restrict the prune
 * @return detailed blocklets with segment id (and, for FG, writer path) set
 * @throws IOException if pruning, detail fetch, or FG serialization fails
 */
public List<ExtendedBlocklet> prune(DataMapDistributable distributable, FilterResolverIntf filterExp, List<PartitionSpec> partitions) throws IOException {
  List<ExtendedBlocklet> detailedBlocklets = new ArrayList<>();
  List<Blocklet> blocklets = new ArrayList<>();
  Segment segment = distributable.getSegment();
  // Hoisted out of the loop: the segment (and hence its properties) is the
  // same for every data map of this distributable.
  SegmentProperties segmentProperties = segmentPropertiesFetcher.getSegmentProperties(segment);
  for (DataMap dataMap : dataMapFactory.getDataMaps(distributable)) {
    blocklets.addAll(dataMap.prune(filterExp, segmentProperties, partitions));
  }
  BlockletSerializer serializer = new BlockletSerializer();
  String writePath = identifier.getTablePath() + CarbonCommonConstants.FILE_SEPARATOR + dataMapSchema.getDataMapName();
  boolean isFineGrain = dataMapFactory.getDataMapType() == DataMapLevel.FG;
  if (isFineGrain) {
    FileFactory.mkdirs(writePath, FileFactory.getFileType(writePath));
  }
  for (Blocklet blocklet : blocklets) {
    ExtendedBlocklet detailedBlocklet = blockletDetailsFetcher.getExtendedBlocklet(blocklet, segment);
    if (isFineGrain) {
      // nanoTime() keeps per-blocklet file names unique within the write path.
      String blockletWritePath = writePath + CarbonCommonConstants.FILE_SEPARATOR + System.nanoTime();
      detailedBlocklet.setDataMapWriterPath(blockletWritePath);
      serializer.serializeBlocklet((FineGrainBlocklet) blocklet, blockletWritePath);
    }
    detailedBlocklet.setSegmentId(segment.getSegmentNo());
    detailedBlocklets.add(detailedBlocklet);
  }
  return detailedBlocklets;
}
Usage example of org.apache.carbondata.core.indexstore.ExtendedBlocklet in the Apache CarbonData project:
the pruneBlocklets method of the AndDataMapExprWrapper class.
/**
 * Prunes an already-resolved blocklet list by intersecting the results of
 * both child wrappers (AND semantics): a blocklet is kept only if both the
 * left and the right child select it.
 *
 * @param blocklets candidate blocklets to prune further
 * @return blocklets selected by both children
 * @throws IOException if either child prune fails
 */
@Override
public List<ExtendedBlocklet> pruneBlocklets(List<ExtendedBlocklet> blocklets) throws IOException {
  List<ExtendedBlocklet> leftPrune = left.pruneBlocklets(blocklets);
  List<ExtendedBlocklet> rightPrune = right.pruneBlocklets(blocklets);
  // Hash-set membership gives an O(left + right) intersection instead of the
  // O(left * right) List.contains scan. NOTE(review): relies on
  // ExtendedBlocklet#equals/hashCode being consistent — confirm both are overridden.
  java.util.Set<ExtendedBlocklet> rightSet = new java.util.HashSet<>(rightPrune);
  List<ExtendedBlocklet> andBlocklets = new ArrayList<>();
  for (ExtendedBlocklet blocklet : leftPrune) {
    if (rightSet.contains(blocklet)) {
      andBlocklets.add(blocklet);
    }
  }
  return andBlocklets;
}
Usage example of org.apache.carbondata.core.indexstore.ExtendedBlocklet in the Apache CarbonData project:
the createBlocklet method of the BlockletDataMap class.
/**
 * Materializes an ExtendedBlocklet from one row of the in-memory data map
 * store, reading each serialized field by its positional index constant.
 *
 * NOTE(review): field order here mirrors the row layout written elsewhere in
 * this class — do not reorder the reads without checking the writer side.
 *
 * @param row        data map row holding the serialized blocklet fields
 * @param blockletId id of the blocklet within its block
 * @return the populated blocklet with its BlockletDetailInfo attached
 */
private ExtendedBlocklet createBlocklet(DataMapRow row, int blockletId) {
// File path is stored as bytes; blocklet id is carried as its string form.
ExtendedBlocklet blocklet = new ExtendedBlocklet(new String(row.getByteArray(FILE_PATH_INDEX), CarbonCommonConstants.DEFAULT_CHARSET_CLASS), blockletId + "");
BlockletDetailInfo detailInfo = new BlockletDetailInfo();
detailInfo.setRowCount(row.getInt(ROW_COUNT_INDEX));
detailInfo.setPagesCount(row.getShort(PAGE_COUNT_INDEX));
detailInfo.setVersionNumber(row.getShort(VERSION_INDEX));
detailInfo.setBlockletId((short) blockletId);
detailInfo.setDimLens(columnCardinality);
detailInfo.setSchemaUpdatedTimeStamp(row.getLong(SCHEMA_UPADATED_TIME_INDEX));
byte[] byteArray = row.getByteArray(BLOCK_INFO_INDEX);
// BlockletInfo stays null when no serialized block info was stored for this row.
BlockletInfo blockletInfo = null;
try {
if (byteArray.length > 0) {
blockletInfo = new BlockletInfo();
// Deserialize BlockletInfo via its Writable-style readFields contract.
ByteArrayInputStream stream = new ByteArrayInputStream(byteArray);
DataInputStream inputStream = new DataInputStream(stream);
blockletInfo.readFields(inputStream);
inputStream.close();
}
// Locations are stored as a single comma-separated string.
blocklet.setLocation(new String(row.getByteArray(LOCATIONS), CarbonCommonConstants.DEFAULT_CHARSET).split(","));
} catch (IOException e) {
// Deserialization failure of in-memory row data is unrecoverable here.
throw new RuntimeException(e);
}
detailInfo.setBlockletInfo(blockletInfo);
blocklet.setDetailInfo(detailInfo);
detailInfo.setBlockFooterOffset(row.getLong(BLOCK_FOOTER_OFFSET));
detailInfo.setColumnSchemaBinary(getColumnSchemaBinary());
detailInfo.setBlockSize(row.getLong(BLOCK_LENGTH));
return blocklet;
}
Aggregations