Use of org.apache.drill.exec.store.hive.HiveScan in project drill by apache.
Class HivePushPartitionFilterIntoScan, method getFilterOnProject: builds the pruning rule that matches a Filter above a Project above a Scan.
public static final StoragePluginOptimizerRule getFilterOnProject(OptimizerRulesContext optimizerRulesContext,
    final String defaultPartitionValue) {
  return new PruneScanRule(
      RelOptHelper.some(DrillFilterRel.class,
          RelOptHelper.some(DrillProjectRel.class, RelOptHelper.any(DrillScanRel.class))),
      "HivePushPartitionFilterIntoScan:Filter_On_Project_Hive",
      optimizerRulesContext) {

    @Override
    public PartitionDescriptor getPartitionDescriptor(PlannerSettings settings, TableScan scanRel) {
      return new HivePartitionDescriptor(settings, (DrillScanRel) scanRel,
          getOptimizerRulesContext().getManagedBuffer(), defaultPartitionValue);
    }

    @Override
    public boolean matches(RelOptRuleCall call) {
      final DrillScanRel scan = (DrillScanRel) call.rel(2);
      GroupScan groupScan = scan.getGroupScan();
      // This rule is applicable only to Hive-based partition pruning.
      if (PrelUtil.getPlannerSettings(scan.getCluster().getPlanner()).isHepPartitionPruningEnabled()) {
        return groupScan instanceof HiveScan && groupScan.supportsPartitionFilterPushdown()
            && !scan.partitionFilterPushdown();
      } else {
        return groupScan instanceof HiveScan && groupScan.supportsPartitionFilterPushdown();
      }
    }

    @Override
    public void onMatch(RelOptRuleCall call) {
      final DrillFilterRel filterRel = call.rel(0);
      final DrillProjectRel projectRel = call.rel(1);
      final DrillScanRel scanRel = call.rel(2);
      doOnMatch(call, filterRel, projectRel, scanRel);
    }
  };
}
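For orientation, here is a hedged sketch of how a caller could collect this rule together with the scan-only variant shown in the next snippet. The wrapper class HivePruningRules is an illustrative assumption, not part of the Drill source; the Drill import paths are those implied by the snippets.

import java.util.Set;
import com.google.common.collect.ImmutableSet;

// Illustrative only: gather both Hive pruning rules into one set that a
// storage plugin could hand to the planner. This class is hypothetical.
public class HivePruningRules {
  public static Set<StoragePluginOptimizerRule> build(
      OptimizerRulesContext context, String defaultPartitionValue) {
    return ImmutableSet.of(
        HivePushPartitionFilterIntoScan.getFilterOnProject(context, defaultPartitionValue),
        HivePushPartitionFilterIntoScan.getFilterOnScan(context, defaultPartitionValue));
  }
}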
Use of org.apache.drill.exec.store.hive.HiveScan in project drill by apache.
Class HivePushPartitionFilterIntoScan, method getFilterOnScan: builds the pruning rule that matches a Filter directly above a Scan, so no Project is passed to doOnMatch.
public static final StoragePluginOptimizerRule getFilterOnScan(OptimizerRulesContext optimizerRulesContext,
    final String defaultPartitionValue) {
  return new PruneScanRule(
      RelOptHelper.some(DrillFilterRel.class, RelOptHelper.any(DrillScanRel.class)),
      "HivePushPartitionFilterIntoScan:Filter_On_Scan_Hive",
      optimizerRulesContext) {

    @Override
    public PartitionDescriptor getPartitionDescriptor(PlannerSettings settings, TableScan scanRel) {
      return new HivePartitionDescriptor(settings, (DrillScanRel) scanRel,
          getOptimizerRulesContext().getManagedBuffer(), defaultPartitionValue);
    }

    @Override
    public boolean matches(RelOptRuleCall call) {
      final DrillScanRel scan = (DrillScanRel) call.rel(1);
      GroupScan groupScan = scan.getGroupScan();
      // This rule is applicable only to Hive-based partition pruning.
      if (PrelUtil.getPlannerSettings(scan.getCluster().getPlanner()).isHepPartitionPruningEnabled()) {
        return groupScan instanceof HiveScan && groupScan.supportsPartitionFilterPushdown()
            && !scan.partitionFilterPushdown();
      } else {
        return groupScan instanceof HiveScan && groupScan.supportsPartitionFilterPushdown();
      }
    }

    @Override
    public void onMatch(RelOptRuleCall call) {
      final DrillFilterRel filterRel = call.rel(0);
      final DrillScanRel scanRel = call.rel(1);
      doOnMatch(call, filterRel, null, scanRel);
    }
  };
}
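Both anonymous rules override matches(RelOptRuleCall) with an identical guard; only the ordinal of the scan in the matched subtree differs. A minimal refactor sketch of that shared guard, using a hypothetical helper name that does not appear in the Drill source:

// Hypothetical helper capturing the guard duplicated by both rules:
// prune only Hive group scans that support partition filter pushdown,
// and, when HEP-driven pruning is enabled, skip scans already pruned.
private static boolean isPrunableHiveScan(DrillScanRel scan) {
  GroupScan groupScan = scan.getGroupScan();
  if (!(groupScan instanceof HiveScan) || !groupScan.supportsPartitionFilterPushdown()) {
    return false;
  }
  boolean hepPruning = PrelUtil.getPlannerSettings(scan.getCluster().getPlanner())
      .isHepPartitionPruningEnabled();
  return !hepPruning || !scan.partitionFilterPushdown();
}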
Use of org.apache.drill.exec.store.hive.HiveScan in project drill by apache.
Class HivePartitionDescriptor, method createNewGroupScan: rebuilds the HiveScan so it reads only the partitions that survived pruning.
private GroupScan createNewGroupScan(List<PartitionLocation> newPartitionLocations) throws ExecutionSetupException {
  HiveScan hiveScan = (HiveScan) scanRel.getGroupScan();
  HiveReadEntry origReadEntry = hiveScan.hiveReadEntry;
  List<HiveTableWrapper.HivePartitionWrapper> oldPartitions = origReadEntry.partitions;
  List<HiveTableWrapper.HivePartitionWrapper> newPartitions = Lists.newLinkedList();
  for (HiveTableWrapper.HivePartitionWrapper part : oldPartitions) {
    String partitionLocation = part.getPartition().getSd().getLocation();
    for (PartitionLocation newPartitionLocation : newPartitionLocations) {
      if (partitionLocation.equals(newPartitionLocation.getEntirePartitionLocation())) {
        newPartitions.add(part);
      }
    }
  }
  HiveReadEntry newReadEntry = new HiveReadEntry(origReadEntry.table, newPartitions);
  return hiveScan.clone(newReadEntry);
}
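The nested loop above rescans newPartitionLocations once per original partition. A sketch of the same location-based intersection with the kept locations hashed once, so each old partition is checked in constant time; this is an illustrative alternative, not the Drill implementation (it additionally assumes java.util.HashSet and java.util.Set are imported):

// Sketch only: identical result to createNewGroupScan above, but the
// surviving locations are collected into a set before the partition scan.
private GroupScan createNewGroupScan(List<PartitionLocation> newPartitionLocations)
    throws ExecutionSetupException {
  HiveScan hiveScan = (HiveScan) scanRel.getGroupScan();
  HiveReadEntry origReadEntry = hiveScan.hiveReadEntry;
  Set<String> keptLocations = new HashSet<>();
  for (PartitionLocation loc : newPartitionLocations) {
    keptLocations.add(loc.getEntirePartitionLocation());
  }
  List<HiveTableWrapper.HivePartitionWrapper> newPartitions = Lists.newLinkedList();
  for (HiveTableWrapper.HivePartitionWrapper part : origReadEntry.partitions) {
    if (keptLocations.contains(part.getPartition().getSd().getLocation())) {
      newPartitions.add(part);
    }
  }
  return hiveScan.clone(new HiveReadEntry(origReadEntry.table, newPartitions));
}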
Use of org.apache.drill.exec.store.hive.HiveScan in project drill by apache.
Class HivePartitionDescriptor, method populatePartitionVectors: writes each partition's column values into the value vectors used to evaluate the pruning filter.
@Override
public void populatePartitionVectors(ValueVector[] vectors, List<PartitionLocation> partitions,
    BitSet partitionColumnBitSet, Map<Integer, String> fieldNameMap) {
  int record = 0;
  final HiveScan hiveScan = (HiveScan) scanRel.getGroupScan();
  final Map<String, String> partitionNameTypeMap = hiveScan.hiveReadEntry.table.getPartitionNameTypeMap();
  for (PartitionLocation partitionLocation : partitions) {
    for (int partitionColumnIndex : BitSets.toIter(partitionColumnBitSet)) {
      final String hiveType = partitionNameTypeMap.get(fieldNameMap.get(partitionColumnIndex));
      final Object value = HiveUtilities.convertPartitionType(
          TypeInfoUtils.getTypeInfoFromTypeString(hiveType),
          partitionLocation.getPartitionValue(partitionColumnIndex),
          defaultPartitionValue);
      if (value != null) {
        HiveUtilities.populateVector(vectors[partitionColumnIndex], managedBuffer, value, record, record + 1);
      }
    }
    record++;
  }
  for (ValueVector v : vectors) {
    if (v == null) {
      continue;
    }
    v.getMutator().setValueCount(partitions.size());
  }
}
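The fill pattern is row-major: one record per pruned partition, one write per projected partition column, and a value that converts to null simply leaves the slot unset. A toy model of the same loop using plain arrays instead of Drill value vectors (everything here is illustrative; none of these names come from Drill):

import java.util.Arrays;
import java.util.BitSet;
import java.util.List;

// Toy model of populatePartitionVectors: columns indexed by a BitSet,
// records indexed by partition position, nulls left unset.
public class PartitionVectorFillDemo {
  public static void main(String[] args) {
    List<List<String>> partitions = Arrays.asList(   // two pruned partitions
        Arrays.asList("2017", "01"),
        Arrays.asList("2017", null));                // null value stays unset
    BitSet partitionColumns = new BitSet();
    partitionColumns.set(0);                         // project both columns
    partitionColumns.set(1);

    String[][] vectors = new String[2][partitions.size()];
    int record = 0;
    for (List<String> values : partitions) {
      for (int col = partitionColumns.nextSetBit(0); col >= 0;
           col = partitionColumns.nextSetBit(col + 1)) {
        String value = values.get(col);
        if (value != null) {
          vectors[col][record] = value;              // populateVector analogue
        }
      }
      record++;
    }
    System.out.println(Arrays.deepToString(vectors)); // [[2017, 2017], [01, null]]
  }
}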
Use of org.apache.drill.exec.store.hive.HiveScan in project drill by apache.
Class HivePartitionDescriptor, method createPartitionSublists: converts the metastore partitions into partition locations and batches them for pruning.
@Override
protected void createPartitionSublists() {
  List<PartitionLocation> locations = new LinkedList<>();
  HiveReadEntry origEntry = ((HiveScan) scanRel.getGroupScan()).hiveReadEntry;
  for (Partition partition : origEntry.getPartitions()) {
    locations.add(new HivePartitionLocation(partition.getValues(), partition.getSd().getLocation()));
  }
  locationSuperList = Lists.partition(locations, PartitionDescriptor.PARTITION_BATCH_SIZE);
  sublistsCreated = true;
}
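Guava's Lists.partition does the batching: it returns consecutive sublist views of the source list, each of the given size, with the final batch possibly shorter. A quick standalone illustration (the demo class and values are made up):

import java.util.Arrays;
import java.util.List;
import com.google.common.collect.Lists;

// Demonstrates the batching behavior used for PARTITION_BATCH_SIZE.
public class PartitionBatchingDemo {
  public static void main(String[] args) {
    List<Integer> locations = Arrays.asList(1, 2, 3, 4, 5, 6, 7);
    List<List<Integer>> batches = Lists.partition(locations, 3);
    System.out.println(batches); // [[1, 2, 3], [4, 5, 6], [7]]
  }
}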