Usage of org.apache.drill.exec.expr.fn.impl.ValueVectorHashHelper in the Apache Drill project:
class HashJoinBatch, method setupHash64.
/**
 * Materializes the build-side join key expressions and resolves their field ids,
 * then code-generates the {@code Hash64} used to hash build-side keys into the
 * runtime-filter bloom filters. If any key field cannot be materialized or found
 * in the build batch, the runtime filter is disabled and this method returns early.
 *
 * @param htConfig hash table configuration carrying the build-side key expressions
 */
private void setupHash64(HashTableConfig htConfig) {
  List<NamedExpression> keyExprConfigs = htConfig.getKeyExprsBuild();
  LogicalExpression[] keyExprsBuild = new LogicalExpression[keyExprConfigs.size()];
  ErrorCollector collector = new ErrorCollectorImpl();
  int i = 0;
  for (NamedExpression ne : keyExprConfigs) {
    LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), buildBatch, collector, context.getFunctionRegistry());
    collector.reportErrors(logger);
    if (expr == null) {
      // FIX: the original code did 'continue' without advancing i, which compacted
      // keyExprsBuild while the field-id loop below indexes every key — mis-aligning
      // expressions and field ids and leaving trailing nulls in the array. A key that
      // cannot be materialized means the runtime filter cannot be built safely.
      logger.info("As some build side key fields not found, runtime filter was disabled");
      enableRuntimeFilter = false;
      return;
    }
    keyExprsBuild[i] = expr;
    i++;
  }
  // Resolve a TypedFieldId for every build-side key; the index stays aligned with
  // keyExprsBuild above because both loops iterate the same list and never skip.
  i = 0;
  TypedFieldId[] buildSideTypeFieldIds = new TypedFieldId[keyExprsBuild.length];
  for (NamedExpression ne : keyExprConfigs) {
    SchemaPath schemaPath = (SchemaPath) ne.getExpr();
    TypedFieldId typedFieldId = buildBatch.getValueVectorId(schemaPath);
    if (typedFieldId == null) {
      logger.info("As some build side key fields not found, runtime filter was disabled");
      enableRuntimeFilter = false;
      return;
    }
    buildSideTypeFieldIds[i] = typedFieldId;
    i++;
  }
  // Map each bloom filter definition to the ordinal of its build-side field so the
  // generated hash values can be routed to the right filter.
  RuntimeFilterDef runtimeFilterDef = popConfig.getRuntimeFilterDef();
  for (BloomFilterDef bloomFilterDef : runtimeFilterDef.getBloomFilterDefs()) {
    String buildField = bloomFilterDef.getBuildField();
    SchemaPath schemaPath = new SchemaPath(new PathSegment.NameSegment(buildField), ExpressionPosition.UNKNOWN);
    TypedFieldId typedFieldId = buildBatch.getValueVectorId(schemaPath);
    if (typedFieldId == null) {
      logger.info("As some build side join key fields not found, runtime filter was disabled");
      enableRuntimeFilter = false;
      return;
    }
    bloomFilterDef2buildId.put(bloomFilterDef, typedFieldId.getFieldIds()[0]);
  }
  // Code-generate the Hash64 over the build-side key expressions.
  ValueVectorHashHelper hashHelper = new ValueVectorHashHelper(buildBatch, context);
  try {
    hash64 = hashHelper.getHash64(keyExprsBuild, buildSideTypeFieldIds);
  } catch (Exception e) {
    throw UserException.internalError(e).message("Failed to construct a field's hash64 dynamic codes").build(logger);
  }
}
Usage of org.apache.drill.exec.expr.fn.impl.ValueVectorHashHelper in the Apache Drill project:
class RuntimeFilterRecordBatch, method setupHashHelper.
/**
 * Sets up the {@code Hash64} helper once a runtime filter has been received for
 * this fragment and the helper is not yet built. After a schema change the cached
 * {@code hash64} should be reset so this method rebuilds it on the next call.
 */
private void setupHashHelper() {
  current = context.getRuntimeFilter(rfIdentifier);
  if (current == null) {
    // No runtime filter has arrived yet; nothing to set up.
    return;
  }
  if (bloomFilters == null) {
    bloomFilters = current.unwrap();
  }
  if (hash64 != null) {
    // Hash helper already initialized for the current schema.
    return;
  }
  ValueVectorHashHelper helper = new ValueVectorHashHelper(incoming, context);
  try {
    // Build one read expression + field id per probe-side filter field, then
    // code-generate the hash function over all of them.
    this.toFilterFields = current.getRuntimeFilterBDef().getProbeFieldsList();
    List<LogicalExpression> readExprs = new ArrayList<>();
    List<TypedFieldId> fieldIds = new ArrayList<>();
    for (String probeField : toFilterFields) {
      SchemaPath path = new SchemaPath(new PathSegment.NameSegment(probeField), ExpressionPosition.UNKNOWN);
      TypedFieldId fieldId = container.getValueVectorId(path);
      this.field2id.put(probeField, fieldId.getFieldIds()[0]);
      fieldIds.add(fieldId);
      readExprs.add(new ValueVectorReadExpression(fieldId));
    }
    hash64 = helper.getHash64(readExprs.toArray(new LogicalExpression[0]), fieldIds.toArray(new TypedFieldId[0]));
  } catch (Exception e) {
    throw UserException.internalError(e).build(logger);
  }
}
Usage of org.apache.drill.exec.expr.fn.impl.ValueVectorHashHelper in the Apache Drill project:
class BloomFilterTest, method getHash64.
/**
 * Builds a {@code Hash64} over column {@code "a"} of the given probe row set,
 * for exercising bloom-filter hashing in tests.
 *
 * @param context     fragment context used for code generation
 * @param probeRowSet probe-side rows wrapped into a test record batch
 * @return the generated hash function for the single probe column
 */
private static ValueVectorHashHelper.Hash64 getHash64(FragmentContext context, RowSet.SingleRowSet probeRowSet) throws ClassTransformationException, IOException, SchemaChangeException {
  // Wrap the row set so its vectors can be addressed through the RecordBatch API.
  RecordBatch batch = new TestRecordBatch(probeRowSet.container());
  TypedFieldId fieldId = batch.getValueVectorId(SchemaPath.getSimplePath("a"));
  LogicalExpression[] exprs = { new ValueVectorReadExpression(fieldId) };
  TypedFieldId[] fieldIds = { fieldId };
  return new ValueVectorHashHelper(batch, context).getHash64(exprs, fieldIds);
}
Aggregations