use of org.apache.drill.exec.record.TypedFieldId in project drill by apache.
the class TestEvaluationVisitor method x.
@Test
public void x() throws Exception {
DrillConfig c = DrillConfig.create();
FunctionImplementationRegistry reg = new FunctionImplementationRegistry(c);
EvaluationVisitor v = new EvaluationVisitor(reg);
CodeGenerator<?> g = CodeGenerator.get(Projector.TEMPLATE_DEFINITION, reg, null);
SchemaPath path = (SchemaPath) getExpr("a.b[4][2].c[6]");
TypedFieldId id = TypedFieldId.newBuilder()
    .addId(1)
    .addId(3)
    .remainder(path.getRootSegment())
    .intermediateType(Types.optional(MinorType.MAP))
    .finalType(Types.repeated(MinorType.MAP))
    .hyper()
    .withIndex()
    .build();
ValueVectorReadExpression e = new ValueVectorReadExpression(id);
TypedFieldId outId = TypedFieldId.newBuilder()
    .addId(1)
    .finalType(Types.repeated(MinorType.MAP))
    .intermediateType(Types.repeated(MinorType.MAP))
    .build();
ValueVectorWriteExpression e2 = new ValueVectorWriteExpression(outId, e, true);
v.addExpr(e2, g.getRoot());
logger.debug(g.generateAndGet());
}
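The builder above addresses a nested, hyper, indexed field; for a simple top-level column, the plain constructor used elsewhere on this page suffices. A minimal sketch (the BIGINT type and field index 0 are illustrative, not taken from the test):
// Sketch: a TypedFieldId for a non-hyper, top-level column (illustrative type and index).
TypedFieldId simpleId = new TypedFieldId(Types.optional(MinorType.BIGINT), false, 0);
// It can feed a read expression just like the nested id above.
ValueVectorReadExpression read = new ValueVectorReadExpression(simpleId);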
use of org.apache.drill.exec.record.TypedFieldId in project drill by apache.
the class HashJoinBatch method setupHashJoinProbe.
public HashJoinProbe setupHashJoinProbe() throws ClassTransformationException, IOException {
final CodeGenerator<HashJoinProbe> cg = CodeGenerator.get(HashJoinProbe.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
cg.plainJavaCapable(true);
// Uncomment this line to debug the generated code.
// cg.saveCodeForDebugging(true);
final ClassGenerator<HashJoinProbe> g = cg.getRoot();
// Generate the code to project build side records
g.setMappingSet(projectBuildMapping);
int fieldId = 0;
final JExpression buildIndex = JExpr.direct("buildIndex");
final JExpression outIndex = JExpr.direct("outIndex");
g.rotateBlock();
if (rightSchema != null) {
for (final MaterializedField field : rightSchema) {
final MajorType inputType = field.getType();
final MajorType outputType;
// For left or full outer joins, the output type must be nullable. However, map types are
// not nullable so we must exclude them from the check below (see DRILL-2197).
if ((joinType == JoinRelType.LEFT || joinType == JoinRelType.FULL) && inputType.getMode() == DataMode.REQUIRED && inputType.getMinorType() != TypeProtos.MinorType.MAP) {
outputType = Types.overrideMode(inputType, DataMode.OPTIONAL);
} else {
outputType = inputType;
}
// make sure to project field with children for children to show up in the schema
final MaterializedField projected = field.withType(outputType);
// Add the vector to our output container
container.addOrGet(projected);
final JVar inVV = g.declareVectorValueSetupAndMember("buildBatch", new TypedFieldId(field.getType(), true, fieldId));
final JVar outVV = g.declareVectorValueSetupAndMember("outgoing", new TypedFieldId(outputType, false, fieldId));
g.getEvalBlock().add(outVV.invoke("copyFromSafe").arg(buildIndex.band(JExpr.lit((int) Character.MAX_VALUE))).arg(outIndex).arg(inVV.component(buildIndex.shrz(JExpr.lit(16)))));
g.rotateBlock();
fieldId++;
}
}
// Generate the code to project probe side records
g.setMappingSet(projectProbeMapping);
int outputFieldId = fieldId;
fieldId = 0;
final JExpression probeIndex = JExpr.direct("probeIndex");
if (leftUpstream == IterOutcome.OK || leftUpstream == IterOutcome.OK_NEW_SCHEMA) {
for (final VectorWrapper<?> vv : left) {
final MajorType inputType = vv.getField().getType();
final MajorType outputType;
// For right or full outer joins, the output type must be nullable. However, map types are
// not nullable so we must exclude them from the check below (see DRILL-2771, DRILL-2197).
if ((joinType == JoinRelType.RIGHT || joinType == JoinRelType.FULL) && inputType.getMode() == DataMode.REQUIRED && inputType.getMinorType() != TypeProtos.MinorType.MAP) {
outputType = Types.overrideMode(inputType, DataMode.OPTIONAL);
} else {
outputType = inputType;
}
final ValueVector v = container.addOrGet(MaterializedField.create(vv.getField().getPath(), outputType));
if (v instanceof AbstractContainerVector) {
vv.getValueVector().makeTransferPair(v);
v.clear();
}
final JVar inVV = g.declareVectorValueSetupAndMember("probeBatch", new TypedFieldId(inputType, false, fieldId));
final JVar outVV = g.declareVectorValueSetupAndMember("outgoing", new TypedFieldId(outputType, false, outputFieldId));
g.getEvalBlock().add(outVV.invoke("copyFromSafe").arg(probeIndex).arg(outIndex).arg(inVV));
g.rotateBlock();
fieldId++;
outputFieldId++;
}
}
final HashJoinProbe hj = context.getImplementationClass(cg);
return hj;
}
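The copyFromSafe arguments above decode a composite build-side index: the build batch is a hyper batch (note the isHyper = true TypedFieldId), so each index packs the batch number into the upper 16 bits and the row offset into the lower 16 bits. A plain-Java sketch of the same arithmetic, with illustrative values:
// Sketch of the generated index arithmetic (values are illustrative).
int buildIndex = 0x00030041;
int batchNumber = buildIndex >>> 16;              // shrz(16): batch 3 within the hyper batch
int rowOffset = buildIndex & Character.MAX_VALUE; // band(0xFFFF): row 0x41 within that batch
// The generated call is then, in effect:
// outgoing.copyFromSafe(rowOffset, outIndex, buildBatch[batchNumber]);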
use of org.apache.drill.exec.record.TypedFieldId in project drill by apache.
the class ExpressionTest method testSchemaExpression.
@Test
public void testSchemaExpression(@Injectable final RecordBatch batch) throws Exception {
final TypedFieldId tfid = new TypedFieldId(Types.optional(MinorType.BIGINT), false, 0);
new Expectations() {
{
batch.getValueVectorId(new SchemaPath("alpha", ExpressionPosition.UNKNOWN));
result = tfid;
// batch.getValueVectorById(tfid); result = new Fixed4(null, null);
}
};
System.out.println(getExpressionCode("1 + alpha", batch));
}
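The Expectations block stubs the mocked RecordBatch so that resolving the column reference "alpha" yields the BIGINT TypedFieldId at index 0; with that in place, materializing "1 + alpha" can generate working read code without a real batch.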
use of org.apache.drill.exec.record.TypedFieldId in project drill by apache.
the class UnionAllRecordBatch method doWork.
@SuppressWarnings("resource")
private IterOutcome doWork() throws ClassTransformationException, IOException, SchemaChangeException {
if (allocationVectors != null) {
for (ValueVector v : allocationVectors) {
v.clear();
}
}
allocationVectors = Lists.newArrayList();
transfers.clear();
// If both sides of Union-All are empty
if (unionAllInput.isBothSideEmpty()) {
for (int i = 0; i < outputFields.size(); ++i) {
final String colName = outputFields.get(i).getPath();
final MajorType majorType = MajorType.newBuilder().setMinorType(MinorType.INT).setMode(DataMode.OPTIONAL).build();
MaterializedField outputField = MaterializedField.create(colName, majorType);
ValueVector vv = container.addOrGet(outputField, callBack);
allocationVectors.add(vv);
}
container.buildSchema(BatchSchema.SelectionVectorMode.NONE);
return IterOutcome.OK_NEW_SCHEMA;
}
final ClassGenerator<UnionAller> cg = CodeGenerator.getRoot(UnionAller.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
cg.getCodeGenerator().plainJavaCapable(true);
// Uncomment this line to debug the generated code.
// cg.getCodeGenerator().saveCodeForDebugging(true);
int index = 0;
for (VectorWrapper<?> vw : current) {
ValueVector vvIn = vw.getValueVector();
// get the original input column names
SchemaPath inputPath = SchemaPath.getSimplePath(vvIn.getField().getPath());
// get the renamed column names
SchemaPath outputPath = SchemaPath.getSimplePath(outputFields.get(index).getPath());
final ErrorCollector collector = new ErrorCollectorImpl();
// cast data types (Minortype or DataMode)
if (hasSameTypeAndMode(outputFields.get(index), vw.getValueVector().getField())) {
// Transfer column
MajorType outputFieldType = outputFields.get(index).getType();
MaterializedField outputField = MaterializedField.create(outputPath.getAsUnescapedPath(), outputFieldType);
/*
todo: Fix the if condition when DRILL-4824 is merged.
The condition should be changed to:
`if (outputFields.get(index).getPath().equals(inputPath.getAsUnescapedPath())) {`
DRILL-5419 changed the condition to the correct one, but this caused a regression (DRILL-5521).
The root cause is a missing indication of the child column in map types when it is null.
DRILL-4824 is re-working the json reader implementation, including map types, and will fix this problem.
The condition is reverted to the previous one to avoid the regression until DRILL-4824 is merged.
Unit test - TestJsonReader.testKvgenWithUnionAll().
*/
if (outputFields.get(index).getPath().equals(inputPath)) {
ValueVector vvOut = container.addOrGet(outputField);
TransferPair tp = vvIn.makeTransferPair(vvOut);
transfers.add(tp);
// Copy data in order to rename the column
} else {
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(inputPath, current, collector, context.getFunctionRegistry());
if (collector.hasErrors()) {
throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
}
ValueVector vv = container.addOrGet(outputField, callBack);
allocationVectors.add(vv);
TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
cg.addExpr(write);
}
// Cast is necessary
} else {
LogicalExpression expr = ExpressionTreeMaterializer.materialize(inputPath, current, collector, context.getFunctionRegistry());
if (collector.hasErrors()) {
throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
}
// cast to the one with the least restriction
if (vvIn.getField().getType().getMode() == DataMode.REQUIRED && outputFields.get(index).getType().getMode() != DataMode.REQUIRED) {
expr = ExpressionTreeMaterializer.convertToNullableType(expr, vvIn.getField().getType().getMinorType(), context.getFunctionRegistry(), collector);
if (collector.hasErrors()) {
throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
}
}
// Insert a cast before the Union operation
if (vvIn.getField().getType().getMinorType() != outputFields.get(index).getType().getMinorType()) {
expr = ExpressionTreeMaterializer.addCastExpression(expr, outputFields.get(index).getType(), context.getFunctionRegistry(), collector);
if (collector.hasErrors()) {
throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
}
}
final MaterializedField outputField = MaterializedField.create(outputPath.getAsUnescapedPath(), expr.getMajorType());
ValueVector vector = container.addOrGet(outputField, callBack);
allocationVectors.add(vector);
TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
boolean useSetSafe = !(vector instanceof FixedWidthVector);
ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, useSetSafe);
cg.addExpr(write);
}
++index;
}
unionall = context.getImplementationClass(cg.getCodeGenerator());
unionall.setup(context, current, this, transfers);
if (!schemaAvailable) {
container.buildSchema(BatchSchema.SelectionVectorMode.NONE);
schemaAvailable = true;
}
if (!doAlloc()) {
return IterOutcome.OUT_OF_MEMORY;
}
recordCount = unionall.unionRecords(0, current.getRecordCount(), 0);
setValueCount(recordCount);
return IterOutcome.OK;
}
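The two materializer steps above implement a least-restriction rule for each output column: promote the mode to OPTIONAL when only one side is REQUIRED, then insert a cast when the minor types still differ. An illustrative standalone helper showing just the mode half of the rule (leastRestrictiveMode is an invented name, not Drill API):
// Invented helper, for illustration only: the mode promotion applied above.
static MajorType leastRestrictiveMode(MajorType in, MajorType out) {
  if (in.getMode() == DataMode.REQUIRED && out.getMode() == DataMode.REQUIRED) {
    return in; // both sides REQUIRED: no promotion needed
  }
  return Types.overrideMode(in, DataMode.OPTIONAL); // any nullable side makes the result nullable
}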
use of org.apache.drill.exec.record.TypedFieldId in project drill by axbaretto.
the class DrillAggFuncHolder method declareWorkspaceVectors.
private JVar[] declareWorkspaceVectors(ClassGenerator<?> g) {
JVar[] workspaceJVars = new JVar[getWorkspaceVars().length];
for (int i = 0; i < getWorkspaceVars().length; i++) {
if (getWorkspaceVars()[i].isInject()) {
workspaceJVars[i] = g.declareClassField("work", g.getModel()._ref(getWorkspaceVars()[i].getType()));
g.getBlock(BlockType.SETUP).assign(workspaceJVars[i], g.getMappingSet().getIncoming().invoke("getContext").invoke("getManagedBuffer"));
} else {
Preconditions.checkState(Types.isFixedWidthType(getWorkspaceVars()[i].getMajorType()), String.format("Workspace variable '%s' in aggregation function '%s' is not allowed to " + "have variable length type.", getWorkspaceVars()[i].getName(), getRegisteredNames()[0]));
Preconditions.checkState(getWorkspaceVars()[i].getMajorType().getMode() == DataMode.REQUIRED, String.format("Workspace variable '%s' in aggregation function '%s' is not allowed" + " to have null or repeated type.", getWorkspaceVars()[i].getName(), getRegisteredNames()[0]));
// workspaceJVars[i] = g.declareClassField("work", g.getHolderType(workspaceVars[i].majorType), JExpr._new(g.getHolderType(workspaceVars[i].majorType)));
workspaceJVars[i] = g.declareClassField("work", g.getHolderType(getWorkspaceVars()[i].getMajorType()));
// Declare a workspace vector for the workspace var.
TypedFieldId typedFieldId = new TypedFieldId(getWorkspaceVars()[i].getMajorType(), g.getWorkspaceTypes().size());
JVar vv = g.declareVectorValueSetupAndMember(g.getMappingSet().getWorkspace(), typedFieldId);
g.getWorkspaceTypes().add(typedFieldId);
g.getWorkspaceVectors().put(getWorkspaceVars()[i], vv);
}
}
return workspaceJVars;
}
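Note the two-argument TypedFieldId constructor here: a workspace vector is not part of a hyper batch, so it is addressed by a single field index, which is simply the next free slot in the generated class's workspace container. A minimal sketch (the types are illustrative; workspace variables must be REQUIRED, per the precondition above):
// Sketch: successive workspace variables claim consecutive ids 0, 1, ...
TypedFieldId first = new TypedFieldId(Types.required(MinorType.BIGINT), 0);
TypedFieldId second = new TypedFieldId(Types.required(MinorType.FLOAT8), 1);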