use of org.apache.drill.exec.record.MaterializedField in project drill by apache.
the class TestValueVector method testVariableVectorReallocation.
@Test(expected = OversizedAllocationException.class)
public void testVariableVectorReallocation() {
  final MaterializedField field = MaterializedField.create(EMPTY_SCHEMA_PATH, UInt4Holder.TYPE);
  final VarCharVector vector = new VarCharVector(field, allocator);
  // edge case 1: value count = MAX_VALUE_ALLOCATION
  final int expectedAllocationInBytes = BaseValueVector.MAX_ALLOCATION_SIZE;
  final int expectedOffsetSize = 10;
  try {
    vector.allocateNew(expectedAllocationInBytes, 10);
    assertTrue(expectedOffsetSize <= vector.getValueCapacity());
    assertTrue(expectedAllocationInBytes <= vector.getBuffer().capacity());
    vector.reAlloc();
    assertTrue(expectedOffsetSize * 2 <= vector.getValueCapacity());
    assertTrue(expectedAllocationInBytes * 2 <= vector.getBuffer().capacity());
  } finally {
    vector.close();
  }
  // common case: value count < MAX_VALUE_ALLOCATION
  try {
    vector.allocateNew(BaseValueVector.MAX_ALLOCATION_SIZE / 2, 0);
    // value allocation reaches MAX_VALUE_ALLOCATION
    vector.reAlloc();
    // this second reAlloc exceeds MAX_ALLOCATION_SIZE and should throw OversizedAllocationException
    vector.reAlloc();
  } finally {
    vector.close();
  }
}
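For context, a minimal sketch of the allocate/reAlloc lifecycle this test exercises, assuming the same allocator and EMPTY_SCHEMA_PATH fixtures from TestValueVector; the sketchField/sketchVector names are illustrative only.

// Minimal sketch (assumes TestValueVector's allocator and EMPTY_SCHEMA_PATH fixtures).
final MaterializedField sketchField = MaterializedField.create(EMPTY_SCHEMA_PATH, UInt4Holder.TYPE);
try (final VarCharVector sketchVector = new VarCharVector(sketchField, allocator)) {
  sketchVector.allocateNew(1024 * 10, 1024); // bytes of value space, number of values
  sketchVector.reAlloc();                    // doubles the underlying buffers
} catch (OversizedAllocationException e) {
  // reAlloc() refuses to grow a buffer past BaseValueVector.MAX_ALLOCATION_SIZE
}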
use of org.apache.drill.exec.record.MaterializedField in project drill by apache.
the class TestValueVector method testNullableVarLen2.
@Test
public void testNullableVarLen2() {
  final MaterializedField field = MaterializedField.create(EMPTY_SCHEMA_PATH, NullableVarCharHolder.TYPE);
  // Create a new value vector with room for 1024 variable-length values.
  try (final NullableVarCharVector vector = new NullableVarCharVector(field, allocator)) {
    final NullableVarCharVector.Mutator m = vector.getMutator();
    vector.allocateNew(1024 * 10, 1024);
    m.set(0, STR1);
    m.set(1, STR2);
    m.set(2, STR3);
    // Check the sample strings.
    final NullableVarCharVector.Accessor accessor = vector.getAccessor();
    assertArrayEquals(STR1, accessor.get(0));
    assertArrayEquals(STR2, accessor.get(1));
    assertArrayEquals(STR3, accessor.get(2));
    // Ensure that reading an unset (null) position throws.
    boolean b = false;
    try {
      vector.getAccessor().get(3);
    } catch (IllegalStateException e) {
      b = true;
    } finally {
      assertTrue(b);
    }
  }
}
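Outside of tests, the accessor's isNull() check is the usual way to avoid that IllegalStateException. A minimal sketch, assuming the same vector populated as above (default charset used only for brevity):

// Minimal sketch: probe for null before reading instead of relying on the exception.
final NullableVarCharVector.Accessor a = vector.getAccessor();
for (int i = 0; i < 4; i++) {
  if (a.isNull(i)) {
    System.out.println(i + ": null");
  } else {
    System.out.println(i + ": " + new String(a.get(i)));
  }
}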
use of org.apache.drill.exec.record.MaterializedField in project drill by apache.
the class UnionAllRecordBatch method doWork.
@SuppressWarnings("resource")
private IterOutcome doWork() throws ClassTransformationException, IOException, SchemaChangeException {
  if (allocationVectors != null) {
    for (ValueVector v : allocationVectors) {
      v.clear();
    }
  }
  allocationVectors = Lists.newArrayList();
  transfers.clear();
  // If both sides of Union-All are empty
  if (unionAllInput.isBothSideEmpty()) {
    for (int i = 0; i < outputFields.size(); ++i) {
      final String colName = outputFields.get(i).getPath();
      final MajorType majorType = MajorType.newBuilder().setMinorType(MinorType.INT).setMode(DataMode.OPTIONAL).build();
      MaterializedField outputField = MaterializedField.create(colName, majorType);
      ValueVector vv = container.addOrGet(outputField, callBack);
      allocationVectors.add(vv);
    }
    container.buildSchema(BatchSchema.SelectionVectorMode.NONE);
    return IterOutcome.OK_NEW_SCHEMA;
  }
  final ClassGenerator<UnionAller> cg = CodeGenerator.getRoot(UnionAller.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
  cg.getCodeGenerator().plainJavaCapable(true);
  // Uncomment this line to debug the generated code.
  // cg.getCodeGenerator().saveCodeForDebugging(true);
  int index = 0;
  for (VectorWrapper<?> vw : current) {
    ValueVector vvIn = vw.getValueVector();
    // get the original input column names
    SchemaPath inputPath = SchemaPath.getSimplePath(vvIn.getField().getPath());
    // get the renamed column names
    SchemaPath outputPath = SchemaPath.getSimplePath(outputFields.get(index).getPath());
    final ErrorCollector collector = new ErrorCollectorImpl();
    // cast data types (MinorType or DataMode)
    if (hasSameTypeAndMode(outputFields.get(index), vw.getValueVector().getField())) {
      // Transfer column
      MajorType outputFieldType = outputFields.get(index).getType();
      MaterializedField outputField = MaterializedField.create(outputPath.getAsUnescapedPath(), outputFieldType);
      /*
        todo: Fix if condition when DRILL-4824 is merged
        If condition should be changed to:
        `if (outputFields.get(index).getPath().equals(inputPath.getAsUnescapedPath())) {`
        DRILL-5419 changed the condition to the correct one, but this caused a regression (DRILL-5521).
        The root cause is a missing indication of the child column in map types when it is null.
        DRILL-4824 is re-working the json reader implementation, including map types, and will fix this problem.
        Reverting the condition to the previous one to avoid the regression until DRILL-4824 is merged.
        Unit test - TestJsonReader.testKvgenWithUnionAll().
      */
      if (outputFields.get(index).getPath().equals(inputPath)) {
        ValueVector vvOut = container.addOrGet(outputField);
        TransferPair tp = vvIn.makeTransferPair(vvOut);
        transfers.add(tp);
      // Copy data in order to rename the column
      } else {
        final LogicalExpression expr = ExpressionTreeMaterializer.materialize(inputPath, current, collector, context.getFunctionRegistry());
        if (collector.hasErrors()) {
          throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
        }
        ValueVector vv = container.addOrGet(outputField, callBack);
        allocationVectors.add(vv);
        TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
        ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
        cg.addExpr(write);
      }
    // Cast is necessary
    } else {
      LogicalExpression expr = ExpressionTreeMaterializer.materialize(inputPath, current, collector, context.getFunctionRegistry());
      if (collector.hasErrors()) {
        throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
      }
      // cast to the one with the least restriction
      if (vvIn.getField().getType().getMode() == DataMode.REQUIRED && outputFields.get(index).getType().getMode() != DataMode.REQUIRED) {
        expr = ExpressionTreeMaterializer.convertToNullableType(expr, vvIn.getField().getType().getMinorType(), context.getFunctionRegistry(), collector);
        if (collector.hasErrors()) {
          throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
        }
      }
      // Insert a cast before the Union operation
      if (vvIn.getField().getType().getMinorType() != outputFields.get(index).getType().getMinorType()) {
        expr = ExpressionTreeMaterializer.addCastExpression(expr, outputFields.get(index).getType(), context.getFunctionRegistry(), collector);
        if (collector.hasErrors()) {
          throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema. Errors:\n %s.", collector.toErrorString()));
        }
      }
      final MaterializedField outputField = MaterializedField.create(outputPath.getAsUnescapedPath(), expr.getMajorType());
      ValueVector vector = container.addOrGet(outputField, callBack);
      allocationVectors.add(vector);
      TypedFieldId fid = container.getValueVectorId(SchemaPath.getSimplePath(outputField.getPath()));
      boolean useSetSafe = !(vector instanceof FixedWidthVector);
      ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, useSetSafe);
      cg.addExpr(write);
    }
    ++index;
  }
  unionall = context.getImplementationClass(cg.getCodeGenerator());
  unionall.setup(context, current, this, transfers);
  if (!schemaAvailable) {
    container.buildSchema(BatchSchema.SelectionVectorMode.NONE);
    schemaAvailable = true;
  }
  if (!doAlloc()) {
    return IterOutcome.OUT_OF_MEMORY;
  }
  recordCount = unionall.unionRecords(0, current.getRecordCount(), 0);
  setValueCount(recordCount);
  return IterOutcome.OK;
}
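For reference, the hasSameTypeAndMode() guard used above decides between the transfer path and the cast path by comparing the MinorType and DataMode of the two fields. A minimal sketch of that comparison (an illustrative helper, not necessarily the exact Drill implementation):

// Sketch: two fields are transfer-compatible when both the minor type and the data mode match.
private static boolean sameTypeAndMode(MaterializedField left, MaterializedField right) {
  return left.getType().getMinorType() == right.getType().getMinorType()
      && left.getType().getMode() == right.getType().getMode();
}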
use of org.apache.drill.exec.record.MaterializedField in project drill by apache.
the class ExternalSortBatch method constructHyperBatch.
private VectorContainer constructHyperBatch(List<BatchGroup> batchGroupList) {
  VectorContainer cont = new VectorContainer();
  for (MaterializedField field : schema) {
    ValueVector[] vectors = new ValueVector[batchGroupList.size()];
    int i = 0;
    for (BatchGroup group : batchGroupList) {
      vectors[i++] = group.getValueAccessorById(
          field.getValueClass(),
          group.getValueVectorId(SchemaPath.getSimplePath(field.getPath())).getFieldIds())
          .getValueVector();
    }
    cont.add(vectors);
  }
  cont.buildSchema(BatchSchema.SelectionVectorMode.FOUR_BYTE);
  return cont;
}
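FOUR_BYTE mode marks the container as a hyper-batch: each logical row is addressed by a four-byte selection-vector entry whose upper two bytes pick the member batch and lower two bytes pick the row within it. A hypothetical read loop, assuming a SelectionVector4 named sv4 that the sort pairs with this container (names are illustrative):

// Hypothetical consumer of the hyper-batch; sv4 is an assumed SelectionVector4 over it.
for (int i = 0; i < sv4.getCount(); i++) {
  int compound = sv4.get(i);
  int batchIndex = compound >>> 16;     // which underlying batch in the hyper-vector
  int recordIndex = compound & 0xFFFF;  // which row inside that batch
  // the vector at batchIndex in the corresponding HyperVectorWrapper holds the value at recordIndex
}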
use of org.apache.drill.exec.record.MaterializedField in project drill by apache.
the class AbstractWriter method init.
@Override
public void init(OutputMutator output) throws SchemaChangeException {
  MaterializedField mf = MaterializedField.create(field.getName(), type);
  @SuppressWarnings("unchecked")
  Class<V> valueVectorClass = (Class<V>) TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode());
  this.vector = output.addField(mf, valueVectorClass);
}
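To make the pattern concrete, a minimal sketch of registering one column this way; the column name "name" and the REQUIRED VARCHAR type are illustrative, and output is the OutputMutator passed to init():

// Hypothetical example: register a required VARCHAR column named "name" with the OutputMutator.
MajorType varcharType = MajorType.newBuilder()
    .setMinorType(MinorType.VARCHAR)
    .setMode(DataMode.REQUIRED)
    .build();
MaterializedField nameField = MaterializedField.create("name", varcharType);
VarCharVector nameVector = output.addField(nameField, VarCharVector.class);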