Example usage of org.apache.drill.exec.record.MaterializedField in the Apache Drill project:
the constructHyperBatch method of the ExternalSortBatch class.
/**
 * Assembles the spilled/pending batch groups into a single hyper-batch: for each
 * field in the schema, one ValueVector per batch group is collected into an array
 * and added as a hyper-vector column, then the container's schema is built in
 * FOUR_BYTE selection-vector mode (batch index + record index addressing).
 */
private VectorContainer constructHyperBatch(List<BatchGroup> batchGroupList) {
  final VectorContainer hyperBatch = new VectorContainer();
  final int groupCount = batchGroupList.size();
  for (final MaterializedField field : schema) {
    // One vector per batch group forms a single hyper-vector column.
    final ValueVector[] columnVectors = new ValueVector[groupCount];
    for (int groupIdx = 0; groupIdx < groupCount; groupIdx++) {
      final BatchGroup group = batchGroupList.get(groupIdx);
      columnVectors[groupIdx] = group
          .getValueAccessorById(field.getValueClass(),
              group.getValueVectorId(SchemaPath.getSimplePath(field.getPath())).getFieldIds())
          .getValueVector();
    }
    hyperBatch.add(columnVectors);
  }
  hyperBatch.buildSchema(BatchSchema.SelectionVectorMode.FOUR_BYTE);
  return hyperBatch;
}
Example usage of org.apache.drill.exec.record.MaterializedField in the Apache Drill project:
the init method of the AbstractWriter class.
@Override
public void init(OutputMutator output) throws SchemaChangeException {
  // Resolve the concrete ValueVector implementation class for this field's
  // minor type and data mode. The cast is safe by TypeHelper's contract for
  // the (minorType, mode) pair; suppression is scoped to this declaration.
  @SuppressWarnings("unchecked")
  final Class<V> vectorClass =
      (Class<V>) TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode());
  // Register the materialized field with the output mutator and retain the
  // vector it hands back for subsequent writes.
  this.vector = output.addField(MaterializedField.create(field.getName(), type), vectorClass);
}
Example usage of org.apache.drill.exec.record.MaterializedField in the Apache Drill project:
the testMultipleProvidersMixedSizes method of the TestMergingReceiver class.
@Test
public void testMultipleProvidersMixedSizes() throws Exception {
// Two-drillbit cluster over a local (in-process) service set; the physical plan
// under test merges streams from multiple providers with mixed batch sizes.
@SuppressWarnings("resource") final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
try (final Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
final Drillbit bit2 = new Drillbit(CONFIG, serviceSet);
final DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
bit1.run();
bit2.run();
client.connect();
final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL, Files.toString(FileUtils.getResourceAsFile("/mergerecv/multiple_providers.json"), Charsets.UTF_8));
int count = 0;
final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
// Track the last value seen in the "blue" column across ALL batches so we can
// assert global ascending order of the merged output, not just per-batch order.
Long lastBlueValue = null;
for (final QueryDataBatch b : results) {
final QueryData queryData = b.getHeader();
final int batchRowCount = queryData.getRowCount();
count += batchRowCount;
batchLoader.load(queryData.getDef(), b.getData());
for (final VectorWrapper<?> vw : batchLoader) {
@SuppressWarnings("resource") final ValueVector vv = vw.getValueVector();
final ValueVector.Accessor va = vv.getAccessor();
final MaterializedField materializedField = vv.getField();
final int numValues = va.getValueCount();
for (int valueIdx = 0; valueIdx < numValues; ++valueIdx) {
// Only the "blue" column carries the ordering key; other columns are skipped.
if (materializedField.getPath().equals("blue")) {
final long longValue = ((Long) va.getObject(valueIdx)).longValue();
// check that order is ascending
if (lastBlueValue != null) {
assertTrue(longValue >= lastBlueValue);
}
lastBlueValue = longValue;
}
}
}
// Release the batch's buffers and reset the loader before the next batch.
b.release();
batchLoader.clear();
}
// Expected total row count across all providers (400k rows); presumably fixed
// by the multiple_providers.json plan — confirm against that resource if it changes.
assertEquals(400000, count);
}
}
Example usage of org.apache.drill.exec.record.MaterializedField in the Apache Drill project:
the test method of the TestSplitAndTransfer class.
@Test
public void test() throws Exception {
  final DrillConfig drillConfig = DrillConfig.create();
  final BufferAllocator allocator = RootAllocatorFactory.newRoot(drillConfig);
  // Build a nullable VARCHAR vector where every third slot (0, 3, 6, ...) is set;
  // the rest remain null. compareArray mirrors the expected string per set slot.
  final MaterializedField field = MaterializedField.create("field", Types.optional(MinorType.VARCHAR));
  final NullableVarCharVector varCharVector = new NullableVarCharVector(field, allocator);
  varCharVector.allocateNew(10000, 1000);
  final int valueCount = 500;
  final String[] compareArray = new String[valueCount];
  final NullableVarCharVector.Mutator mutator = varCharVector.getMutator();
  for (int i = 0; i < valueCount; i += 3) {
    final String s = String.format("%010d", i);
    // Use an explicit charset: the no-arg getBytes() depends on the platform
    // default encoding and makes the test environment-sensitive.
    mutator.set(i, s.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    compareArray[i] = s;
  }
  mutator.setValueCount(valueCount);
  final TransferPair tp = varCharVector.getTransferPair(allocator);
  final NullableVarCharVector newVarCharVector = (NullableVarCharVector) tp.getTo();
  final Accessor accessor = newVarCharVector.getAccessor();
  // Split the source at boundaries that deliberately do NOT align with the
  // every-third-slot pattern, so both set and null slots land at each edge.
  final int[][] startLengths = { { 0, 201 }, { 201, 200 }, { 401, 99 } };
  for (final int[] startLength : startLengths) {
    final int start = startLength[0];
    final int length = startLength[1];
    tp.splitAndTransfer(start, length);
    newVarCharVector.getMutator().setValueCount(length);
    for (int i = 0; i < length; i++) {
      // Slot (start + i) was populated iff it is a multiple of 3 in the source.
      final boolean expectedSet = ((start + i) % 3) == 0;
      if (expectedSet) {
        final byte[] expectedValue = compareArray[start + i].getBytes(java.nio.charset.StandardCharsets.UTF_8);
        assertFalse(accessor.isNull(i));
        assertArrayEquals(expectedValue, accessor.get(i));
      } else {
        assertTrue(accessor.isNull(i));
      }
    }
    newVarCharVector.clear();
  }
  varCharVector.close();
  allocator.close();
}
Example usage of org.apache.drill.exec.record.MaterializedField in the Apache Drill project:
the load method of the MapVector class.
@Override
// Deserializes this map vector from a single backing buffer: each child field's
// bytes occupy a contiguous slice of buf, laid out in the order of
// metadata.getChildList(), with bufOffset tracking the running start position.
public void load(SerializedField metadata, DrillBuf buf) {
final List<SerializedField> fields = metadata.getChildList();
valueCount = metadata.getValueCount();
int bufOffset = 0;
for (final SerializedField child : fields) {
final MaterializedField fieldDef = MaterializedField.create(child);
ValueVector vector = getChild(fieldDef.getLastName());
if (vector == null) {
// if we arrive here, we didn't have a matching vector.
vector = BasicTypeHelper.getNewVector(fieldDef, allocator);
putChild(fieldDef.getLastName(), vector);
}
if (child.getValueCount() == 0) {
// Empty child: just reset it. NOTE(review): bufOffset is still advanced by
// child.getBufferLength() below — presumably 0 for an empty child; confirm.
vector.clear();
} else {
// Hand the child its own slice of the buffer; slice() shares the memory
// rather than copying it.
vector.load(child, buf.slice(bufOffset, child.getBufferLength()));
}
bufOffset += child.getBufferLength();
}
// All children together must account for exactly the bytes written into buf.
assert bufOffset == buf.writerIndex();
}
Aggregations