Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
In the class TestOptiqPlans, the method testFilterString:
@Test
public void testFilterString() throws Exception {
  final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
  try (final Drillbit bit1 = new Drillbit(config, serviceSet);
       final DrillClient client = new DrillClient(config, serviceSet.getCoordinator())) {
    bit1.run();
    client.connect();
    final List<QueryDataBatch> results = client.runQuery(
        org.apache.drill.exec.proto.UserBitShared.QueryType.LOGICAL,
        Resources.toString(Resources.getResource("logical_string_filter.json"), Charsets.UTF_8));
    final RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
    for (final QueryDataBatch b : results) {
      System.out.println(String.format("Got %d results", b.getHeader().getRowCount()));
      loader.load(b.getHeader().getDef(), b.getData());
      for (final VectorWrapper<?> vw : loader) {
        System.out.println(vw.getValueVector().getField().getPath());
        final ValueVector vv = vw.getValueVector();
        for (int i = 0; i < vv.getAccessor().getValueCount(); i++) {
          final Object o = vv.getAccessor().getObject(i);
          if (vv instanceof VarBinaryVector) {
            final VarBinaryVector.Accessor x = ((VarBinaryVector) vv).getAccessor();
            final VarBinaryHolder vbh = new VarBinaryHolder();
            x.get(i, vbh);
            System.out.printf("%d..%d", vbh.start, vbh.end);
            System.out.println("[" + new String((byte[]) o) + "]");
          } else {
            System.out.println(o);
          }
        }
      }
      loader.clear();
      b.release();
    }
    client.close();
  }
}
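The loop above is the generic read path: every vector exposes its values through an Accessor, and type-specific handling (here VarBinaryVector with a VarBinaryHolder) is reached via an instanceof check. Below is a minimal standalone sketch of the same write-then-read cycle, assuming Drill's 1.x vector API (RootAllocatorFactory, VarCharVector); the class name, field name, and values are illustrative, not taken from the test:

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.VarCharVector;

public class VarCharVectorSketch {
  public static void main(String[] args) throws Exception {
    try (BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create())) {
      final MaterializedField field = MaterializedField.create("name", Types.required(MinorType.VARCHAR));
      try (VarCharVector vector = new VarCharVector(field, allocator)) {
        vector.allocateNew();
        // Write through the Mutator; setSafe() re-allocates if the buffer is too small.
        vector.getMutator().setSafe(0, "hello".getBytes());
        vector.getMutator().setSafe(1, "world".getBytes());
        vector.getMutator().setValueCount(2);
        // Read back through the Accessor, the same pattern the test loop uses.
        final VarCharVector.Accessor accessor = vector.getAccessor();
        for (int i = 0; i < accessor.getValueCount(); i++) {
          System.out.println(new String(accessor.get(i)));
        }
      }
    }
  }
}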
Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
In the class HiveAbstractReader, the method populatePartitionVectors:
protected void populatePartitionVectors(int recordCount) {
  for (int i = 0; i < pVectors.size(); i++) {
    final ValueVector vector = pVectors.get(i);
    final Object val = selectedPartitionValues.get(i);
    AllocationHelper.allocateNew(vector, recordCount);
    if (val != null) {
      HiveUtilities.populateVector(vector, managedBuffer, val, 0, recordCount);
    }
    vector.getMutator().setValueCount(recordCount);
  }
}
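Because a partition column holds the same value in every row, the method's job is only the three-step vector lifecycle: allocate, fill, seal the count. A sketch of that lifecycle with a plain IntVector, under the same 1.x API assumptions (the field name and value are invented; HiveUtilities.populateVector would do the middle step for real Hive types):

// Assumes the imports and allocator from the first sketch, plus
// org.apache.drill.exec.vector.IntVector and org.apache.drill.exec.vector.AllocationHelper.
static void fillPartitionColumn(BufferAllocator allocator) {
  final MaterializedField field = MaterializedField.create("part_id", Types.required(MinorType.INT));
  try (IntVector vector = new IntVector(field, allocator)) {
    final int recordCount = 4;
    AllocationHelper.allocateNew(vector, recordCount);   // step 1: size the buffers
    for (int i = 0; i < recordCount; i++) {
      vector.getMutator().setSafe(i, 42);                // step 2: repeat the partition value
    }
    vector.getMutator().setValueCount(recordCount);      // step 3: seal the readable count
  }
}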
Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
In the class ChainedHashTable, the method createAndSetupHashTable:
public HashTable createAndSetupHashTable(TypedFieldId[] outKeyFieldIds) throws ClassTransformationException, IOException, SchemaChangeException {
  CodeGenerator<HashTable> top = CodeGenerator.get(HashTable.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
  top.plainJavaCapable(true);
  // Uncomment this line to debug the generated code.
  // This code is called from generated code, so to step into it,
  // persist the code generated in HashAggBatch as well.
  // top.saveCodeForDebugging(true);
  ClassGenerator<HashTable> cg = top.getRoot();
  ClassGenerator<HashTable> cgInner = cg.getInnerGenerator("BatchHolder");
  LogicalExpression[] keyExprsBuild = new LogicalExpression[htConfig.getKeyExprsBuild().size()];
  LogicalExpression[] keyExprsProbe = null;
  boolean isProbe = (htConfig.getKeyExprsProbe() != null);
  if (isProbe) {
    keyExprsProbe = new LogicalExpression[htConfig.getKeyExprsProbe().size()];
  }
  ErrorCollector collector = new ErrorCollectorImpl();
  // original ht container from which others may be cloned
  VectorContainer htContainerOrig = new VectorContainer();
  LogicalExpression[] htKeyExprs = new LogicalExpression[htConfig.getKeyExprsBuild().size()];
  TypedFieldId[] htKeyFieldIds = new TypedFieldId[htConfig.getKeyExprsBuild().size()];
  int i = 0;
  for (NamedExpression ne : htConfig.getKeyExprsBuild()) {
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingBuild, collector, context.getFunctionRegistry());
    if (collector.hasErrors()) {
      throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
    }
    if (expr == null) {
      continue;
    }
    keyExprsBuild[i] = expr;
    i++;
  }
  if (isProbe) {
    i = 0;
    for (NamedExpression ne : htConfig.getKeyExprsProbe()) {
      final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incomingProbe, collector, context.getFunctionRegistry());
      if (collector.hasErrors()) {
        throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
      }
      if (expr == null) {
        continue;
      }
      keyExprsProbe[i] = expr;
      i++;
    }
    JoinUtils.addLeastRestrictiveCasts(keyExprsProbe, incomingProbe, keyExprsBuild, incomingBuild, context);
  }
  i = 0;
  /*
   * Once the implicit casts have been added, create the value vectors for the corresponding
   * type and add them to the hash table's container.
   * Note: Adding implicit casts may have a minor impact on the memory footprint. For example,
   * if we have a join condition with bigint on the probe side and int on the build side, then
   * after this change we will be allocating a bigint vector in the hash table instead of an
   * int vector.
   */
  for (NamedExpression ne : htConfig.getKeyExprsBuild()) {
    LogicalExpression expr = keyExprsBuild[i];
    final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
    @SuppressWarnings("resource")
    ValueVector vv = TypeHelper.getNewVector(outputField, allocator);
    htKeyFieldIds[i] = htContainerOrig.add(vv);
    i++;
  }
  // generate code for isKeyMatch(), setValue(), getHash() and outputRecordKeys()
  setupIsKeyMatchInternal(cgInner, KeyMatchIncomingBuildMapping, KeyMatchHtableMapping, keyExprsBuild, htConfig.getComparators(), htKeyFieldIds);
  setupIsKeyMatchInternal(cgInner, KeyMatchIncomingProbeMapping, KeyMatchHtableProbeMapping, keyExprsProbe, htConfig.getComparators(), htKeyFieldIds);
  setupSetValue(cgInner, keyExprsBuild, htKeyFieldIds);
  if (outgoing != null) {
    if (outKeyFieldIds.length > htConfig.getKeyExprsBuild().size()) {
      throw new IllegalArgumentException("Mismatched number of output key fields.");
    }
  }
  setupOutputRecordKeys(cgInner, htKeyFieldIds, outKeyFieldIds);
  setupGetHash(cg, /* use top level code generator for getHash */
      GetHashIncomingBuildMapping, incomingBuild, keyExprsBuild, false);
  setupGetHash(cg, /* use top level code generator for getHash */
      GetHashIncomingProbeMapping, incomingProbe, keyExprsProbe, true);
  HashTable ht = context.getImplementationClass(top);
  ht.setup(htConfig, context, allocator, incomingBuild, incomingProbe, outgoing, htContainerOrig);
  return ht;
}
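Most of this method is code generation; the ValueVector work is the middle loop, which turns each materialized key type into a vector via TypeHelper.getNewVector and registers it in the hash table's VectorContainer to obtain a TypedFieldId for the generated code. A reduced sketch of just that registration step, under the same API assumptions (field name and type invented for illustration):

// Assumes the allocator from the first sketch, plus org.apache.drill.exec.expr.TypeHelper
// and org.apache.drill.exec.record.{VectorContainer, TypedFieldId}.
static void registerKeyVector(BufferAllocator allocator) {
  final MaterializedField keyField = MaterializedField.create("key", Types.required(MinorType.BIGINT));
  final ValueVector vv = TypeHelper.getNewVector(keyField, allocator);
  final VectorContainer container = new VectorContainer();
  final TypedFieldId id = container.add(vv);   // id records the vector's position and type
  System.out.println(id.getFieldIds()[0]);     // ordinal the generated code would address
  container.clear();                           // releases the vector's buffers
}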
Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
In the class StreamingAggBatch, the method createAggregatorInternal:
private StreamingAggregator createAggregatorInternal() throws SchemaChangeException, ClassTransformationException, IOException {
  ClassGenerator<StreamingAggregator> cg = CodeGenerator.getRoot(StreamingAggTemplate.TEMPLATE_DEFINITION, context.getFunctionRegistry(), context.getOptions());
  cg.getCodeGenerator().plainJavaCapable(true);
  // Uncomment this line to debug the generated code.
  // cg.getCodeGenerator().saveCodeForDebugging(true);
  container.clear();
  LogicalExpression[] keyExprs = new LogicalExpression[popConfig.getKeys().size()];
  LogicalExpression[] valueExprs = new LogicalExpression[popConfig.getExprs().size()];
  TypedFieldId[] keyOutputIds = new TypedFieldId[popConfig.getKeys().size()];
  ErrorCollector collector = new ErrorCollectorImpl();
  for (int i = 0; i < keyExprs.length; i++) {
    final NamedExpression ne = popConfig.getKeys().get(i);
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
    if (expr == null) {
      continue;
    }
    keyExprs[i] = expr;
    final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
    @SuppressWarnings("resource")
    final ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
    keyOutputIds[i] = container.add(vector);
  }
  for (int i = 0; i < valueExprs.length; i++) {
    final NamedExpression ne = popConfig.getExprs().get(i);
    final LogicalExpression expr = ExpressionTreeMaterializer.materialize(ne.getExpr(), incoming, collector, context.getFunctionRegistry());
    if (expr instanceof IfExpression) {
      throw UserException.unsupportedError(new UnsupportedOperationException("Union type not supported in aggregate functions")).build(logger);
    }
    if (expr == null) {
      continue;
    }
    final MaterializedField outputField = MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
    @SuppressWarnings("resource")
    ValueVector vector = TypeHelper.getNewVector(outputField, oContext.getAllocator());
    TypedFieldId id = container.add(vector);
    valueExprs[i] = new ValueVectorWriteExpression(id, expr, true);
  }
  if (collector.hasErrors()) {
    throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
  }
  setupIsSame(cg, keyExprs);
  setupIsSameApart(cg, keyExprs);
  addRecordValues(cg, valueExprs);
  outputRecordKeys(cg, keyOutputIds, keyExprs);
  outputRecordKeysPrev(cg, keyOutputIds, keyExprs);
  cg.getBlock("resetValues")._return(JExpr.TRUE);
  getIndex(cg);
  container.buildSchema(SelectionVectorMode.NONE);
  StreamingAggregator agg = context.getImplementationClass(cg);
  agg.setup(oContext, incoming, this);
  return agg;
}
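The same container pattern appears here, with one addition: once all key and value vectors are registered, buildSchema(SelectionVectorMode.NONE) fixes the batch schema that downstream operators will see. A sketch of that final step, with invented field names and the same API assumptions:

// Assumes the allocator from the first sketch, plus
// org.apache.drill.exec.record.BatchSchema.SelectionVectorMode.
static void buildOutputSchema(BufferAllocator allocator) {
  final VectorContainer container = new VectorContainer();
  container.add(TypeHelper.getNewVector(
      MaterializedField.create("key", Types.required(MinorType.INT)), allocator));
  container.add(TypeHelper.getNewVector(
      MaterializedField.create("total", Types.optional(MinorType.BIGINT)), allocator));
  container.buildSchema(SelectionVectorMode.NONE);  // no selection vector on this batch
  System.out.println(container.getSchema());        // BatchSchema listing both fields
  container.clear();
}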
Use of org.apache.drill.exec.vector.ValueVector in project drill by apache.
In the class ScanBatch, the method addImplicitVectors:
private void addImplicitVectors() {
  try {
    if (implicitVectors != null) {
      for (ValueVector v : implicitVectors.values()) {
        v.clear();
      }
    }
    implicitVectors = Maps.newHashMap();
    if (implicitValues != null) {
      for (String column : implicitValues.keySet()) {
        final MaterializedField field = MaterializedField.create(column, Types.optional(MinorType.VARCHAR));
        @SuppressWarnings("resource")
        final ValueVector v = mutator.addField(field, NullableVarCharVector.class);
        implicitVectors.put(column, v);
      }
    }
  } catch (SchemaChangeException e) {
    // No exception should be thrown here.
    throw UserException.systemError(e).addContext("Failure while allocating implicit vectors").build(logger);
  }
}
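Implicit columns (filename, suffix, and so on) are always nullable VARCHAR, which is why the method requests NullableVarCharVector for every field. A small sketch of writing and reading one such vector, with an invented column value and the same API assumptions:

// Assumes the allocator and imports from the first sketch, plus
// org.apache.drill.exec.vector.NullableVarCharVector.
static void fillImplicitColumn(BufferAllocator allocator) {
  final MaterializedField field = MaterializedField.create("filename", Types.optional(MinorType.VARCHAR));
  try (NullableVarCharVector v = new NullableVarCharVector(field, allocator)) {
    v.allocateNew();
    final byte[] value = "part-00000.parquet".getBytes();
    v.getMutator().setSafe(0, value, 0, value.length);  // marks the slot non-null and copies bytes
    v.getMutator().setValueCount(1);
    System.out.println(v.getAccessor().getObject(0));   // prints the filename as Text
  }
}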