use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
the class TestSimpleExternalSort method outOfMemoryExternalSort.
private void outOfMemoryExternalSort(boolean testLegacy) throws Throwable {
  FixtureBuilder builder = ClusterFixture.builder()
      .configProperty("drill.memory.fragment.max", 50000000)
      .configProperty("drill.memory.fragment.initial", 2000000)
      .configProperty("drill.memory.operator.max", 30000000)
      .configProperty("drill.memory.operator.initial", 2000000);
  try (ClusterFixture cluster = builder.build();
       ClientFixture client = cluster.clientFixture()) {
    chooseImpl(client, testLegacy);
    List<QueryDataBatch> results = client.queryBuilder()
        .physicalResource("/xsort/oom_sort_test.json")
        .results();
    assertEquals(10000000, client.countResults(results));
    long previousBigInt = Long.MAX_VALUE;
    int recordCount = 0;
    int batchCount = 0;
    for (QueryDataBatch b : results) {
      RecordBatchLoader loader = new RecordBatchLoader(client.allocator());
      if (b.getHeader().getRowCount() > 0) {
        batchCount++;
        loader.load(b.getHeader().getDef(), b.getData());
        @SuppressWarnings("resource")
        BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(BigIntVector.class,
            loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldIds())
            .getValueVector();
        BigIntVector.Accessor a1 = c1.getAccessor();
        for (int i = 0; i < c1.getAccessor().getValueCount(); i++) {
          recordCount++;
          assertTrue(String.format("%d < %d", previousBigInt, a1.get(i)), previousBigInt >= a1.get(i));
          previousBigInt = a1.get(i);
        }
        assertTrue(String.format("%d == %d", a1.get(0), a1.get(a1.getValueCount() - 1)),
            a1.get(0) != a1.get(a1.getValueCount() - 1));
      }
      loader.clear();
      b.release();
    }
    System.out.println(String.format("Sorted %,d records in %d batches.", recordCount, batchCount));
  }
}
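
The test above follows the standard RecordBatchLoader read protocol: load each QueryDataBatch into the loader, pull out the vectors you need, then clear the loader and release the batch. Below is a minimal sketch (not Drill source) that factors that pattern into a standalone helper; the imports assume the Drill 1.x package layout, and the class and method names are hypothetical.

// Minimal sketch of the load/clear/release pattern seen in the test above.
// Imports assume the Drill 1.x package layout; the helper name and the
// columnName parameter are illustrative only.
import java.util.List;

import org.apache.drill.common.expression.ExpressionPosition;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.vector.BigIntVector;

public class BigIntColumnReader {

  /** Counts the values of one BigInt column across all returned batches. */
  public static int countBigIntValues(List<QueryDataBatch> results,
                                      BufferAllocator allocator,
                                      String columnName) throws Exception {
    int recordCount = 0;
    final RecordBatchLoader loader = new RecordBatchLoader(allocator);
    for (QueryDataBatch b : results) {
      if (b.getHeader().getRowCount() > 0) {
        // Deserialize the batch's buffers into value vectors owned by the loader.
        loader.load(b.getHeader().getDef(), b.getData());
        BigIntVector vector = (BigIntVector) loader.getValueAccessorById(
            BigIntVector.class,
            loader.getValueVectorId(
                new SchemaPath(columnName, ExpressionPosition.UNKNOWN)).getFieldIds())
            .getValueVector();
        recordCount += vector.getAccessor().getValueCount();
      }
      // Free the loader's vectors and the batch's raw buffer after each batch.
      loader.clear();
      b.release();
    }
    return recordCount;
  }
}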
use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
the class QueryBuilder method queryPlan.
/**
 * Submit an "EXPLAIN" statement, and return the column value which
 * contains the plan's string.
 * <p>
 * Cribbed from {@link PlanTestBase#getPlanInString(String, String)}
 * @throws Exception if anything goes wrong in the query
 */
protected String queryPlan(String columnName) throws Exception {
  Preconditions.checkArgument(queryType == QueryType.SQL, "Can only explain an SQL query.");
  final List<QueryDataBatch> results = results();
  final RecordBatchLoader loader = new RecordBatchLoader(client.allocator());
  final StringBuilder builder = new StringBuilder();
  for (final QueryDataBatch b : results) {
    if (!b.hasData()) {
      continue;
    }
    loader.load(b.getHeader().getDef(), b.getData());
    final VectorWrapper<?> vw;
    try {
      vw = loader.getValueAccessorById(
          NullableVarCharVector.class,
          loader.getValueVectorId(SchemaPath.getSimplePath(columnName)).getFieldIds());
    } catch (Throwable t) {
      throw new IllegalStateException("Looks like you did not provide an explain plan query, "
          + "please add EXPLAIN PLAN FOR to the beginning of your query.");
    }
    @SuppressWarnings("resource")
    final ValueVector vv = vw.getValueVector();
    for (int i = 0; i < vv.getAccessor().getValueCount(); i++) {
      final Object o = vv.getAccessor().getObject(i);
      builder.append(o);
    }
    loader.clear();
    b.release();
  }
  return builder.toString();
}
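
Tests normally do not call queryPlan() directly. The sketch below shows a plausible call path, assuming the QueryBuilder exposes an explainText() wrapper that adds the EXPLAIN prefix and delegates to queryPlan("text"); that wrapper and the sample query are assumptions, not confirmed by the snippet above.

// Hedged usage sketch: explainText() is assumed to wrap queryPlan("text") and
// to add the EXPLAIN PLAN prefix itself; verify against the actual QueryBuilder.
String plan = client.queryBuilder()
    .sql("SELECT * FROM cp.`employee.json`")   // query text is illustrative only
    .explainText();
System.out.println(plan);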
use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
the class MergingRecordBatch method createMerger.
// private boolean isOutgoingFull() {
//   return outgoingPosition == DEFAULT_ALLOC_RECORD_COUNT;
// }

/**
 * Creates a generated class which implements the copy and compare methods.
 *
 * @return instance of a new merger based on generated code
 * @throws SchemaChangeException
 */
private MergingReceiverGeneratorBase createMerger() throws SchemaChangeException {
  try {
    final CodeGenerator<MergingReceiverGeneratorBase> cg = CodeGenerator.get(
        MergingReceiverGeneratorBase.TEMPLATE_DEFINITION,
        context.getFunctionRegistry(), context.getOptions());
    cg.plainJavaCapable(true);
    // Uncomment this line to debug the generated code.
    // cg.saveCodeForDebugging(true);
    final ClassGenerator<MergingReceiverGeneratorBase> g = cg.getRoot();
    ExpandableHyperContainer batch = null;
    boolean first = true;
    for (final RecordBatchLoader loader : batchLoaders) {
      if (first) {
        batch = new ExpandableHyperContainer(loader);
        first = false;
      } else {
        batch.addBatch(loader);
      }
    }
    generateComparisons(g, batch);
    g.setMappingSet(COPIER_MAPPING_SET);
    CopyUtil.generateCopies(g, batch, true);
    g.setMappingSet(MAIN_MAPPING);
    final MergingReceiverGeneratorBase merger = context.getImplementationClass(cg);
    merger.doSetup(context, batch, outgoingContainer);
    return merger;
  } catch (ClassTransformationException | IOException e) {
    throw new SchemaChangeException(e);
  }
}
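
The loop over batchLoaders builds a single hyper-batch that the generated comparator and copier then operate on. A minimal sketch of just that accumulation step is shown below; it relies only on the ExpandableHyperContainer constructor and addBatch(...) calls seen above, and the helper name is hypothetical.

// Sketch only: the first loader seeds the hyper-container (and so fixes the
// schema); each later loader is stacked as another batch of the same schema.
private static ExpandableHyperContainer toHyperBatch(Iterable<RecordBatchLoader> loaders) {
  ExpandableHyperContainer batch = null;
  for (RecordBatchLoader loader : loaders) {
    if (batch == null) {
      batch = new ExpandableHyperContainer(loader);
    } else {
      batch.addBatch(loader);
    }
  }
  return batch;
}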
use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
the class MergingRecordBatch method close.
@Override
public void close() {
  outgoingContainer.clear();
  if (batchLoaders != null) {
    for (final RecordBatchLoader rbl : batchLoaders) {
      if (rbl != null) {
        rbl.clear();
      }
    }
  }
  super.close();
}
use of org.apache.drill.exec.record.RecordBatchLoader in project drill by apache.
the class BaseTestQuery method printResult.
protected int printResult(List<QueryDataBatch> results) throws SchemaChangeException {
  int rowCount = 0;
  final RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
  for (final QueryDataBatch result : results) {
    rowCount += result.getHeader().getRowCount();
    loader.load(result.getHeader().getDef(), result.getData());
    // TODO: Clean: DRILL-2933: That load(...) no longer throws
    // SchemaChangeException, so check/clean throw clause above.
    VectorUtil.showVectorAccessibleContent(loader, columnWidths);
    loader.clear();
    result.release();
  }
  System.out.println("Total record count: " + rowCount);
  return rowCount;
}
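
A typical invocation runs inside a subclass of BaseTestQuery; in the sketch below the testSqlWithResults(...) helper, the sample query, and the expected count are assumptions for illustration.

// Hypothetical usage inside a BaseTestQuery subclass: run a query, print every
// batch, and reclaim the batches (printResult releases them as it iterates).
List<QueryDataBatch> results = testSqlWithResults("SELECT * FROM cp.`employee.json`");
int rows = printResult(results);
assertEquals(expectedRowCount, rows);   // expectedRowCount is illustrative only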