Use of org.apache.drill.exec.ops.FragmentContext in project drill by apache.
In the class TestNewMathFunctions, method runTest:
public void runTest(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection, Object[] expectedResults, String planPath) throws Throwable {
  mockDrillbitContext(bitContext);
  final String planString = Resources.toString(Resources.getResource(planPath), Charsets.UTF_8);
  if (reader == null) {
    reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
  }
  if (registry == null) {
    registry = new FunctionImplementationRegistry(c);
  }
  if (context == null) {
    //new FragmentContext(bitContext, ExecProtos.FragmentHandle.getDefaultInstance(), connection, registry);
    context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
  }
  final PhysicalPlan plan = reader.readPhysicalPlan(planString);
  final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
  // skip schema batch
  exec.next();
  while (exec.next()) {
    final Object[] res = getRunResult(exec);
    assertEquals("return count does not match", expectedResults.length, res.length);
    for (int i = 0; i < res.length; i++) {
      assertEquals(String.format("column %s does not match", i), res[i], expectedResults[i]);
    }
  }
  if (context.getFailureCause() != null) {
    throw context.getFailureCause();
  }
  assertTrue(!context.isFailed());
}
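For orientation, a test in this class would call the helper with a physical-plan resource and the row values it expects back. The sketch below is hypothetical: the test name, plan path, and expected values are placeholders rather than anything taken from the Drill sources, and the imports and mocking annotations are assumed to match the snippet above.

@Test
public void testExampleMathFunction(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable {
  // Hypothetical plan resource and expected row values, shown only to illustrate how runTest is invoked.
  final Object[] expected = new Object[] { 1.0d, 0.0d };
  runTest(bitContext, connection, expected, "functions/testExampleMathFunction.json");
}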
Use of org.apache.drill.exec.ops.FragmentContext in project drill by apache.
In the class TestRepeatedFunction, method testRepeated:
@Test
public void testRepeated(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable {
  // System.out.println(System.getProperty("java.class.path"));
  mockDrillbitContext(bitContext);
  final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
  final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/physical_repeated_1.json"), Charsets.UTF_8));
  final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
  final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
  final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
  boolean oneIsOne = false;
  int size = 0;
  final int[] sizes = { 1, 2, 0, 6 };
  while (exec.next()) {
    final IntVector c1 = exec.getValueVectorById(new SchemaPath("cnt", ExpressionPosition.UNKNOWN), IntVector.class);
    final BitVector c2 = exec.getValueVectorById(new SchemaPath("has_min", ExpressionPosition.UNKNOWN), BitVector.class);
    for (int i = 0; i < exec.getRecordCount(); i++) {
      final int curSize = sizes[size % sizes.length];
      assertEquals(curSize, c1.getAccessor().get(i));
      switch (curSize) {
        case 1:
          assertEquals(oneIsOne, 1 == c2.getAccessor().get(i));
          oneIsOne = !oneIsOne;
          break;
        case 2:
          assertEquals(1, c2.getAccessor().get(i));
          break;
        case 0:
          assertEquals(0, c2.getAccessor().get(i));
          break;
        case 6:
          assertEquals(1, c2.getAccessor().get(i));
          break;
      }
      size++;
    }
  }
  if (context.getFailureCause() != null) {
    throw context.getFailureCause();
  }
  assertTrue(!context.isFailed());
}
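As a side note, the column lookups above can also be written with the SchemaPath.getSimplePath factory, which builds the same single-name path without spelling out ExpressionPosition.UNKNOWN. A small equivalent sketch, assuming the same exec variable as in the loop above:

// Equivalent lookups using the SchemaPath factory method instead of the (name, position) constructor.
final IntVector cnt = exec.getValueVectorById(SchemaPath.getSimplePath("cnt"), IntVector.class);
final BitVector hasMin = exec.getValueVectorById(SchemaPath.getSimplePath("has_min"), BitVector.class);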
Use of org.apache.drill.exec.ops.FragmentContext in project drill by apache.
In the class ParquetRecordReaderTest, method testPerformance:
@Test
@Ignore
public void testPerformance(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Exception {
  final DrillConfig c = DrillConfig.create();
  final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
  final FragmentContext context = new FragmentContext(bitContext, BitControl.PlanFragment.getDefaultInstance(), connection, registry);
  // new NonStrictExpectations() {
  //   {
  //     context.getAllocator(); result = BufferAllocator.getAllocator(DrillConfig.create());
  //   }
  // };
  final String fileName = "/tmp/parquet_test_performance.parquet";
  final HashMap<String, FieldInfo> fields = new HashMap<>();
  final ParquetTestProperties props = new ParquetTestProperties(1, 20 * 1000 * 1000, DEFAULT_BYTES_PER_PAGE, fields);
  populateFieldInfoMap(props);
  //generateParquetFile(fileName, props);
  final Configuration dfsConfig = new Configuration();
  final List<Footer> footers = ParquetFileReader.readFooters(dfsConfig, new Path(fileName));
  final Footer f = footers.iterator().next();
  final List<SchemaPath> columns = Lists.newArrayList();
  columns.add(new SchemaPath("_MAP.integer", ExpressionPosition.UNKNOWN));
  columns.add(new SchemaPath("_MAP.bigInt", ExpressionPosition.UNKNOWN));
  columns.add(new SchemaPath("_MAP.f", ExpressionPosition.UNKNOWN));
  columns.add(new SchemaPath("_MAP.d", ExpressionPosition.UNKNOWN));
  columns.add(new SchemaPath("_MAP.b", ExpressionPosition.UNKNOWN));
  columns.add(new SchemaPath("_MAP.bin", ExpressionPosition.UNKNOWN));
  columns.add(new SchemaPath("_MAP.bin2", ExpressionPosition.UNKNOWN));
  int totalRowCount = 0;
  final FileSystem fs = new CachedSingleFileSystem(fileName);
  final BufferAllocator allocator = RootAllocatorFactory.newRoot(c);
  for (int i = 0; i < 25; i++) {
    final ParquetRecordReader rr = new ParquetRecordReader(context, fileName, 0, fs, CodecFactory.createDirectCodecFactory(dfsConfig, new ParquetDirectByteBufferAllocator(allocator), 0), f.getParquetMetadata(), columns, ParquetReaderUtility.DateCorruptionStatus.META_SHOWS_CORRUPTION);
    final TestOutputMutator mutator = new TestOutputMutator(allocator);
    rr.setup(null, mutator);
    final Stopwatch watch = Stopwatch.createStarted();
    int rowCount = 0;
    while ((rowCount = rr.next()) > 0) {
      totalRowCount += rowCount;
    }
    System.out.println(String.format("Time completed: %s. ", watch.elapsed(TimeUnit.MILLISECONDS)));
    rr.close();
  }
  allocator.close();
  System.out.println(String.format("Total row count %s", totalRowCount));
}
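Note that this test assumes /tmp/parquet_test_performance.parquet already exists, since the generateParquetFile call is commented out. A hedged sketch of a guard that creates the file on first run, reusing the (fileName, props) call exactly as shown in the commented line:

// Hypothetical guard: build the test file once if it is missing, instead of leaving the call commented out.
if (!new java.io.File(fileName).exists()) {
  generateParquetFile(fileName, props);
}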
Use of org.apache.drill.exec.ops.FragmentContext in project drill by apache.
In the class TestParquetFilterPushDown, method initFSAndCreateFragContext:
@BeforeClass
public static void initFSAndCreateFragContext() throws Exception {
  fragContext = new FragmentContext(bits[0].getContext(), BitControl.PlanFragment.getDefaultInstance(), null, bits[0].getContext().getFunctionImplementationRegistry());
  Configuration conf = new Configuration();
  conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
  fs = FileSystem.get(conf);
}
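A matching teardown usually accompanies a @BeforeClass setup like this so the fragment context and filesystem are released. A minimal sketch; the method name and exception type are assumptions, and it relies on FragmentContext exposing a close() method:

@AfterClass
public static void teardown() throws Exception {
  // Hypothetical cleanup mirroring the @BeforeClass setup above; assumes FragmentContext is closeable.
  fragContext.close();
  fs.close();
}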
Use of org.apache.drill.exec.ops.FragmentContext in project drill by apache.
In the class TestTraceOutputDump, method testFilter:
@Test
public void testFilter(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable {
  mockDrillbitContext(bitContext);
  final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
  final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/trace/simple_trace.json"), Charsets.UTF_8));
  final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
  final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
  final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
  while (exec.next()) {
  }
  exec.close();
  if (context.getFailureCause() != null) {
    throw context.getFailureCause();
  }
  assertTrue(!context.isFailed());
  final FragmentHandle handle = context.getHandle();
  /* Form the file name to which the trace output will dump the record batches */
  final String qid = QueryIdHelper.getQueryId(handle.getQueryId());
  final int majorFragmentId = handle.getMajorFragmentId();
  final int minorFragmentId = handle.getMinorFragmentId();
  final String logLocation = c.getString(ExecConstants.TRACE_DUMP_DIRECTORY);
  System.out.println("Found log location: " + logLocation);
  final String filename = String.format("%s//%s_%d_%d_mock-scan", logLocation, qid, majorFragmentId, minorFragmentId);
  System.out.println("File Name: " + filename);
  final Configuration conf = new Configuration();
  conf.set(FileSystem.FS_DEFAULT_NAME_KEY, c.getString(ExecConstants.TRACE_DUMP_FILESYSTEM));
  final FileSystem fs = FileSystem.get(conf);
  final Path path = new Path(filename);
  assertTrue("Trace file does not exist", fs.exists(path));
  final FSDataInputStream in = fs.open(path);
  final VectorAccessibleSerializable wrap = new VectorAccessibleSerializable(context.getAllocator());
  wrap.readFromStream(in);
  final VectorAccessible container = wrap.get();
  /* Assert there are no selection vectors */
  assertTrue(wrap.getSv2() == null);
  /* Assert there is only one record */
  assertTrue(container.getRecordCount() == 1);
  /* Read the integer value and assert it is Integer.MIN_VALUE */
  final int value = (int) container.iterator().next().getValueVector().getAccessor().getObject(0);
  assertTrue(value == Integer.MIN_VALUE);
}
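The final assertTrue checks can equivalently be phrased with assertNull/assertEquals from org.junit.Assert, so a failure reports the offending value instead of a bare boolean. An equivalent sketch of the last three assertions:

// Same checks as above, written so JUnit prints the actual values on failure.
assertNull("Expected no selection vector", wrap.getSv2());
assertEquals("Expected a single record", 1, container.getRecordCount());
assertEquals(Integer.MIN_VALUE, value);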