Example usage of org.apache.drill.common.exceptions.UserException in the Apache Drill project.
From class TestUnnestCorrectness, method testUnnestNonArrayColumn:
@Test
public void testUnnestNonArrayColumn() {
  // Two incoming batches whose "unnestColumn" is a scalar INT, not an array.
  // Autoboxing replaces the deprecated new Integer(n) constructors.
  Object[][] data = { { 1, 3 }, { 6, 10 } };

  // Create input schema: unnestColumn is deliberately a non-repeated INT,
  // which unnest cannot process.
  TupleMetadata incomingSchema = new SchemaBuilder().add("rowNumber", TypeProtos.MinorType.INT).add("unnestColumn", TypeProtos.MinorType.INT).buildSchema();
  TupleMetadata[] incomingSchemas = { incomingSchema, incomingSchema };

  // We expect a UserException, so there is no baseline output to validate.
  Integer[][] baseline = {};
  RecordBatch.IterOutcome[] iterOutcomes = { RecordBatch.IterOutcome.OK_NEW_SCHEMA, RecordBatch.IterOutcome.OK };

  try {
    testUnnest(incomingSchemas, iterOutcomes, data, baseline);
    // Reaching this point means unnest accepted a non-array column: a bug.
    fail("Expected a UserException when unnesting a non-array column");
  } catch (UserException e) {
    // Expected failure path: unnest rejected the non-array column.
  } catch (Exception e) {
    fail("Failed due to exception: " + e.getMessage());
  }
}
Example usage of org.apache.drill.common.exceptions.UserException in the Apache Drill project.
From class TestUnnestCorrectness, method testUnnest:
// Drives the unnest operator through a simulated LateralJoin pipeline over the
// given incoming batches and validates the unnested output against 'baseline',
// optionally injecting a failure ("kill") part-way through execution. If the kill
// (execKill) parameter is greater than 0 then the execution is failed and the
// record batch cancelled after that many batches have been processed.
// NOTE(review): relies on test-fixture members defined elsewhere in this class
// (fixture, operatorContext, isTerminal, compareMapBaseline).
private <T> void testUnnest(TupleMetadata[] incomingSchemas, RecordBatch.IterOutcome[] iterOutcomes, // kill unnest after every 'unnestLimit' number of values in every record
int unnestLimit, // number of batches after which to kill the execution (!)
int execKill, T[][] data, T[][] baseline) throws Exception {
// Get the incoming container with dummy data for LJ
final List<VectorContainer> incomingContainer = new ArrayList<>(data.length);
// Create data: one SingleRowSet per input batch, rows numbered consecutively
// across ALL batches so each (rowNumber, unnestColumn) pair is unique.
ArrayList<RowSet.SingleRowSet> rowSets = new ArrayList<>();
int rowNumber = 0;
int batchNum = 0;
for (Object[] recordBatch : data) {
RowSetBuilder rowSetBuilder = fixture.rowSetBuilder(incomingSchemas[batchNum]);
for (Object rowData : recordBatch) {
rowSetBuilder.addRow(++rowNumber, rowData);
}
RowSet.SingleRowSet rowSet = rowSetBuilder.build();
rowSets.add(rowSet);
incomingContainer.add(rowSet.container());
batchNum++;
}
// Get the unnest POPConfig; unnest always targets the column named "unnestColumn"
final UnnestPOP unnestPopConfig = new UnnestPOP(null, new SchemaPath(new PathSegment.NameSegment("unnestColumn")), DrillUnnestRelBase.IMPLICIT_COLUMN);
// Get the IterOutcomes for LJ
final List<RecordBatch.IterOutcome> outcomes = new ArrayList<>(iterOutcomes.length);
for (RecordBatch.IterOutcome o : iterOutcomes) {
outcomes.add(o);
}
// Create incoming MockRecordBatch that replays the prepared containers/outcomes
final MockRecordBatch incomingMockBatch = new MockRecordBatch(fixture.getFragmentContext(), operatorContext, incomingContainer, outcomes, incomingContainer.get(0).getSchema());
final MockLateralJoinBatch lateralJoinBatch = new MockLateralJoinBatch(fixture.getFragmentContext(), operatorContext, incomingMockBatch);
// setup Unnest record batch
final UnnestRecordBatch unnestBatch = new UnnestRecordBatch(unnestPopConfig, fixture.getFragmentContext());
// set pointer to Lateral in unnest pop config
unnestBatch.setIncoming((LateralContract) lateralJoinBatch);
// set backpointer to lateral join in unnest
lateralJoinBatch.setUnnest(unnestBatch);
lateralJoinBatch.setUnnestLimit(unnestLimit);
// Simulate the pipeline by calling next on the incoming
List<ValueVector> results = null;
int batchesProcessed = 0;
try {
while (!isTerminal(lateralJoinBatch.next())) {
batchesProcessed++;
if (batchesProcessed == execKill) {
// Inject an executor-level failure, then cancel, to exercise the kill path
lateralJoinBatch.getContext().getExecutorState().fail(new DrillException("Testing failure of execution."));
lateralJoinBatch.cancel();
}
// else nothing to do
}
// Check results against baseline: one output vector per baseline[i] entry,
// compared element by element (maps, varchars, and other types each have
// their own comparison path below).
results = lateralJoinBatch.getResultList();
int i = 0;
for (ValueVector vv : results) {
int valueCount = vv.getAccessor().getValueCount();
if (valueCount != baseline[i].length) {
fail("Test failed in validating unnest output. Value count mismatch.");
}
for (int j = 0; j < valueCount; j++) {
if (vv instanceof MapVector) {
if (!compareMapBaseline(baseline[i][j], vv.getAccessor().getObject(j))) {
fail("Test failed in validating unnest(Map) output. Value mismatch");
}
} else if (vv instanceof VarCharVector) {
// VarChar values are compared via toString() against the String baseline
Object val = vv.getAccessor().getObject(j);
if (((String) baseline[i][j]).compareTo(val.toString()) != 0) {
fail("Test failed in validating unnest output. Value mismatch. Baseline value[]" + i + "][" + j + "]" + ": " + baseline[i][j] + " VV.getObject(j): " + val);
}
} else {
Object val = vv.getAccessor().getObject(j);
if (!baseline[i][j].equals(val)) {
fail("Test failed in validating unnest output. Value mismatch. Baseline value[" + i + "][" + j + "]" + ": " + baseline[i][j] + " VV.getObject(j): " + val);
}
}
}
i++;
}
assertTrue(lateralJoinBatch.isCompleted());
} catch (UserException e) {
// Valid exception: rethrown so callers that expect failure can catch it
throw e;
} catch (Exception e) {
fail("Test failed in validating unnest output. Exception : " + e.getMessage());
} finally {
// Close all the resources for this test case
unnestBatch.close();
lateralJoinBatch.close();
incomingMockBatch.close();
if (results != null) {
for (ValueVector vv : results) {
vv.clear();
}
}
for (RowSet.SingleRowSet rowSet : rowSets) {
rowSet.clear();
}
}
}
Example usage of org.apache.drill.common.exceptions.UserException in the Apache Drill project.
From class TestFileScanLifecycle, method testCtorUserError:
/**
 * Checks that a UserException thrown from a file-based reader's constructor
 * carries the file name along with both the reader-level and scan-level
 * error contexts, and does not wrap a nested cause.
 */
@Test
public void testCtorUserError() {
  FileScanLifecycleBuilder scanBuilder = new FileScanLifecycleBuilder();
  scanBuilder.errorContext(b -> b.addContext("Scan context"));
  scanBuilder.rootDir(MOCK_ROOT_PATH);
  scanBuilder.maxPartitionDepth(3);
  scanBuilder.projection(FileScanUtils.projectAllWithMetadata(3));
  scanBuilder.fileSplits(Collections.singletonList(new DummyFileWork(MOCK_FILE_PATH)));
  scanBuilder.useLegacyWildcardExpansion(true);
  scanBuilder.readerFactory(new FileReaderFactory() {
    @Override
    public ManagedReader newReader(FileSchemaNegotiator schemaNegotiator) {
      // A reader whose constructor throws, tagged "ctor-u"
      return new FailingReader(schemaNegotiator, "ctor-u");
    }
  });

  ScanLifecycle scan = buildScan(scanBuilder);
  RowBatchReader reader = scan.nextReader();
  try {
    reader.open();
    fail();
  } catch (UserException e) {
    // Message must include the reader's own text, both error contexts,
    // and the file name; no wrapped cause is expected.
    String message = e.getMessage();
    assertTrue(message.contains("Oops ctor"));
    assertTrue(message.contains("My custom context"));
    assertTrue(message.contains("Scan context"));
    assertTrue(message.contains(MOCK_FILE_NAME));
    assertNull(e.getCause());
  }
  scan.close();
}
Example usage of org.apache.drill.common.exceptions.UserException in the Apache Drill project.
From class TestImplicitColumnResolver, method testProvidedImplicitColInternal:
@Test
public void testProvidedImplicitColInternal() {
  // Provided schema marks "myLmt" as the last-modified-time implicit column,
  // which (per the test name) is internal-only and must be rejected when
  // referenced from a provided schema.
  TupleMetadata providedSchema = new SchemaBuilder().add("myLmt", MinorType.INT).build();
  SchemaUtils.markImplicit(providedSchema.metadata("myLmt"), ScanTestUtils.LAST_MODIFIED_TIME_COL);
  ParserFixture parseFixture = new ParserFixture(RowSetTestUtils.projectAll());
  parseFixture.tracker.applyProvidedSchema(providedSchema);
  try {
    parseFixture.parseImplicit();
    // Without this, the test silently passes when no exception is thrown.
    fail("Expected a UserException for an undefined implicit column type");
  } catch (UserException e) {
    assertTrue(e.getMessage().contains("references an undefined implicit column type"));
  }
}
Example usage of org.apache.drill.common.exceptions.UserException in the Apache Drill project.
From class TestImplicitColumnResolver, method testProvidedImplicitColModeConflict:
@Test
public void testProvidedImplicitColModeConflict() {
  // Provided schema declares the FQN implicit column as a VARCHAR array;
  // the resolver must reject the mode/type mismatch.
  TupleMetadata providedSchema = new SchemaBuilder().addArray("myFqn", MinorType.VARCHAR).build();
  SchemaUtils.markImplicit(providedSchema.metadata("myFqn"), ColumnMetadata.IMPLICIT_FQN);
  ParserFixture parseFixture = new ParserFixture(RowSetTestUtils.projectAll());
  parseFixture.tracker.applyProvidedSchema(providedSchema);
  try {
    parseFixture.parseImplicit();
    // Without this, the test silently passes when no exception is thrown.
    fail("Expected a UserException for an implicit column of the wrong type");
  } catch (UserException e) {
    assertTrue(e.getMessage().contains("wrong type"));
  }
}
Aggregations