Use of org.apache.drill.common.exceptions.UserException in project drill by apache.
The class TestResultSetLoaderProtocol, method testCaseInsensitiveSchema.
/**
 * Schemas are case-insensitive by default. Verify that
 * the schema mechanism works, with emphasis on
 * case-insensitive column-name lookup.
 * <p>
 * The tests here and elsewhere build columns from a
 * <tt>MaterializedField</tt>. Doing so is rather old-school;
 * better to use the newer <tt>ColumnMetadata</tt>, which provides
 * additional information. The code here simply uses the <tt>MaterializedField</tt>
 * to create a <tt>ColumnMetadata</tt> implicitly.
 */
@Test
public void testCaseInsensitiveSchema() {
ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator());
RowSetLoader rootWriter = rsLoader.writer();
TupleMetadata schema = rootWriter.tupleSchema();
assertEquals(0, rsLoader.schemaVersion());
// No columns defined in schema
assertNull(schema.metadata("a"));
try {
schema.column(0);
fail();
} catch (IndexOutOfBoundsException e) {
// Expected
}
try {
rootWriter.column("a");
fail();
} catch (UndefinedColumnException e) {
// Expected
}
try {
rootWriter.column(0);
fail();
} catch (IndexOutOfBoundsException e) {
// Expected
}
// Define a column
assertEquals(0, rsLoader.schemaVersion());
MaterializedField colSchema = SchemaBuilder.columnSchema("a", MinorType.VARCHAR, DataMode.REQUIRED);
rootWriter.addColumn(colSchema);
assertEquals(1, rsLoader.schemaVersion());
// Can now be found, case insensitive
assertTrue(colSchema.isEquivalent(schema.column(0)));
ColumnMetadata colMetadata = schema.metadata(0);
assertSame(colMetadata, schema.metadata("a"));
assertSame(colMetadata, schema.metadata("A"));
assertNotNull(rootWriter.column(0));
assertNotNull(rootWriter.column("a"));
assertNotNull(rootWriter.column("A"));
assertEquals(1, schema.size());
assertEquals(0, schema.index("a"));
assertEquals(0, schema.index("A"));
try {
rootWriter.addColumn(colSchema);
fail();
} catch (UserException e) {
// Expected
}
try {
MaterializedField testCol = SchemaBuilder.columnSchema("A", MinorType.VARCHAR, DataMode.REQUIRED);
rootWriter.addColumn(testCol);
fail();
} catch (UserException e) {
// Expected
assertTrue(e.getMessage().contains("Duplicate"));
}
// Can still add required fields while writing the first row.
rsLoader.startBatch();
rootWriter.start();
rootWriter.scalar(0).setString("foo");
MaterializedField col2 = SchemaBuilder.columnSchema("b", MinorType.VARCHAR, DataMode.REQUIRED);
rootWriter.addColumn(col2);
assertEquals(2, rsLoader.schemaVersion());
assertTrue(col2.isEquivalent(schema.column(1)));
ColumnMetadata col2Metadata = schema.metadata(1);
assertSame(col2Metadata, schema.metadata("b"));
assertSame(col2Metadata, schema.metadata("B"));
assertEquals(2, schema.size());
assertEquals(1, schema.index("b"));
assertEquals(1, schema.index("B"));
rootWriter.scalar(1).setString("second");
// After first row, can add an optional or repeated.
// Also allows a required field: values will be back-filled.
rootWriter.save();
rootWriter.start();
rootWriter.scalar(0).setString("bar");
rootWriter.scalar(1).setString("");
MaterializedField col3 = SchemaBuilder.columnSchema("c", MinorType.VARCHAR, DataMode.REQUIRED);
rootWriter.addColumn(col3);
assertEquals(3, rsLoader.schemaVersion());
assertTrue(col3.isEquivalent(schema.column(2)));
ColumnMetadata col3Metadata = schema.metadata(2);
assertSame(col3Metadata, schema.metadata("c"));
assertSame(col3Metadata, schema.metadata("C"));
assertEquals(3, schema.size());
assertEquals(2, schema.index("c"));
assertEquals(2, schema.index("C"));
rootWriter.scalar("c").setString("c.2");
MaterializedField col4 = SchemaBuilder.columnSchema("d", MinorType.VARCHAR, DataMode.OPTIONAL);
rootWriter.addColumn(col4);
assertEquals(4, rsLoader.schemaVersion());
assertTrue(col4.isEquivalent(schema.column(3)));
ColumnMetadata col4Metadata = schema.metadata(3);
assertSame(col4Metadata, schema.metadata("d"));
assertSame(col4Metadata, schema.metadata("D"));
assertEquals(4, schema.size());
assertEquals(3, schema.index("d"));
assertEquals(3, schema.index("D"));
rootWriter.scalar("d").setString("d.2");
MaterializedField col5 = SchemaBuilder.columnSchema("e", MinorType.VARCHAR, DataMode.REPEATED);
rootWriter.addColumn(col5);
assertEquals(5, rsLoader.schemaVersion());
assertTrue(col5.isEquivalent(schema.column(4)));
ColumnMetadata col5Metadata = schema.metadata(4);
assertSame(col5Metadata, schema.metadata("e"));
assertSame(col5Metadata, schema.metadata("E"));
assertEquals(5, schema.size());
assertEquals(4, schema.index("e"));
assertEquals(4, schema.index("E"));
rootWriter.array(4).setObject(strArray("e1", "e2", "e3"));
MaterializedField col6 = SchemaBuilder.columnSchema("f", MinorType.BIGINT, DataMode.REPEATED);
rootWriter.addColumn(col6);
assertEquals(6, rsLoader.schemaVersion());
assertTrue(col6.isEquivalent(schema.column(5)));
ColumnMetadata col6Metadata = schema.metadata(5);
assertSame(col6Metadata, schema.metadata("f"));
assertSame(col6Metadata, schema.metadata("F"));
assertEquals(6, schema.size());
assertEquals(5, schema.index("f"));
assertEquals(5, schema.index("F"));
rootWriter.array(5).setObject(new Long[] { Long.MIN_VALUE, Long.MAX_VALUE });
MaterializedField col7 = SchemaBuilder.columnSchema("g", MinorType.INT, DataMode.REPEATED);
rootWriter.addColumn(col7);
assertEquals(7, rsLoader.schemaVersion());
assertTrue(col7.isEquivalent(schema.column(6)));
ColumnMetadata col7Metadata = schema.metadata(6);
assertSame(col7Metadata, schema.metadata("g"));
assertSame(col7Metadata, schema.metadata("G"));
assertEquals(7, schema.size());
assertEquals(6, schema.index("g"));
assertEquals(6, schema.index("G"));
rootWriter.array(6).setObject(new Integer[] { Integer.MIN_VALUE, Integer.MAX_VALUE });
MaterializedField col8 = SchemaBuilder.columnSchema("h", MinorType.INT, DataMode.REPEATED);
rootWriter.addColumn(col8);
assertEquals(8, rsLoader.schemaVersion());
assertTrue(col8.isEquivalent(schema.column(7)));
ColumnMetadata col8Metadata = schema.metadata(7);
assertSame(col8Metadata, schema.metadata("h"));
assertSame(col8Metadata, schema.metadata("H"));
assertEquals(8, schema.size());
assertEquals(7, schema.index("h"));
assertEquals(7, schema.index("H"));
rootWriter.array(7).setObject(new Short[] { Short.MIN_VALUE, Short.MAX_VALUE });
MaterializedField col9 = SchemaBuilder.columnSchema("i", MinorType.INT, DataMode.REPEATED);
rootWriter.addColumn(col9);
assertEquals(9, rsLoader.schemaVersion());
assertTrue(col9.isEquivalent(schema.column(8)));
ColumnMetadata col9Metadata = schema.metadata(8);
assertSame(col9Metadata, schema.metadata("i"));
assertSame(col9Metadata, schema.metadata("I"));
assertEquals(9, schema.size());
assertEquals(8, schema.index("i"));
assertEquals(8, schema.index("I"));
rootWriter.array(8).setObject(new Byte[] { Byte.MIN_VALUE, Byte.MAX_VALUE });
MaterializedField col10 = SchemaBuilder.columnSchema("j", MinorType.FLOAT8, DataMode.REPEATED);
rootWriter.addColumn(col10);
assertEquals(10, rsLoader.schemaVersion());
assertTrue(col10.isEquivalent(schema.column(9)));
ColumnMetadata col10Metadata = schema.metadata(9);
assertSame(col10Metadata, schema.metadata("j"));
assertSame(col10Metadata, schema.metadata("J"));
assertEquals(10, schema.size());
assertEquals(9, schema.index("j"));
assertEquals(9, schema.index("J"));
rootWriter.array(9).setObject(new Double[] { Double.MIN_VALUE, Double.MAX_VALUE });
MaterializedField col11 = SchemaBuilder.columnSchema("k", MinorType.FLOAT4, DataMode.REPEATED);
rootWriter.addColumn(col11);
assertEquals(11, rsLoader.schemaVersion());
assertTrue(col11.isEquivalent(schema.column(10)));
ColumnMetadata col11Metadata = schema.metadata(10);
assertSame(col11Metadata, schema.metadata("k"));
assertSame(col11Metadata, schema.metadata("K"));
assertEquals(11, schema.size());
assertEquals(10, schema.index("k"));
assertEquals(10, schema.index("K"));
rootWriter.array(10).setObject(new Float[] { Float.MIN_VALUE, Float.MAX_VALUE });
MaterializedField col12 = SchemaBuilder.columnSchema("l", MinorType.BIT, DataMode.REPEATED);
rootWriter.addColumn(col12);
assertEquals(12, rsLoader.schemaVersion());
assertTrue(col12.isEquivalent(schema.column(11)));
ColumnMetadata col12Metadata = schema.metadata(11);
assertSame(col12Metadata, schema.metadata("l"));
assertSame(col12Metadata, schema.metadata("L"));
assertEquals(12, schema.size());
assertEquals(11, schema.index("l"));
assertEquals(11, schema.index("L"));
rootWriter.array(11).setObject(new Boolean[] { Boolean.TRUE, Boolean.FALSE });
rootWriter.save();
// Verify. No reason to expect problems, but might as well check.
RowSet result = fixture.wrap(rsLoader.harvest());
assertEquals(12, rsLoader.schemaVersion());
SingleRowSet expected = fixture.rowSetBuilder(result.batchSchema())
    .addRow("foo", "second", "", null, strArray(), longArray(), intArray(), shortArray(),
        byteArray(), doubleArray(), floatArray(), boolArray())
    .addRow("bar", "", "c.2", "d.2", strArray("e1", "e2", "e3"),
        longArray(Long.MIN_VALUE, Long.MAX_VALUE),
        intArray(Integer.MIN_VALUE, Integer.MAX_VALUE),
        shortArray(Short.MIN_VALUE, Short.MAX_VALUE),
        byteArray((int) Byte.MIN_VALUE, (int) Byte.MAX_VALUE),
        doubleArray(Double.MIN_VALUE, Double.MAX_VALUE),
        floatArray(Float.MIN_VALUE, Float.MAX_VALUE),
        boolArray(Boolean.TRUE, Boolean.FALSE))
    .build();
RowSetUtilities.verify(expected, result);
// Handy way to test that close works to abort an in-flight batch
// and clean up.
rsLoader.close();
}
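As the Javadoc above notes, newer code would build the column as a ColumnMetadata directly rather than routing through MaterializedField. A minimal sketch of that style, assuming the MetadataUtils factory and the ColumnMetadata overload of addColumn found in recent Drill versions:

// Sketch only, not part of the original test: ColumnMetadata-first column creation.
// MetadataUtils.newScalar and addColumn(ColumnMetadata) are assumed APIs here.
ColumnMetadata aCol = MetadataUtils.newScalar("a", MinorType.VARCHAR, DataMode.REQUIRED);
rootWriter.addColumn(aCol);
assertSame(schema.metadata("a"), schema.metadata("A")); // lookup stays case-insensitive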
Use of org.apache.drill.common.exceptions.UserException in project drill by apache.
The class TestHttpPlugin, method testUrlParamError.
/**
 * When the user has configured an API connection with URL parameters,
 * those parameters must be included in the WHERE clause. Drill
 * throws an exception if any such parameter is not present.
 * @throws Exception if anything goes wrong
 */
@Test
public void testUrlParamError() throws Exception {
String sql = "SELECT _response_url FROM local.github\n";
try (MockWebServer server = startServer()) {
server.enqueue(new MockResponse().setResponseCode(200).setBody(TEST_JSON_RESPONSE));
run(sql);
fail();
} catch (UserException e) {
assertTrue(e.getMessage().contains("API Query with URL Parameters must be populated."));
}
}
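For contrast, a hedged sketch of the passing case: the query succeeds once the WHERE clause populates the configured URL parameter. The parameter name org is illustrative, not taken from the test's plugin configuration:

// Sketch only: "org" stands in for whatever URL parameter the connection declares.
String sql = "SELECT _response_url FROM local.github WHERE org = 'apache'";
try (MockWebServer server = startServer()) {
  server.enqueue(new MockResponse().setResponseCode(200).setBody(TEST_JSON_RESPONSE));
  run(sql); // no UserException: the URL parameter is populated
}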
Use of org.apache.drill.common.exceptions.UserException in project drill by apache.
The class TestURLParameters, method testDefaultParameterExtractorWithBlankDefault.
@Test
public void testDefaultParameterExtractorWithBlankDefault() {
HttpUrl pokemonUrl = HttpUrl.parse("https://pokeapi.co/api/{pokemon_name=}");
try {
SimpleHttp.getDefaultParameterValue(pokemonUrl, "pokemon_name");
fail();
} catch (UserException e) {
assertTrue(e.getMessage().contains("Default URL parameters must have a value."));
}
}
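The inverse case for reference: when the default value is non-blank, getDefaultParameterValue returns it rather than throwing. A minimal sketch using the same API (the ditto value is illustrative):

// Sketch only: a non-blank default parses cleanly.
HttpUrl urlWithDefault = HttpUrl.parse("https://pokeapi.co/api/{pokemon_name=ditto}");
assertEquals("ditto", SimpleHttp.getDefaultParameterValue(urlWithDefault, "pokemon_name"));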
Use of org.apache.drill.common.exceptions.UserException in project drill by apache.
The class TestUnnestWithLateralCorrectness, method testNestedUnnest.
/**
 * Run a plan like the following for various input batches:
 * <pre>
 *              Lateral1
 *             /        \
 *          Scan      Lateral2
 *                   /        \
 *             Project1     Project2
 *                 |            |
 *             Unnest1      Unnest2
 * </pre>
 * @param incomingSchemas schemas of the incoming record batches
 * @param iterOutcomes IterOutcome for each incoming batch
 * @param execKill number of batches after which to kill the execution
 * @param data values for the incoming batches
 * @param baseline expected output values, per batch and column
 * @param <T> type of the data values
 * @throws Exception if anything goes wrong
 */
private <T> void testNestedUnnest(TupleMetadata[] incomingSchemas, RecordBatch.IterOutcome[] iterOutcomes,
    int execKill, T[][] data, T[][][] baseline) throws Exception {
// Get the incoming container with dummy data for LJ
final List<VectorContainer> incomingContainer = new ArrayList<>(data.length);
// Create data
ArrayList<RowSet.SingleRowSet> rowSets = new ArrayList<>();
int rowNumber = 0;
int batchNum = 0;
for (Object[] recordBatch : data) {
RowSetBuilder rowSetBuilder = fixture.rowSetBuilder(incomingSchemas[batchNum]);
for (Object rowData : recordBatch) {
rowSetBuilder.addRow(++rowNumber, rowData);
}
RowSet.SingleRowSet rowSet = rowSetBuilder.build();
rowSets.add(rowSet);
incomingContainer.add(rowSet.container());
batchNum++;
}
// Get the unnest POPConfig
final UnnestPOP unnestPopConfig1 = new UnnestPOP(null, SchemaPath.getSimplePath("unnestColumn"), DrillUnnestRelBase.IMPLICIT_COLUMN);
final UnnestPOP unnestPopConfig2 = new UnnestPOP(null, SchemaPath.getSimplePath("colB"), DrillUnnestRelBase.IMPLICIT_COLUMN);
// Get the IterOutcomes for LJ
final List<RecordBatch.IterOutcome> outcomes = new ArrayList<>(iterOutcomes.length);
for (RecordBatch.IterOutcome o : iterOutcomes) {
outcomes.add(o);
}
// Create incoming MockRecordBatch
final MockRecordBatch incomingMockBatch = new MockRecordBatch(fixture.getFragmentContext(), operatorContext, incomingContainer, outcomes, incomingContainer.get(0).getSchema());
// setup Unnest record batch
final UnnestRecordBatch unnestBatch1 = new UnnestRecordBatch(unnestPopConfig1, fixture.getFragmentContext());
final UnnestRecordBatch unnestBatch2 = new UnnestRecordBatch(unnestPopConfig2, fixture.getFragmentContext());
// Create intermediate Project
final Project projectPopConfig1 = new Project(DrillLogicalTestUtils.parseExprs("unnestColumn.colB", "colB", unnestPopConfig1.getImplicitColumn(), unnestPopConfig1.getImplicitColumn()), unnestPopConfig1);
final ProjectRecordBatch projectBatch1 = new ProjectRecordBatch(projectPopConfig1, unnestBatch1, fixture.getFragmentContext());
final Project projectPopConfig2 = new Project(DrillLogicalTestUtils.parseExprs("colB", "unnestColumn2", unnestPopConfig2.getImplicitColumn(), unnestPopConfig2.getImplicitColumn()), unnestPopConfig2);
final ProjectRecordBatch projectBatch2 = new ProjectRecordBatch(projectPopConfig2, unnestBatch2, fixture.getFragmentContext());
final LateralJoinPOP ljPopConfig2 = new LateralJoinPOP(projectPopConfig1, projectPopConfig2, JoinRelType.INNER, DrillLateralJoinRelBase.IMPLICIT_COLUMN, Lists.newArrayList());
final LateralJoinPOP ljPopConfig1 = new LateralJoinPOP(mockPopConfig, ljPopConfig2, JoinRelType.INNER, DrillLateralJoinRelBase.IMPLICIT_COLUMN, Lists.newArrayList());
final LateralJoinBatch lateralJoinBatch2 = new LateralJoinBatch(ljPopConfig2, fixture.getFragmentContext(), projectBatch1, projectBatch2);
final LateralJoinBatch lateralJoinBatch1 = new LateralJoinBatch(ljPopConfig1, fixture.getFragmentContext(), incomingMockBatch, lateralJoinBatch2);
// set pointer to Lateral in unnest
unnestBatch1.setIncoming((LateralContract) lateralJoinBatch1);
unnestBatch2.setIncoming((LateralContract) lateralJoinBatch2);
// Simulate the pipeline by calling next on the incoming
// results is an array of batches, each batch being an array of output vectors.
List<List<ValueVector>> resultList = new ArrayList<>();
List<List<ValueVector>> results = null;
int batchesProcessed = 0;
try {
try {
while (!isTerminal(lateralJoinBatch1.next())) {
if (lateralJoinBatch1.getRecordCount() > 0) {
addBatchToResults(resultList, lateralJoinBatch1);
}
batchesProcessed++;
if (batchesProcessed == execKill) {
lateralJoinBatch1.getContext().getExecutorState().fail(new DrillException("Testing failure of execution."));
lateralJoinBatch1.cancel();
}
// else nothing to do
}
} catch (UserException e) {
throw e;
} catch (Exception e) {
throw new Exception("Test failed to execute lateralJoinBatch.next() because: " + e.getMessage());
}
// Check results against baseline
results = resultList;
int batchIndex = 0;
int vectorIndex = 0;
// int valueIndex = 0;
for (List<ValueVector> batch : results) {
int vectorCount = batch.size();
if (vectorCount != baseline[batchIndex].length + 2) {
// baseline does not include the original unnest column(s)
fail("Test failed in validating unnest output. Batch column count mismatch.");
}
for (ValueVector vv : batch) {
if (vv.getField().getName().equals("unnestColumn") || vv.getField().getName().equals("colB")) {
// skip the original input column
continue;
}
int valueCount = vv.getAccessor().getValueCount();
if (valueCount != baseline[batchIndex][vectorIndex].length) {
fail("Test failed in validating unnest output. Value count mismatch in batch number " + (batchIndex + 1) + "" + ".");
}
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
if (vv instanceof MapVector) {
if (!compareMapBaseline(baseline[batchIndex][vectorIndex][valueIndex], vv.getAccessor().getObject(valueIndex))) {
fail("Test failed in validating unnest(Map) output. Value mismatch");
}
} else if (vv instanceof VarCharVector) {
Object val = vv.getAccessor().getObject(valueIndex);
if (((String) baseline[batchIndex][vectorIndex][valueIndex]).compareTo(val.toString()) != 0) {
fail("Test failed in validating unnest output. Value mismatch. Baseline value[]" + vectorIndex + "][" + valueIndex + "]" + ": " + baseline[vectorIndex][valueIndex] + " VV.getObject(valueIndex): " + val);
}
} else {
Object val = vv.getAccessor().getObject(valueIndex);
if (!baseline[batchIndex][vectorIndex][valueIndex].equals(val)) {
fail("Test failed in validating unnest output. Value mismatch. Baseline value[" + vectorIndex + "][" + valueIndex + "]" + ": " + baseline[batchIndex][vectorIndex][valueIndex] + " VV.getObject(valueIndex): " + val);
}
}
}
vectorIndex++;
}
vectorIndex = 0;
batchIndex++;
}
} catch (UserException e) {
// Valid exception
throw e;
} catch (Exception e) {
fail("Test failed. Exception : " + e.getMessage());
} finally {
// Close all the resources for this test case
unnestBatch1.close();
lateralJoinBatch1.close();
unnestBatch2.close();
lateralJoinBatch2.close();
incomingMockBatch.close();
if (results != null) {
for (List<ValueVector> batch : results) {
for (ValueVector vv : batch) {
vv.clear();
}
}
}
for (RowSet.SingleRowSet rowSet : rowSets) {
rowSet.clear();
}
}
}
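To show the shape of the inputs, a hypothetical invocation; the schema and outcome values are illustrative, not copied from the actual callers in TestUnnestWithLateralCorrectness:

// Sketch only: one incoming batch whose "unnestColumn" is a repeated map holding
// the repeated "colB" that the inner unnest flattens. Data and baseline omitted.
TupleMetadata[] schemas = { new SchemaBuilder()
    .add("rowNumber", MinorType.INT)
    .addMapArray("unnestColumn")
      .addArray("colB", MinorType.INT)
      .resumeSchema()
    .buildSchema() };
RecordBatch.IterOutcome[] outcomes = { RecordBatch.IterOutcome.OK_NEW_SCHEMA };
// testNestedUnnest(schemas, outcomes, 0, data, baseline); // 0 = never kill the execution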
Use of org.apache.drill.common.exceptions.UserException in project drill by apache.
The class TestUnnestWithLateralCorrectness, method testUnnest.
// Test unnest for various input conditions, optionally invoking kill. If the execKill
// parameter is greater than 0, the record batch is sent a kill after that many batches
// have been processed.
private <T> void testUnnest(TupleMetadata[] incomingSchemas, RecordBatch.IterOutcome[] iterOutcomes,
    int unnestLimit, // kill unnest after every 'unnestLimit' number of values in every record
    int execKill, // number of batches after which to kill the execution
    T[][] data, T[][][] baseline, boolean excludeUnnestColumn) throws Exception {
// Get the incoming container with dummy data for LJ
final List<VectorContainer> incomingContainer = new ArrayList<>(data.length);
// Create data
ArrayList<RowSet.SingleRowSet> rowSets = new ArrayList<>();
int rowNumber = 0;
int batchNum = 0;
for (Object[] recordBatch : data) {
RowSetBuilder rowSetBuilder = fixture.rowSetBuilder(incomingSchemas[batchNum]);
for (Object rowData : recordBatch) {
rowSetBuilder.addRow(++rowNumber, rowData);
}
RowSet.SingleRowSet rowSet = rowSetBuilder.build();
rowSets.add(rowSet);
incomingContainer.add(rowSet.container());
batchNum++;
}
// Get the unnest POPConfig
final UnnestPOP unnestPopConfig = new UnnestPOP(null, SchemaPath.getCompoundPath("unnestColumn"), DrillUnnestRelBase.IMPLICIT_COLUMN);
// Get the IterOutcomes for LJ
final List<RecordBatch.IterOutcome> outcomes = new ArrayList<>(iterOutcomes.length);
for (RecordBatch.IterOutcome o : iterOutcomes) {
outcomes.add(o);
}
// Create incoming MockRecordBatch
final MockRecordBatch incomingMockBatch = new MockRecordBatch(fixture.getFragmentContext(), operatorContext, incomingContainer, outcomes, incomingContainer.get(0).getSchema());
// setup Unnest record batch
final UnnestRecordBatch unnestBatch = new UnnestRecordBatch(unnestPopConfig, fixture.getFragmentContext());
// A project is required to rename the columns so as to disambiguate the same column
// name coming from the unnest operator and the regular scan.
final Project projectPopConfig = new Project(DrillLogicalTestUtils.parseExprs("unnestColumn", "unnestColumn1", unnestPopConfig.getImplicitColumn(), unnestPopConfig.getImplicitColumn()), null);
final ProjectRecordBatch projectBatch = new ProjectRecordBatch(projectPopConfig, unnestBatch, fixture.getFragmentContext());
final LateralJoinBatch lateralJoinBatch = new LateralJoinBatch(ljPopConfig, fixture.getFragmentContext(), incomingMockBatch, projectBatch);
// set pointer to Lateral in unnest
unnestBatch.setIncoming((LateralContract) lateralJoinBatch);
// Simulate the pipeline by calling next on the incoming
// results is an array of batches, each batch being an array of output vectors.
List<List<ValueVector>> resultList = new ArrayList<>();
List<List<ValueVector>> results = null;
int batchesProcessed = 0;
try {
try {
while (!isTerminal(lateralJoinBatch.next())) {
if (lateralJoinBatch.getRecordCount() > 0) {
addBatchToResults(resultList, lateralJoinBatch);
}
batchesProcessed++;
if (batchesProcessed == execKill) {
// Simulate by skipping out of the loop
break;
}
// else nothing to do
}
} catch (UserException e) {
throw e;
} catch (Exception e) {
fail(e.getMessage());
}
// Check results against baseline
results = resultList;
int batchIndex = 0;
int vectorIndex = 0;
// int valueIndex = 0;
for (List<ValueVector> batch : results) {
int vectorCount = batch.size();
int expectedVectorCount = (excludeUnnestColumn) ? 0 : 1;
expectedVectorCount += baseline[batchIndex].length;
if (vectorCount != expectedVectorCount) {
// baseline does not include the original unnest column
fail("Test failed in validating unnest output. Batch column count mismatch.");
}
for (ValueVector vv : batch) {
if (vv.getField().getName().equals("unnestColumn")) {
// skip the original input column
continue;
}
int valueCount = vv.getAccessor().getValueCount();
if (valueCount != baseline[batchIndex][vectorIndex].length) {
fail("Test failed in validating unnest output. Value count mismatch in batch number " + (batchIndex + 1) + "" + ".");
}
for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) {
if (vv instanceof MapVector) {
if (!compareMapBaseline(baseline[batchIndex][vectorIndex][valueIndex], vv.getAccessor().getObject(valueIndex))) {
fail("Test failed in validating unnest(Map) output. Value mismatch");
}
} else if (vv instanceof VarCharVector) {
Object val = vv.getAccessor().getObject(valueIndex);
if (((String) baseline[batchIndex][vectorIndex][valueIndex]).compareTo(val.toString()) != 0) {
fail("Test failed in validating unnest output. Value mismatch. Baseline value[]" + vectorIndex + "][" + valueIndex + "]" + ": " + baseline[vectorIndex][valueIndex] + " VV.getObject(valueIndex): " + val);
}
} else {
Object val = vv.getAccessor().getObject(valueIndex);
if (!baseline[batchIndex][vectorIndex][valueIndex].equals(val)) {
fail("Test failed in validating unnest output. Value mismatch. Baseline value[" + vectorIndex + "][" + valueIndex + "]" + ": " + baseline[batchIndex][vectorIndex][valueIndex] + " VV.getObject(valueIndex): " + val);
}
}
}
vectorIndex++;
}
vectorIndex = 0;
batchIndex++;
}
} catch (UserException e) {
// Valid exception
throw e;
} catch (Exception e) {
fail("Test failed. Exception : " + e.getMessage());
} finally {
// Close all the resources for this test case
unnestBatch.close();
lateralJoinBatch.close();
incomingMockBatch.close();
if (results != null) {
for (List<ValueVector> batch : results) {
for (ValueVector vv : batch) {
vv.clear();
}
}
}
for (RowSet.SingleRowSet rowSet : rowSets) {
rowSet.clear();
}
}
}
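Both test drivers loop on isTerminal(...) to decide when the simulated pipeline is exhausted. A plausible sketch of that helper; the exact terminal set is an assumption (older Drill versions also treated STOP as terminal):

// Sketch only: NONE signals that the upstream has no more batches.
private boolean isTerminal(RecordBatch.IterOutcome outcome) {
  return outcome == RecordBatch.IterOutcome.NONE;
}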