
Example 1 with UserException

Use of org.apache.drill.common.exceptions.UserException in project drill by apache.

The class AsyncPageReader, method readDictionaryPageData.

// Read and decode the dictionary data
private void readDictionaryPageData(final ReadStatus readStatus, final ColumnReader<?> parentStatus) throws UserException {
    try {
        pageHeader = readStatus.getPageHeader();
        int uncompressedSize = pageHeader.getUncompressed_page_size();
        final DrillBuf dictionaryData = getDecompressedPageData(readStatus);
        Stopwatch timer = Stopwatch.createStarted();
        // Track the buffer so it can be released when the reader is closed
        allocatedDictionaryBuffers.add(dictionaryData);
        // valueOf() is the statically imported org.apache.parquet.column.Encoding.valueOf
        DictionaryPage page = new DictionaryPage(
                asBytesInput(dictionaryData, 0, uncompressedSize),
                pageHeader.uncompressed_page_size,
                pageHeader.dictionary_page_header.num_values,
                valueOf(pageHeader.dictionary_page_header.encoding.name()));
        this.dictionary = page.getEncoding().initDictionary(parentStatus.columnDescriptor, page);
        long timeToDecode = timer.elapsed(TimeUnit.NANOSECONDS);
        stats.timeDictPageDecode.addAndGet(timeToDecode);
    } catch (Exception e) {
        // Any failure, checked or unchecked, is converted to a UserException here
        handleAndThrowException(e, "Error decoding dictionary page.");
    }
}
Also used : Stopwatch(com.google.common.base.Stopwatch) DictionaryPage(org.apache.parquet.column.page.DictionaryPage) UserException(org.apache.drill.common.exceptions.UserException) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException) ExecutionSetupException(org.apache.drill.common.exceptions.ExecutionSetupException) IOException(java.io.IOException) DrillBuf(io.netty.buffer.DrillBuf)
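
The handleAndThrowException helper is not reproduced on this page. A minimal sketch of the wrap-and-rethrow pattern it follows, assuming a DATA_READ error category and an slf4j logger (the class name and the exact context added are assumptions, not the actual Drill code):

import org.apache.drill.common.exceptions.UserException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PageReaderErrors {
    private static final Logger logger = LoggerFactory.getLogger(PageReaderErrors.class);

    // Illustrative helper: convert any failure into a DATA_READ UserException.
    // The real Drill method likely attaches more context (file path, column, etc.).
    public static void handleAndThrowException(Exception e, String message) {
        throw UserException.dataReadError(e)
            .message(message)
            .build(logger);   // logs the error and returns the built UserException
    }
}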

Example 2 with UserException

Use of org.apache.drill.common.exceptions.UserException in project drill by axbaretto.

The class MaprDBJsonRecordReader, method next.

@Override
public int next() {
    Stopwatch watch = Stopwatch.createUnstarted();
    watch.start();
    vectorWriter.allocate();
    vectorWriter.reset();
    int recordCount = 0;
    DBDocumentReaderBase reader = null;
    while (recordCount < BaseValueVector.INITIAL_VALUE_ALLOCATION) {
        vectorWriter.setPosition(recordCount);
        try {
            reader = nextDocumentReader();
            if (reader == null) {
                // no more documents for this scanner
                break;
            } else if (isSkipQuery()) {
                vectorWriter.rootAsMap().bit("count").writeBit(1);
            } else {
                MapOrListWriterImpl writer = new MapOrListWriterImpl(vectorWriter.rootAsMap());
                if (idOnly) {
                    writeId(writer, reader.getId());
                } else {
                    if (reader.next() != EventType.START_MAP) {
                        throw dataReadError("The document did not start with START_MAP!");
                    }
                    writeToListOrMap(writer, reader);
                }
            }
            recordCount++;
        } catch (UserException e) {
            // Re-wrap with table and document id context so the failing row can be located
            throw UserException.unsupportedError(e)
                .addContext(String.format("Table: %s, document id: '%s'", table.getPath(),
                    reader == null ? null : IdCodec.asString(reader.getId())))
                .build(logger);
        } catch (SchemaChangeException e) {
            if (ignoreSchemaChange) {
                logger.warn("{}. Dropping the row from result.", e.getMessage());
                logger.debug("Stack trace:", e);
            } else {
                throw dataReadError(e);
            }
        }
    }
    if (nonExistentColumnsProjection && recordCount > 0) {
        JsonReaderUtils.ensureAtLeastOneField(vectorWriter, getColumns(), allTextMode, Collections.EMPTY_LIST);
    }
    vectorWriter.setValueCount(recordCount);
    logger.debug("Took {} ms to get {} records", watch.elapsed(TimeUnit.MILLISECONDS), recordCount);
    return recordCount;
}
Also used : SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) DBDocumentReaderBase(com.mapr.db.ojai.DBDocumentReaderBase) MapOrListWriterImpl(org.apache.drill.exec.vector.complex.impl.MapOrListWriterImpl) Stopwatch(com.google.common.base.Stopwatch) UserException(org.apache.drill.common.exceptions.UserException)
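
The dataReadError helper called in this method is not shown on this page. A plausible minimal sketch, assuming it builds a DATA_READ UserException for the caller to throw (the class name and overloads are illustrative):

import org.apache.drill.common.exceptions.UserException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MaprDbReaderErrors {
    private static final Logger logger = LoggerFactory.getLogger(MaprDbReaderErrors.class);

    // Illustrative: build (not throw) a DATA_READ UserException from a message,
    // so the call site can write "throw dataReadError(...)".
    public static UserException dataReadError(String message) {
        return UserException.dataReadError()
            .message(message)
            .build(logger);
    }

    // Illustrative overload for wrapping a cause such as SchemaChangeException.
    public static UserException dataReadError(Throwable cause) {
        return UserException.dataReadError(cause)
            .build(logger);
    }
}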

Example 3 with UserException

Use of org.apache.drill.common.exceptions.UserException in project drill by axbaretto.

The class PreProcessLogicalRel, method getConvertFunctionException.

private UserException getConvertFunctionException(final String functionName, final String typeName) {
    final String newFunctionName = functionName + typeName;
    final String typeNameToPrint = typeName.length() == 0 ? "<empty_string>" : typeName;
    final UserException.Builder exceptionBuilder = UserException.unsupportedError()
        .message("%s does not support conversion %s type '%s'.",
            functionName, functionName.substring(8).toLowerCase(), typeNameToPrint);
    // Suggest the closest matching CONVERT function, if any, to make the error message friendlier
    if (typeName.length() > 0) {
        List<String> ops = new ArrayList<>();
        for (SqlOperator op : table.getOperatorList()) {
            ops.add(op.getName());
        }
        final String bestMatch = ApproximateStringMatcher.getBestMatch(ops, newFunctionName);
        if (bestMatch != null && bestMatch.length() > functionName.length() && bestMatch.toLowerCase().startsWith("convert")) {
            final StringBuilder s = new StringBuilder("Did you mean ")
                .append(bestMatch.substring(functionName.length()))
                .append("?");
            exceptionBuilder.addContext(s.toString());
        }
    }
    return exceptionBuilder.build(logger);
}
Also used : SqlOperator(org.apache.calcite.sql.SqlOperator) ArrayList(java.util.ArrayList) NlsString(org.apache.calcite.util.NlsString) UserException(org.apache.drill.common.exceptions.UserException)
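
For context, this builder is meant to be thrown while rewriting CONVERT_FROM/CONVERT_TO calls. A hedged usage sketch of a hypothetical call site in the same class (the validation method and the supportedTypes set are assumptions; only getConvertFunctionException comes from the code above):

// Illustrative call site, not the actual PreProcessLogicalRel code.
// For functionName = "CONVERT_FROM" and typeName = "JSONN",
// "CONVERT_FROM".substring(8) is "FROM", so the message reads:
//   CONVERT_FROM does not support conversion from type 'JSONN'.
private void validateConvertCall(String functionName, String typeName, java.util.Set<String> supportedTypes) {
    if (!supportedTypes.contains(typeName)) {
        throw getConvertFunctionException(functionName, typeName);
    }
}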

Example 4 with UserException

Use of org.apache.drill.common.exceptions.UserException in project drill by axbaretto.

The class AsyncPageReader, method readDictionaryPageData.

// Read and decode the dictionary data
private void readDictionaryPageData(final ReadStatus readStatus, final ColumnReader<?> parentStatus) throws UserException {
    try {
        pageHeader = readStatus.getPageHeader();
        int uncompressedSize = pageHeader.getUncompressed_page_size();
        final DrillBuf dictionaryData = getDecompressedPageData(readStatus);
        Stopwatch timer = Stopwatch.createStarted();
        // Track the buffer so it can be released when the reader is closed
        allocatedDictionaryBuffers.add(dictionaryData);
        // valueOf() is the statically imported org.apache.parquet.column.Encoding.valueOf
        DictionaryPage page = new DictionaryPage(
                asBytesInput(dictionaryData, 0, uncompressedSize),
                pageHeader.uncompressed_page_size,
                pageHeader.dictionary_page_header.num_values,
                valueOf(pageHeader.dictionary_page_header.encoding.name()));
        this.dictionary = page.getEncoding().initDictionary(parentStatus.columnDescriptor, page);
        long timeToDecode = timer.elapsed(TimeUnit.NANOSECONDS);
        stats.timeDictPageDecode.addAndGet(timeToDecode);
    } catch (Exception e) {
        // Any failure, checked or unchecked, is converted to a UserException here
        handleAndThrowException(e, "Error decoding dictionary page.");
    }
}
Also used : Stopwatch(com.google.common.base.Stopwatch) DictionaryPage(org.apache.parquet.column.page.DictionaryPage) UserException(org.apache.drill.common.exceptions.UserException) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException) ExecutionSetupException(org.apache.drill.common.exceptions.ExecutionSetupException) IOException(java.io.IOException) EOFException(java.io.EOFException) ExecutionException(java.util.concurrent.ExecutionException) DrillBuf(io.netty.buffer.DrillBuf)
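
The asBytesInput helper used when building the DictionaryPage is also not shown. A simple, copying sketch of what it has to do, namely expose a region of the DrillBuf as Parquet's BytesInput (this is an illustration only; the actual Drill helper may avoid the copy):

import io.netty.buffer.DrillBuf;
import org.apache.parquet.bytes.BytesInput;

// Illustrative: copy a DrillBuf region into a byte[] and hand it to Parquet as BytesInput.
public static BytesInput asBytesInput(DrillBuf buf, int offset, int length) {
    byte[] bytes = new byte[length];
    buf.getBytes(offset, bytes);   // copies 'length' bytes starting at 'offset'
    return BytesInput.from(bytes);
}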

Example 5 with UserException

Use of org.apache.drill.common.exceptions.UserException in project drill by axbaretto.

The class TestResultSetLoaderOverflow, method testOversizeArray.

/**
 * Case where a single array fills up the vector to the maximum size
 * limit. Overflow won't work here; the attempt will fail with a user
 * exception.
 */
@Test
public void testOversizeArray() {
    TupleMetadata schema = new SchemaBuilder().addArray("s", MinorType.VARCHAR).buildSchema();
    ResultSetOptions options = new OptionBuilder().setRowCountLimit(ValueVector.MAX_ROW_COUNT).setSchema(schema).build();
    ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
    RowSetLoader rootWriter = rsLoader.writer();
    // Create a single array as the column value in the first row. When
    // this overflows, an exception is thrown since overflow is not possible.
    rsLoader.startBatch();
    byte[] value = new byte[473];
    Arrays.fill(value, (byte) 'X');
    rootWriter.start();
    ScalarWriter array = rootWriter.array(0).scalar();
    try {
        for (int i = 0; i < ValueVector.MAX_ROW_COUNT; i++) {
            array.setBytes(value, value.length);
        }
        fail();
    } catch (UserException e) {
        assertTrue(e.getMessage().contains("column value is larger than the maximum"));
    }
    rsLoader.close();
}
Also used : ResultSetLoader(org.apache.drill.exec.physical.rowSet.ResultSetLoader) TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.test.rowSet.schema.SchemaBuilder) UserException(org.apache.drill.common.exceptions.UserException) RowSetLoader(org.apache.drill.exec.physical.rowSet.RowSetLoader) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter) ResultSetOptions(org.apache.drill.exec.physical.rowSet.impl.ResultSetLoaderImpl.ResultSetOptions) SubOperatorTest(org.apache.drill.test.SubOperatorTest) Test(org.junit.Test)
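
When asserting on a UserException in a test like this, checking the error category via getErrorType() is often more robust than matching message text. A hedged sketch (the writeOversizeArray helper and the expected category are assumptions, not part of the test above):

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType;

@Test
public void testOversizeArrayErrorType() {
    try {
        // writeOversizeArray() is a hypothetical helper containing the setBytes loop shown above
        writeOversizeArray();
        fail("Expected a UserException");
    } catch (UserException e) {
        // The expected category is an assumption; adjust to what the result set loader actually reports
        assertEquals(ErrorType.UNSUPPORTED_OPERATION, e.getErrorType());
    }
}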

Aggregations

UserException (org.apache.drill.common.exceptions.UserException): 102
Test (org.junit.Test): 76
EvfTest (org.apache.drill.categories.EvfTest): 39
TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata): 30
SubOperatorTest (org.apache.drill.test.SubOperatorTest): 30
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder): 28
RowBatchReader (org.apache.drill.exec.physical.impl.scan.RowBatchReader): 12
ManagedReader (org.apache.drill.exec.physical.impl.scan.v3.ManagedReader): 12
ScanLifecycleBuilder (org.apache.drill.exec.physical.impl.scan.v3.ScanLifecycleBuilder): 11
SchemaNegotiator (org.apache.drill.exec.physical.impl.scan.v3.SchemaNegotiator): 11
ScanOperatorExec (org.apache.drill.exec.physical.impl.scan.ScanOperatorExec): 9
ScanFixture (org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture): 9
SchemaPath (org.apache.drill.common.expression.SchemaPath): 8
ResultSetOptions (org.apache.drill.exec.physical.resultSet.impl.ResultSetLoaderImpl.ResultSetOptions): 8
RowSet (org.apache.drill.exec.physical.rowSet.RowSet): 8
MockRecordBatch (org.apache.drill.exec.physical.impl.MockRecordBatch): 6
ArrayList (java.util.ArrayList): 5
OperatorTest (org.apache.drill.categories.OperatorTest): 5
DrillException (org.apache.drill.common.exceptions.DrillException): 5
BaseTest (org.apache.drill.test.BaseTest): 5