use of org.apache.drill.common.exceptions.UserException in project drill by apache.
the class AsyncPageReader method readDictionaryPageData.
// Read and decode the dictionary data
private void readDictionaryPageData(final ReadStatus readStatus, final ColumnReader<?> parentStatus)
    throws UserException {
  try {
    pageHeader = readStatus.getPageHeader();
    int uncompressedSize = pageHeader.getUncompressed_page_size();
    final DrillBuf dictionaryData = getDecompressedPageData(readStatus);
    Stopwatch timer = Stopwatch.createStarted();
    allocatedDictionaryBuffers.add(dictionaryData);
    DictionaryPage page = new DictionaryPage(
        asBytesInput(dictionaryData, 0, uncompressedSize),
        pageHeader.uncompressed_page_size,
        pageHeader.dictionary_page_header.num_values,
        valueOf(pageHeader.dictionary_page_header.encoding.name()));
    this.dictionary = page.getEncoding().initDictionary(parentStatus.columnDescriptor, page);
    long timeToDecode = timer.elapsed(TimeUnit.NANOSECONDS);
    stats.timeDictPageDecode.addAndGet(timeToDecode);
  } catch (Exception e) {
    handleAndThrowException(e, "Error decoding dictionary page.");
  }
}
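The helper handleAndThrowException is not reproduced on this page. As a rough sketch of the kind of wrapping such a helper performs with the UserException builder API (the class name, method name, error category, and context string below are illustrative assumptions, not taken from AsyncPageReader):

import org.apache.drill.common.exceptions.UserException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DictionaryDecodeErrorSketch {
  private static final Logger logger = LoggerFactory.getLogger(DictionaryDecodeErrorSketch.class);

  // Illustrative only: wrap a low-level failure into a categorized UserException.
  static void handleAndThrowExceptionSketch(Exception e, String message) {
    throw UserException.dataReadError(e)   // assumed category: data-read failure
        .message(message)                  // e.g. "Error decoding dictionary page."
        .addContext("Failure while decoding a Parquet dictionary page")
        .build(logger);                    // logs the error and returns the UserException to throw
  }
}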
use of org.apache.drill.common.exceptions.UserException in project drill by axbaretto.
the class MaprDBJsonRecordReader method next.
@Override
public int next() {
  Stopwatch watch = Stopwatch.createUnstarted();
  watch.start();
  vectorWriter.allocate();
  vectorWriter.reset();
  int recordCount = 0;
  DBDocumentReaderBase reader = null;
  while (recordCount < BaseValueVector.INITIAL_VALUE_ALLOCATION) {
    vectorWriter.setPosition(recordCount);
    try {
      reader = nextDocumentReader();
      if (reader == null) {
        // no more documents for this scanner
        break;
      } else if (isSkipQuery()) {
        vectorWriter.rootAsMap().bit("count").writeBit(1);
      } else {
        MapOrListWriterImpl writer = new MapOrListWriterImpl(vectorWriter.rootAsMap());
        if (idOnly) {
          writeId(writer, reader.getId());
        } else {
          if (reader.next() != EventType.START_MAP) {
            throw dataReadError("The document did not start with START_MAP!");
          }
          writeToListOrMap(writer, reader);
        }
      }
      recordCount++;
    } catch (UserException e) {
      throw UserException.unsupportedError(e)
          .addContext(String.format("Table: %s, document id: '%s'",
              table.getPath(),
              reader == null ? null : IdCodec.asString(reader.getId())))
          .build(logger);
    } catch (SchemaChangeException e) {
      if (ignoreSchemaChange) {
        logger.warn("{}. Dropping the row from result.", e.getMessage());
        logger.debug("Stack trace:", e);
      } else {
        throw dataReadError(e);
      }
    }
  }
  if (nonExistentColumnsProjection && recordCount > 0) {
    JsonReaderUtils.ensureAtLeastOneField(vectorWriter, getColumns(), allTextMode, Collections.EMPTY_LIST);
  }
  vectorWriter.setValueCount(recordCount);
  logger.debug("Took {} ms to get {} records", watch.elapsed(TimeUnit.MILLISECONDS), recordCount);
  return recordCount;
}
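The dataReadError(...) calls above go through a small helper on the reader whose body is not shown here. A plausible sketch, using only the UserException builder methods that appear elsewhere on this page (the class name and exact signatures are assumptions):

import org.apache.drill.common.exceptions.UserException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class DataReadErrorSketch {
  private static final Logger logger = LoggerFactory.getLogger(DataReadErrorSketch.class);

  // Illustrative helper: build (but do not throw) a data-read UserException from a format string.
  static UserException dataReadError(String format, Object... args) {
    return UserException.dataReadError()
        .message(format, args)
        .build(logger);
  }

  // Illustrative helper: wrap an underlying cause such as a SchemaChangeException.
  static UserException dataReadError(Throwable cause) {
    return UserException.dataReadError(cause)
        .build(logger);
  }
}

Returning the built exception rather than throwing inside the helper lets the call sites above keep an explicit throw, which keeps the control flow visible to the compiler.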
use of org.apache.drill.common.exceptions.UserException in project drill by axbaretto.
the class PreProcessLogicalRel method getConvertFunctionException.
private UserException getConvertFunctionException(final String functionName, final String typeName) {
  final String newFunctionName = functionName + typeName;
  final String typeNameToPrint = typeName.length() == 0 ? "<empty_string>" : typeName;
  final UserException.Builder exceptionBuilder = UserException.unsupportedError()
      .message("%s does not support conversion %s type '%s'.",
          functionName, functionName.substring(8).toLowerCase(), typeNameToPrint);
  // Build a nice error message
  if (typeName.length() > 0) {
    List<String> ops = new ArrayList<>();
    for (SqlOperator op : table.getOperatorList()) {
      ops.add(op.getName());
    }
    final String bestMatch = ApproximateStringMatcher.getBestMatch(ops, newFunctionName);
    if (bestMatch != null && bestMatch.length() > functionName.length()
        && bestMatch.toLowerCase().startsWith("convert")) {
      final StringBuilder s = new StringBuilder("Did you mean ")
          .append(bestMatch.substring(functionName.length()))
          .append("?");
      exceptionBuilder.addContext(s.toString());
    }
  }
  return exceptionBuilder.build(logger);
}
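As a worked example (the names here are assumptions for illustration): with functionName = "CONVERT_FROM" and typeName = "FOO", functionName.substring(8).toLowerCase() strips the leading "CONVERT_" and yields "from", so the message reads "CONVERT_FROM does not support conversion from type 'FOO'." The matcher then looks for the registered operator name closest to "CONVERT_FROMFOO"; if the operator table contains something like CONVERT_FROMJSON, bestMatch.substring(functionName.length()) is "JSON" and the builder gains the extra context "Did you mean JSON?" before build(logger) is called.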
use of org.apache.drill.common.exceptions.UserException in project drill by axbaretto.
the class AsyncPageReader method readDictionaryPageData.
// Read and decode the dictionary data
private void readDictionaryPageData(final ReadStatus readStatus, final ColumnReader<?> parentStatus)
    throws UserException {
  try {
    pageHeader = readStatus.getPageHeader();
    int uncompressedSize = pageHeader.getUncompressed_page_size();
    final DrillBuf dictionaryData = getDecompressedPageData(readStatus);
    Stopwatch timer = Stopwatch.createStarted();
    allocatedDictionaryBuffers.add(dictionaryData);
    DictionaryPage page = new DictionaryPage(
        asBytesInput(dictionaryData, 0, uncompressedSize),
        pageHeader.uncompressed_page_size,
        pageHeader.dictionary_page_header.num_values,
        valueOf(pageHeader.dictionary_page_header.encoding.name()));
    this.dictionary = page.getEncoding().initDictionary(parentStatus.columnDescriptor, page);
    long timeToDecode = timer.elapsed(TimeUnit.NANOSECONDS);
    stats.timeDictPageDecode.addAndGet(timeToDecode);
  } catch (Exception e) {
    handleAndThrowException(e, "Error decoding dictionary page.");
  }
}
use of org.apache.drill.common.exceptions.UserException in project drill by axbaretto.
the class TestResultSetLoaderOverflow method testOversizeArray.
/**
 * Case where a single array fills up the vector to the maximum size
 * limit. Overflow won't work here; the attempt will fail with a user
 * exception.
 */
@Test
public void testOversizeArray() {
  TupleMetadata schema = new SchemaBuilder()
      .addArray("s", MinorType.VARCHAR)
      .buildSchema();
  ResultSetOptions options = new OptionBuilder()
      .setRowCountLimit(ValueVector.MAX_ROW_COUNT)
      .setSchema(schema)
      .build();
  ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
  RowSetLoader rootWriter = rsLoader.writer();
  // Create a single array as the column value in the first row. When
  // this overflows, an exception is thrown since overflow is not possible.
  rsLoader.startBatch();
  byte[] value = new byte[473];
  Arrays.fill(value, (byte) 'X');
  rootWriter.start();
  ScalarWriter array = rootWriter.array(0).scalar();
  try {
    for (int i = 0; i < ValueVector.MAX_ROW_COUNT; i++) {
      array.setBytes(value, value.length);
    }
    fail();
  } catch (UserException e) {
    assertTrue(e.getMessage().contains("column value is larger than the maximum"));
  }
  rsLoader.close();
}
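For reference, the category of error the test catches can be reproduced in isolation with the public UserException builder. A minimal, self-contained sketch (the error category, message wording, and class name are assumptions, not taken from the ResultSetLoader implementation):

import org.apache.drill.common.exceptions.UserException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class OversizeValueSketch {
  private static final Logger logger = LoggerFactory.getLogger(OversizeValueSketch.class);

  public static void main(String[] args) {
    try {
      // Illustrative only: signal that a single column value cannot fit into a
      // value vector; Drill's actual category and wording may differ.
      throw UserException.unsupportedError()
          .message("A single column value is larger than the maximum size of a value vector")
          .addContext("Column: s")
          .build(logger);
    } catch (UserException e) {
      // getMessage() returns the formatted message that an assertion like the one above inspects.
      System.out.println(e.getMessage());
    }
  }
}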