Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
The class HBasePersistentStoreProvider, method start().
@Override
public void start() throws IOException {
  this.connection = ConnectionFactory.createConnection(hbaseConf);
  try (Admin admin = connection.getAdmin()) {
    if (!admin.tableExists(hbaseTableName)) {
      // Create the persistent-store table with a single-version column family.
      HTableDescriptor desc = new HTableDescriptor(hbaseTableName);
      desc.addFamily(new HColumnDescriptor(FAMILY).setMaxVersions(1));
      admin.createTable(desc);
    } else {
      // The table already exists; verify that it carries the expected column family.
      HTableDescriptor desc = admin.getTableDescriptor(hbaseTableName);
      if (!desc.hasFamily(FAMILY)) {
        throw new DrillRuntimeException("The HBase table " + hbaseTableName
            + " specified as persistent store exists but does not contain column family: "
            + Bytes.toString(FAMILY));
      }
    }
  }
  this.hbaseTable = connection.getTable(hbaseTableName);
}
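A minimal caller sketch of this pattern, assuming a provider instance and a logger already exist; neither name below is taken from the Drill source:

  // Hypothetical caller: start() reports a misconfigured persistent-store table as an
  // unchecked DrillRuntimeException, so the usual reaction is to fail fast.
  try {
    provider.start();
  } catch (DrillRuntimeException e) {
    logger.error("HBase persistent store is misconfigured", e);
    throw e;
  }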
Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
The class AvroRecordReader, method processPrimitive().
private void processPrimitive(final Object value, final Schema.Type type, final String fieldName,
    final MapOrListWriterImpl writer) {
  if (value == null) {
    return;
  }
  switch (type) {
    case STRING:
      byte[] binary = null;
      final int length;
      if (value instanceof Utf8) {
        binary = ((Utf8) value).getBytes();
        length = ((Utf8) value).getByteLength();
      } else {
        binary = value.toString().getBytes(Charsets.UTF_8);
        length = binary.length;
      }
      ensure(length);
      buffer.setBytes(0, binary);
      writer.varChar(fieldName).writeVarChar(0, length, buffer);
      break;
    case INT:
      writer.integer(fieldName).writeInt((Integer) value);
      break;
    case LONG:
      writer.bigInt(fieldName).writeBigInt((Long) value);
      break;
    case FLOAT:
      writer.float4(fieldName).writeFloat4((Float) value);
      break;
    case DOUBLE:
      writer.float8(fieldName).writeFloat8((Double) value);
      break;
    case BOOLEAN:
      writer.bit(fieldName).writeBit((Boolean) value ? 1 : 0);
      break;
    case BYTES:
      final ByteBuffer buf = (ByteBuffer) value;
      length = buf.remaining();
      ensure(length);
      buffer.setBytes(0, buf);
      writer.binary(fieldName).writeVarBinary(0, length, buffer);
      break;
    case NULL:
      // Nothing to do for null type
      break;
    case ENUM:
      final String symbol = value.toString();
      final byte[] b;
      try {
        b = symbol.getBytes("UTF-8");
      } catch (UnsupportedEncodingException e) {
        throw new DrillRuntimeException("Unable to read enum value for field: " + fieldName, e);
      }
      ensure(b.length);
      buffer.setBytes(0, b);
      writer.varChar(fieldName).writeVarChar(0, b.length, buffer);
      break;
    default:
      throw new DrillRuntimeException("Unhandled Avro type: " + type.toString());
  }
}
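The ENUM branch only wraps because String.getBytes(String) declares the checked UnsupportedEncodingException. As a hedged alternative sketch (not the Drill implementation), java.nio.charset.StandardCharsets removes the need for the wrapper while producing the same UTF-8 bytes; ensure, buffer, writer, fieldName, and value refer to the members and parameters of the method above:

  // Illustrative only: StandardCharsets.UTF_8 never throws, so there is no checked exception to wrap.
  final byte[] utf8 = value.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
  ensure(utf8.length);
  buffer.setBytes(0, utf8);
  writer.varChar(fieldName).writeVarChar(0, utf8.length, buffer);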
Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
The class AvroRecordReader, method next().
@Override
public int next() {
  final Stopwatch watch = Stopwatch.createStarted();
  if (reader == null) {
    throw new IllegalStateException("Avro reader is not open.");
  }
  if (!reader.hasNext()) {
    return 0;
  }
  int recordCount = 0;
  writer.allocate();
  writer.reset();
  try {
    for (GenericContainer container = null;
         recordCount < DEFAULT_BATCH_SIZE && reader.hasNext() && !reader.pastSync(end);
         recordCount++) {
      writer.setPosition(recordCount);
      container = reader.next(container);
      processRecord(container, container.getSchema());
    }
    writer.setValueCount(recordCount);
  } catch (IOException e) {
    throw new DrillRuntimeException(e);
  }
  logger.debug("Read {} records in {} ms", recordCount, watch.elapsed(TimeUnit.MILLISECONDS));
  return recordCount;
}
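A short sketch of the contract this method implies: next() writes at most DEFAULT_BATCH_SIZE records into the current batch and returns 0 once the Avro file is exhausted. The reader variable and the downstream handler below are assumptions for illustration:

  // Hypothetical driver loop; processBatch stands in for whatever consumes the written vectors.
  int records;
  while ((records = avroRecordReader.next()) > 0) {
    processBatch(records);
  }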
Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
The class BsonRecordReader, method write().
public void write(ComplexWriter writer, BsonReader reader) throws IOException {
  reader.readStartDocument();
  BsonType readBsonType = reader.getCurrentBsonType();
  switch (readBsonType) {
    case DOCUMENT:
      writeToListOrMap(reader, new MapOrListWriterImpl(writer.rootAsMap()), false, null);
      break;
    default:
      throw new DrillRuntimeException("Root object must be DOCUMENT type. Found: " + readBsonType);
  }
}
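A brief usage sketch under assumptions: the org.bson classes below come from the MongoDB BSON library that BsonReader belongs to, and complexWriter is assumed to be an existing ComplexWriter. A root-level document is accepted; anything else trips the DrillRuntimeException above.

  // Illustrative only: parse a small document and hand it to the record reader.
  BsonReader docReader = new BsonDocumentReader(BsonDocument.parse("{\"a\": 1}"));
  bsonRecordReader.write(complexWriter, docReader);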
Use of org.apache.drill.common.exceptions.DrillRuntimeException in project drill by apache.
The class FileSelection, method createFromDirectories().
public static FileSelection createFromDirectories(final List<String> dirPaths, final FileSelection selection,
    final String cacheFileRoot) {
  Stopwatch timer = Stopwatch.createStarted();
  final String root = selection.getSelectionRoot();
  if (Strings.isNullOrEmpty(root)) {
    throw new DrillRuntimeException("Selection root is null or empty: " + root);
  }
  if (dirPaths == null || dirPaths.isEmpty()) {
    throw new DrillRuntimeException("List of directories is null or empty");
  }
  List<String> dirs = Lists.newArrayList();
  if (selection.hadWildcard()) {
    // for wildcard the directory list should have already been expanded
    for (FileStatus status : selection.getFileStatuses()) {
      dirs.add(status.getPath().toString());
    }
  } else {
    dirs.addAll(dirPaths);
  }
  final Path rootPath = handleWildCard(root);
  // final URI uri = dirPaths.get(0).toUri();
  final URI uri = selection.getFileStatuses().get(0).getPath().toUri();
  final Path path = new Path(uri.getScheme(), uri.getAuthority(), rootPath.toUri().getPath());
  FileSelection fileSel = new FileSelection(null, dirs, path.toString(), cacheFileRoot, false);
  fileSel.setHadWildcard(selection.hadWildcard());
  logger.info("FileSelection.createFromDirectories() took {} ms", timer.elapsed(TimeUnit.MILLISECONDS));
  return fileSel;
}
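A hedged illustration of the path rewrite near the end of the method; the literals are invented, and the classes are the Hadoop Path and java.net.URI already used above:

  // Illustrative only: rebuild the selection root with the scheme and authority of the first file.
  URI uri = new Path("hdfs://namenode:8020/data/logs/1996/file.parquet").toUri();
  Path rootPath = new Path("/data/logs");
  Path rebuilt = new Path(uri.getScheme(), uri.getAuthority(), rootPath.toUri().getPath());
  // rebuilt -> hdfs://namenode:8020/data/logs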