Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by axbaretto.
The class KuduScanBatchCreator, method getBatch:
@Override
public ScanBatch getBatch(ExecutorFragmentContext context, KuduSubScan subScan, List<RecordBatch> children) throws ExecutionSetupException {
  Preconditions.checkArgument(children.isEmpty());
  List<RecordReader> readers = new LinkedList<>();
  List<SchemaPath> columns = null;
  for (KuduSubScan.KuduSubScanSpec scanSpec : subScan.getTabletScanSpecList()) {
    try {
      // Default a missing projection list to "select all columns".
      if ((columns = subScan.getColumns()) == null) {
        columns = GroupScan.ALL_COLUMNS;
      }
      readers.add(new KuduRecordReader(subScan.getStorageEngine().getClient(), scanSpec, columns));
    } catch (Exception e1) {
      // Chain any reader-construction failure into the setup exception.
      throw new ExecutionSetupException(e1);
    }
  }
  return new ScanBatch(subScan, context, readers);
}
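This creator and the OpenTSDB one below share the same idiom: any failure while constructing a reader is chained into an ExecutionSetupException, so the fragment executor sees a single setup-failure type with the original cause preserved. A minimal, self-contained sketch of that wrap-and-rethrow idiom (openClient is a hypothetical helper used only to simulate a plugin failure; it is not part of Drill):
import org.apache.drill.common.exceptions.ExecutionSetupException;

public class SetupIdiomSketch {
  // Hypothetical helper simulating a storage-plugin call that can fail.
  static Object openClient() throws Exception {
    throw new Exception("connection refused");
  }

  static Object setup() throws ExecutionSetupException {
    try {
      return openClient();
    } catch (Exception e) {
      // Chain the cause so the reported setup error still names the
      // underlying plugin failure.
      throw new ExecutionSetupException(e);
    }
  }
}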
Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by axbaretto.
The class OpenTSDBBatchCreator, method getBatch:
@Override
public CloseableRecordBatch getBatch(ExecutorFragmentContext context, OpenTSDBSubScan subScan, List<RecordBatch> children) throws ExecutionSetupException {
  List<RecordReader> readers = new LinkedList<>();
  List<SchemaPath> columns;
  for (OpenTSDBSubScan.OpenTSDBSubScanSpec scanSpec : subScan.getTabletScanSpecList()) {
    try {
      // Default a missing projection list to "select all columns".
      if ((columns = subScan.getColumns()) == null) {
        columns = GroupScan.ALL_COLUMNS;
      }
      readers.add(new OpenTSDBRecordReader(subScan.getStorageEngine().getClient(), scanSpec, columns));
    } catch (Exception e) {
      // Chain any reader-construction failure into the setup exception.
      throw new ExecutionSetupException(e);
    }
  }
  return new ScanBatch(subScan, context, readers);
}
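Both getBatch implementations also normalize a missing projection list to GroupScan.ALL_COLUMNS before constructing readers, so a null column list means "scan everything". A minimal sketch of that defaulting step, assuming only the Drill classes named here (the class and method names are illustrative):
import java.util.List;

import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.physical.base.GroupScan;

public class ColumnDefaultingSketch {
  // A null projection is replaced by the shared ALL_COLUMNS sentinel,
  // mirroring the check inside both getBatch methods above.
  static List<SchemaPath> normalize(List<SchemaPath> columns) {
    return columns == null ? GroupScan.ALL_COLUMNS : columns;
  }
}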
Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by axbaretto.
The class DynamicRootSchema, method loadSchemaFactory:
/**
 * Loads the schema factory (storage plugin) for the given schema name.
 * @param schemaName name of the schema to load, e.g. 'dfs' or 'dfs.tmp'
 * @param caseSensitive whether the schema-name lookup is case sensitive
 */
public void loadSchemaFactory(String schemaName, boolean caseSensitive) {
  try {
    SchemaPlus thisPlus = this.plus();
    StoragePlugin plugin = getSchemaFactories().getPlugin(schemaName);
    if (plugin != null) {
      plugin.registerSchemas(schemaConfig, thisPlus);
      return;
    }
    // Could not find a plugin named schemaName. The name could be `dfs.tmp`,
    // a second-level schema under 'dfs'.
    String[] paths = schemaName.split("\\.");
    if (paths.length == 2) {
      plugin = getSchemaFactories().getPlugin(paths[0]);
      if (plugin == null) {
        return;
      }
      // Found the storage plugin for the first part (e.g. 'dfs') of
      // schemaName (e.g. 'dfs.tmp'); register its schema under 'this'.
      plugin.registerSchemas(schemaConfig, thisPlus);
      // Load the second-level schemas for this storage plugin.
      final SchemaPlus firstLevelSchema = thisPlus.getSubSchema(paths[0]);
      final List<SchemaPlus> secondLevelSchemas = Lists.newArrayList();
      for (String secondLevelSchemaName : firstLevelSchema.getSubSchemaNames()) {
        secondLevelSchemas.add(firstLevelSchema.getSubSchema(secondLevelSchemaName));
      }
      for (SchemaPlus schema : secondLevelSchemas) {
        org.apache.drill.exec.store.AbstractSchema drillSchema;
        try {
          drillSchema = schema.unwrap(org.apache.drill.exec.store.AbstractSchema.class);
        } catch (ClassCastException e) {
          throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
        }
        SubSchemaWrapper wrapper = new SubSchemaWrapper(drillSchema);
        thisPlus.add(wrapper.getName(), wrapper);
      }
    }
  } catch (ExecutionSetupException | IOException ex) {
    logger.warn("Failed to load schema for \"" + schemaName + "\"!", ex);
  }
}
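The fallback branch hinges on splitting the schema name on dots: a two-part name such as 'dfs.tmp' resolves the plugin from the first part and then registers that plugin's second-level schemas. A dependency-free sketch of just that resolution step (the class and method names are illustrative, not from Drill):
public class SchemaNameSketch {
  // Splits "dfs.tmp" into { "dfs", "tmp" }; any name that does not
  // split into exactly two parts is treated as a single-level schema,
  // matching the paths.length == 2 check in loadSchemaFactory.
  static String[] resolveParts(String schemaName) {
    String[] paths = schemaName.split("\\.");
    return paths.length == 2 ? paths : new String[] { schemaName };
  }
}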
Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by axbaretto.
The class MockRecordReader, method setup:
@Override
public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException {
  try {
    // Guard against a missing type list before estimating the row size.
    if (config.getTypes() == null) {
      return;
    }
    final int estimateRowSize = getEstimatedRecordSize(config.getTypes());
    valueVectors = new ValueVector[config.getTypes().length];
    // Size each batch to roughly 250000 bytes of estimated row data.
    batchRecordCount = 250000 / estimateRowSize;
    for (int i = 0; i < config.getTypes().length; i++) {
      final MajorType type = config.getTypes()[i].getMajorType();
      final MaterializedField field = getVector(config.getTypes()[i].getName(), type);
      final Class<? extends ValueVector> vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode());
      valueVectors[i] = output.addField(field, vvClass);
    }
  } catch (SchemaChangeException e) {
    throw new ExecutionSetupException("Failure while setting up fields", e);
  }
}
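The batch size here is pure arithmetic: a fixed byte budget of 250000 divided by the estimated row width, so wider rows yield smaller batches (for example, 50-byte rows give 5000 records per batch). A minimal sketch of that sizing rule (the class, method, and constant names are illustrative):
public class BatchSizingSketch {
  // Same arithmetic as setup(): records per batch = byte budget / row width.
  static int batchRecordCount(int estimatedRowSizeBytes) {
    final int BYTE_BUDGET = 250000; // budget used in MockRecordReader.setup()
    return BYTE_BUDGET / estimatedRowSizeBytes; // e.g. 50 -> 5000 records
  }
}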