Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.
The class HBaseScanBatchCreator, method getBatch.
@Override
public ScanBatch getBatch(FragmentContext context, HBaseSubScan subScan, List<RecordBatch> children) throws ExecutionSetupException {
  Preconditions.checkArgument(children.isEmpty());
  List<RecordReader> readers = Lists.newArrayList();
  List<SchemaPath> columns = null;
  // One record reader per HBase region in the sub-scan.
  for (HBaseSubScan.HBaseSubScanSpec scanSpec : subScan.getRegionScanSpecList()) {
    try {
      if ((columns = subScan.getColumns()) == null) {
        columns = GroupScan.ALL_COLUMNS;
      }
      readers.add(new HBaseRecordReader(subScan.getStorageEngine().getConnection(), scanSpec, columns, context));
    } catch (Exception e1) {
      throw new ExecutionSetupException(e1);
    }
  }
  return new ScanBatch(subScan, context, readers.iterator());
}
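The pattern here recurs throughout these examples: any checked exception thrown while constructing readers is caught and rethrown wrapped in ExecutionSetupException, so callers deal with a single setup-failure type. A minimal, self-contained sketch of the idiom (the method names openConnection and setupReaders are stand-ins, not Drill code):

import org.apache.drill.common.exceptions.ExecutionSetupException;

import java.io.IOException;

public class WrapExample {

  // Stand-in for a setup step that can fail, e.g. opening an HBase connection.
  static void openConnection() throws IOException {
    throw new IOException("simulated connection failure");
  }

  static void setupReaders() throws ExecutionSetupException {
    try {
      openConnection();
    } catch (Exception e) {
      // Same constructor used in getBatch above: wrap the original cause.
      throw new ExecutionSetupException(e);
    }
  }

  public static void main(String[] args) {
    try {
      setupReaders();
    } catch (ExecutionSetupException e) {
      System.err.println("Setup failed: " + e.getCause().getMessage());
    }
  }
}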
Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.
The class StoragePluginRegistryImpl, method getFormatPlugin.
@SuppressWarnings("resource")
@Override
public FormatPlugin getFormatPlugin(StoragePluginConfig storageConfig, FormatPluginConfig formatConfig) throws ExecutionSetupException {
  StoragePlugin p = getPlugin(storageConfig);
  if (!(p instanceof FileSystemPlugin)) {
    throw new ExecutionSetupException(String.format(
        "You tried to request a format plugin for a storage plugin that wasn't of type "
        + "FileSystemPlugin. The actual type of plugin was %s.", p.getClass().getName()));
  }
  FileSystemPlugin storage = (FileSystemPlugin) p;
  return storage.getFormatPlugin(formatConfig);
}
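The instanceof guard before the cast is the part worth isolating: reject a plugin of the wrong runtime type with a message that names the actual class, rather than letting a ClassCastException escape. A hedged sketch using hypothetical stand-in types (Plugin, FilePlugin, OtherPlugin) instead of Drill's real interfaces:

import org.apache.drill.common.exceptions.ExecutionSetupException;

public class GuardExample {

  interface Plugin {}
  static class FilePlugin implements Plugin {}
  static class OtherPlugin implements Plugin {}

  static FilePlugin requireFilePlugin(Plugin p) throws ExecutionSetupException {
    if (!(p instanceof FilePlugin)) {
      // Name the offending runtime type in the message, as getFormatPlugin does.
      throw new ExecutionSetupException(String.format(
          "Expected a FilePlugin but the actual type of plugin was %s.",
          p.getClass().getName()));
    }
    return (FilePlugin) p;
  }

  public static void main(String[] args) {
    try {
      requireFilePlugin(new OtherPlugin());
    } catch (ExecutionSetupException e) {
      System.err.println(e.getMessage());
    }
  }
}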
Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.
The class StoragePluginRegistryImpl, method createPlugins.
@SuppressWarnings("resource")
private Map<String, StoragePlugin> createPlugins() throws DrillbitStartupException {
  try {
    /*
     * Check if the storage plugins system table has any entries. If not, load the bootstrap-storage-plugin
     * file into the system table.
     */
    if (!pluginSystemTable.getAll().hasNext()) {
      // Bootstrap-load the config since no plugins are stored.
      logger.info("No storage plugin instances configured in persistent store, loading bootstrap configuration.");
      Collection<URL> urls = ClassPathScanner.forResource(ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE, false);
      if (urls != null && !urls.isEmpty()) {
        logger.info("Loading the storage plugin configs from URLs {}.", urls);
        Map<String, URL> pluginURLMap = Maps.newHashMap();
        for (URL url : urls) {
          String pluginsData = Resources.toString(url, Charsets.UTF_8);
          StoragePlugins plugins = lpPersistence.getMapper().readValue(pluginsData, StoragePlugins.class);
          for (Map.Entry<String, StoragePluginConfig> config : plugins) {
            if (!definePluginConfig(config.getKey(), config.getValue())) {
              logger.warn("Duplicate plugin instance '{}' defined in [{}, {}], ignoring the later one.", config.getKey(), pluginURLMap.get(config.getKey()), url);
              continue;
            }
            pluginURLMap.put(config.getKey(), url);
          }
        }
      } else {
        throw new IOException("Failure finding " + ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE);
      }
    }
    Map<String, StoragePlugin> activePlugins = new HashMap<String, StoragePlugin>();
    for (Map.Entry<String, StoragePluginConfig> entry : Lists.newArrayList(pluginSystemTable.getAll())) {
      String name = entry.getKey();
      StoragePluginConfig config = entry.getValue();
      if (config.isEnabled()) {
        try {
          StoragePlugin plugin = create(name, config);
          activePlugins.put(name, plugin);
        } catch (ExecutionSetupException e) {
          logger.error("Failure while setting up StoragePlugin with name: '{}', disabling.", name, e);
          config.setEnabled(false);
          pluginSystemTable.put(name, config);
        }
      }
    }
    activePlugins.put(INFORMATION_SCHEMA_PLUGIN, new InfoSchemaStoragePlugin(new InfoSchemaConfig(), context, INFORMATION_SCHEMA_PLUGIN));
    activePlugins.put(SYS_PLUGIN, new SystemTablePlugin(SystemTablePluginConfig.INSTANCE, context, SYS_PLUGIN));
    return activePlugins;
  } catch (IOException e) {
    logger.error("Failure setting up storage plugins. Drillbit exiting.", e);
    throw new IllegalStateException(e);
  }
}
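The catch block in the middle implements a disable-on-failure policy: a plugin whose construction throws ExecutionSetupException is written back to the store as disabled instead of aborting startup. A simplified sketch of that policy with stand-in types and an in-memory map in place of pluginSystemTable:

import org.apache.drill.common.exceptions.ExecutionSetupException;

import java.util.HashMap;
import java.util.Map;

public class DisableOnFailure {

  static class Config {
    boolean enabled = true;
  }

  // Stand-in for StoragePluginRegistryImpl.create(name, config); here it always fails.
  static Object create(String name, Config config) throws ExecutionSetupException {
    throw new ExecutionSetupException("simulated failure for " + name);
  }

  public static void main(String[] args) {
    Map<String, Config> store = new HashMap<>();
    store.put("bad-plugin", new Config());

    Map<String, Object> activePlugins = new HashMap<>();
    for (Map.Entry<String, Config> entry : store.entrySet()) {
      if (entry.getValue().enabled) {
        try {
          activePlugins.put(entry.getKey(), create(entry.getKey(), entry.getValue()));
        } catch (ExecutionSetupException e) {
          // Same policy as above: mark disabled and persist, rather than abort.
          entry.getValue().enabled = false;
        }
      }
    }
    System.out.println("active plugins: " + activePlugins.keySet());
  }
}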
Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.
The class AvroRecordReader, method setup.
@Override
public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException {
  writer = new VectorContainerWriter(output);
  try {
    reader = getReader(hadoop, fs);
    logger.debug("Processing file : {}, start position : {}, end position : {} ", hadoop, start, end);
    // Position the Avro reader at the first sync point past this reader's assigned start offset.
    reader.sync(this.start);
  } catch (IOException e) {
    throw new ExecutionSetupException(e);
  }
}
Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.
The class EasyFormatPlugin, method getReaderBatch.
@SuppressWarnings("resource")
CloseableRecordBatch getReaderBatch(FragmentContext context, EasySubScan scan) throws ExecutionSetupException {
  final ImplicitColumnExplorer columnExplorer = new ImplicitColumnExplorer(context, scan.getColumns());
  if (!columnExplorer.isStarQuery()) {
    // Preserve the original scan's operator id when replacing it with a scan
    // over the narrowed column list.
    final int operatorId = scan.getOperatorId();
    scan = new EasySubScan(scan.getUserName(), scan.getWorkUnits(), scan.getFormatPlugin(), columnExplorer.getTableColumns(), scan.getSelectionRoot());
    scan.setOperatorId(operatorId);
  }
  OperatorContext oContext = context.newOperatorContext(scan);
  final DrillFileSystem dfs;
  try {
    dfs = oContext.newFileSystem(fsConf);
  } catch (IOException e) {
    throw new ExecutionSetupException(String.format("Failed to create FileSystem: %s", e.getMessage()), e);
  }
  List<RecordReader> readers = Lists.newArrayList();
  List<Map<String, String>> implicitColumns = Lists.newArrayList();
  Map<String, String> mapWithMaxColumns = Maps.newLinkedHashMap();
  for (FileWork work : scan.getWorkUnits()) {
    RecordReader recordReader = getRecordReader(context, dfs, work, scan.getColumns(), scan.getUserName());
    readers.add(recordReader);
    Map<String, String> implicitValues = columnExplorer.populateImplicitColumns(work, scan.getSelectionRoot());
    implicitColumns.add(implicitValues);
    if (implicitValues.size() > mapWithMaxColumns.size()) {
      mapWithMaxColumns = implicitValues;
    }
  }
  // All readers should have the same set of implicit columns; add missing ones with value null.
  Map<String, String> diff = Maps.transformValues(mapWithMaxColumns, Functions.constant((String) null));
  for (Map<String, String> map : implicitColumns) {
    map.putAll(Maps.difference(map, diff).entriesOnlyOnRight());
  }
  return new ScanBatch(scan, context, oContext, readers.iterator(), implicitColumns);
}
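The padding step at the end leans on two Guava utilities: Maps.transformValues builds a null-valued template from the widest implicit-column map, and Maps.difference(map, template).entriesOnlyOnRight() yields exactly the keys a given map is missing. A runnable sketch with made-up file names and directory values:

import com.google.common.base.Functions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

import java.util.List;
import java.util.Map;

public class PadColumns {
  public static void main(String[] args) {
    Map<String, String> full = Maps.newLinkedHashMap();
    full.put("filename", "a.csv");
    full.put("dir0", "2017");

    Map<String, String> partial = Maps.newLinkedHashMap();
    partial.put("filename", "b.csv");

    List<Map<String, String>> implicitColumns = Lists.newArrayList(full, partial);

    // Template with the same keys as the widest map, all values null.
    Map<String, String> diff = Maps.transformValues(full, Functions.constant((String) null));
    for (Map<String, String> map : implicitColumns) {
      // entriesOnlyOnRight() holds keys present in the template but absent
      // from this map; copying them over gives every map an identical key set.
      map.putAll(Maps.difference(map, diff).entriesOnlyOnRight());
    }
    // Prints: [{filename=a.csv, dir0=2017}, {filename=b.csv, dir0=null}]
    System.out.println(implicitColumns);
  }
}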