Search in sources:

Example 51 with ExecutionSetupException

Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.

Class HBaseScanBatchCreator, method getBatch:

@Override
public ScanBatch getBatch(FragmentContext context, HBaseSubScan subScan, List<RecordBatch> children) throws ExecutionSetupException {
    Preconditions.checkArgument(children.isEmpty());
    List<RecordReader> readers = Lists.newArrayList();
    List<SchemaPath> columns = null;
    for (HBaseSubScan.HBaseSubScanSpec scanSpec : subScan.getRegionScanSpecList()) {
        try {
            if ((columns = subScan.getColumns()) == null) {
                columns = GroupScan.ALL_COLUMNS;
            }
            readers.add(new HBaseRecordReader(subScan.getStorageEngine().getConnection(), scanSpec, columns, context));
        } catch (Exception e1) {
            throw new ExecutionSetupException(e1);
        }
    }
    return new ScanBatch(subScan, context, readers.iterator());
}
Also used: ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException), SchemaPath (org.apache.drill.common.expression.SchemaPath), RecordReader (org.apache.drill.exec.store.RecordReader), ScanBatch (org.apache.drill.exec.physical.impl.ScanBatch)
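
Note the pattern in this example: each per-region reader is constructed inside a try block so that any failure during reader creation is rethrown as a single ExecutionSetupException and fails the fragment setup cleanly. Below is a minimal, self-contained sketch of the same wrap-in-a-loop idea; the InputSpec and SketchReader types and their contents are invented for illustration, and only the ExecutionSetupException(Throwable) constructor already used above is assumed.

import java.util.ArrayList;
import java.util.List;
import org.apache.drill.common.exceptions.ExecutionSetupException;

public class ReaderListSketch {
    // Stand-ins for the real sub-scan spec and record reader types.
    static class InputSpec { final String name; InputSpec(String name) { this.name = name; } }
    static class SketchReader { SketchReader(InputSpec spec) { /* open per-input resources here */ } }

    static List<SketchReader> buildReaders(List<InputSpec> specs) throws ExecutionSetupException {
        List<SketchReader> readers = new ArrayList<>();
        for (InputSpec spec : specs) {
            try {
                readers.add(new SketchReader(spec));
            } catch (Exception e) {
                // Any per-reader failure aborts the whole setup, wrapped in the checked setup exception.
                throw new ExecutionSetupException(e);
            }
        }
        return readers;
    }
}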

Example 52 with ExecutionSetupException

Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.

Class StoragePluginRegistryImpl, method getFormatPlugin:

@SuppressWarnings("resource")
@Override
public FormatPlugin getFormatPlugin(StoragePluginConfig storageConfig, FormatPluginConfig formatConfig) throws ExecutionSetupException {
    StoragePlugin p = getPlugin(storageConfig);
    if (!(p instanceof FileSystemPlugin)) {
        throw new ExecutionSetupException(String.format("You tried to request a format plugin for a storage plugin that wasn't of type " + "FileSystemPlugin. The actual type of plugin was %s.", p.getClass().getName()));
    }
    FileSystemPlugin storage = (FileSystemPlugin) p;
    return storage.getFormatPlugin(formatConfig);
}
Also used: ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException), FileSystemPlugin (org.apache.drill.exec.store.dfs.FileSystemPlugin), InfoSchemaStoragePlugin (org.apache.drill.exec.store.ischema.InfoSchemaStoragePlugin)
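
This example illustrates the common check-then-cast idiom: validate the runtime type first and raise an ExecutionSetupException with a descriptive message, rather than letting a ClassCastException escape later. Here is a stripped-down sketch of that idiom with stand-in plugin types invented for illustration; only the ExecutionSetupException(String) constructor shown above is assumed.

import org.apache.drill.common.exceptions.ExecutionSetupException;

public class PluginCastSketch {
    // Stand-ins for the real StoragePlugin hierarchy.
    interface Plugin {}
    static class FilePlugin implements Plugin {}

    static FilePlugin requireFilePlugin(Plugin p) throws ExecutionSetupException {
        if (!(p instanceof FilePlugin)) {
            // Fail with a message that names the offending class instead of failing on the cast below.
            throw new ExecutionSetupException(String.format(
                "Expected a FilePlugin but the configured plugin is of type %s.", p.getClass().getName()));
        }
        return (FilePlugin) p;
    }
}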

Example 53 with ExecutionSetupException

Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.

Class StoragePluginRegistryImpl, method createPlugins:

@SuppressWarnings("resource")
private Map<String, StoragePlugin> createPlugins() throws DrillbitStartupException {
    try {
        /*
         * Check if the storage plugins system table has any entries. If not, load the bootstrap-storage-plugins file into
         * the system table.
         */
        if (!pluginSystemTable.getAll().hasNext()) {
            // bootstrap load the config since no plugins are stored.
            logger.info("No storage plugin instances configured in persistent store, loading bootstrap configuration.");
            Collection<URL> urls = ClassPathScanner.forResource(ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE, false);
            if (urls != null && !urls.isEmpty()) {
                logger.info("Loading the storage plugin configs from URLs {}.", urls);
                Map<String, URL> pluginURLMap = Maps.newHashMap();
                for (URL url : urls) {
                    String pluginsData = Resources.toString(url, Charsets.UTF_8);
                    StoragePlugins plugins = lpPersistence.getMapper().readValue(pluginsData, StoragePlugins.class);
                    for (Map.Entry<String, StoragePluginConfig> config : plugins) {
                        if (!definePluginConfig(config.getKey(), config.getValue())) {
                            logger.warn("Duplicate plugin instance '{}' defined in [{}, {}], ignoring the later one.", config.getKey(), pluginURLMap.get(config.getKey()), url);
                            continue;
                        }
                        pluginURLMap.put(config.getKey(), url);
                    }
                }
            } else {
                throw new IOException("Failure finding " + ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE);
            }
        }
        Map<String, StoragePlugin> activePlugins = new HashMap<String, StoragePlugin>();
        for (Map.Entry<String, StoragePluginConfig> entry : Lists.newArrayList(pluginSystemTable.getAll())) {
            String name = entry.getKey();
            StoragePluginConfig config = entry.getValue();
            if (config.isEnabled()) {
                try {
                    StoragePlugin plugin = create(name, config);
                    activePlugins.put(name, plugin);
                } catch (ExecutionSetupException e) {
                    logger.error("Failure while setting up StoragePlugin with name: '{}', disabling.", name, e);
                    config.setEnabled(false);
                    pluginSystemTable.put(name, config);
                }
            }
        }
        activePlugins.put(INFORMATION_SCHEMA_PLUGIN, new InfoSchemaStoragePlugin(new InfoSchemaConfig(), context, INFORMATION_SCHEMA_PLUGIN));
        activePlugins.put(SYS_PLUGIN, new SystemTablePlugin(SystemTablePluginConfig.INSTANCE, context, SYS_PLUGIN));
        return activePlugins;
    } catch (IOException e) {
        logger.error("Failure setting up storage plugins.  Drillbit exiting.", e);
        throw new IllegalStateException(e);
    }
}
Also used: ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException), HashMap (java.util.HashMap), SystemTablePlugin (org.apache.drill.exec.store.sys.SystemTablePlugin), IOException (java.io.IOException), URL (java.net.URL), InfoSchemaStoragePlugin (org.apache.drill.exec.store.ischema.InfoSchemaStoragePlugin), StoragePlugins (org.apache.drill.exec.planner.logical.StoragePlugins), StoragePluginConfig (org.apache.drill.common.logical.StoragePluginConfig), Map (java.util.Map), InfoSchemaConfig (org.apache.drill.exec.store.ischema.InfoSchemaConfig)
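
Worth noting is the recovery strategy in the loop above: an ExecutionSetupException for one plugin does not abort Drillbit startup; the failing plugin is logged, disabled, and persisted, and the loop continues with the remaining entries. The following is a hedged, standalone sketch of that catch-and-continue pattern; the PluginConfig type, the create factory, and the disable step are hypothetical stand-ins for the real registry machinery.

import java.util.HashMap;
import java.util.Map;
import org.apache.drill.common.exceptions.ExecutionSetupException;

public class CatchAndContinueSketch {
    static class PluginConfig { boolean enabled = true; }

    static Object create(String name, PluginConfig config) throws ExecutionSetupException {
        return new Object(); // placeholder for real plugin construction, which may fail
    }

    static Map<String, Object> activate(Map<String, PluginConfig> configs) {
        Map<String, Object> active = new HashMap<>();
        for (Map.Entry<String, PluginConfig> entry : configs.entrySet()) {
            try {
                active.put(entry.getKey(), create(entry.getKey(), entry.getValue()));
            } catch (ExecutionSetupException e) {
                // One bad plugin is disabled rather than failing the whole startup.
                entry.getValue().enabled = false;
            }
        }
        return active;
    }
}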

Example 54 with ExecutionSetupException

Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.

Class AvroRecordReader, method setup:

@Override
public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException {
    writer = new VectorContainerWriter(output);
    try {
        reader = getReader(hadoop, fs);
        logger.debug("Processing file : {}, start position : {}, end position : {} ", hadoop, start, end);
        reader.sync(this.start);
    } catch (IOException e) {
        throw new ExecutionSetupException(e);
    }
}
Also used: ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException), VectorContainerWriter (org.apache.drill.exec.vector.complex.impl.VectorContainerWriter), IOException (java.io.IOException)
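
Example 54 shows the smallest common shape of this exception's use: catch the checked IOException at setup time and rethrow it as ExecutionSetupException so the reader contract is honored while the original cause is preserved for error reporting. A minimal hypothetical sketch follows; the SetupWrapSketch class and its openInput helper are invented, and the ExecutionSetupException(String, Throwable) constructor it uses is the one already seen in these examples.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.drill.common.exceptions.ExecutionSetupException;

public class SetupWrapSketch {
    private InputStream input;

    public void setup(String path) throws ExecutionSetupException {
        try {
            input = openInput(path);
        } catch (IOException e) {
            // Keep the IOException as the cause so the root failure stays visible in logs and messages.
            throw new ExecutionSetupException(String.format("Failed to open %s", path), e);
        }
    }

    private InputStream openInput(String path) throws IOException {
        return new FileInputStream(path); // placeholder for real input initialization
    }
}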

Example 55 with ExecutionSetupException

Use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by apache.

Class EasyFormatPlugin, method getReaderBatch:

@SuppressWarnings("resource")
CloseableRecordBatch getReaderBatch(FragmentContext context, EasySubScan scan) throws ExecutionSetupException {
    final ImplicitColumnExplorer columnExplorer = new ImplicitColumnExplorer(context, scan.getColumns());
    if (!columnExplorer.isStarQuery()) {
        scan = new EasySubScan(scan.getUserName(), scan.getWorkUnits(), scan.getFormatPlugin(), columnExplorer.getTableColumns(), scan.getSelectionRoot());
        scan.setOperatorId(scan.getOperatorId());
    }
    OperatorContext oContext = context.newOperatorContext(scan);
    final DrillFileSystem dfs;
    try {
        dfs = oContext.newFileSystem(fsConf);
    } catch (IOException e) {
        throw new ExecutionSetupException(String.format("Failed to create FileSystem: %s", e.getMessage()), e);
    }
    List<RecordReader> readers = Lists.newArrayList();
    List<Map<String, String>> implicitColumns = Lists.newArrayList();
    Map<String, String> mapWithMaxColumns = Maps.newLinkedHashMap();
    for (FileWork work : scan.getWorkUnits()) {
        RecordReader recordReader = getRecordReader(context, dfs, work, scan.getColumns(), scan.getUserName());
        readers.add(recordReader);
        Map<String, String> implicitValues = columnExplorer.populateImplicitColumns(work, scan.getSelectionRoot());
        implicitColumns.add(implicitValues);
        if (implicitValues.size() > mapWithMaxColumns.size()) {
            mapWithMaxColumns = implicitValues;
        }
    }
    // all readers should have the same number of implicit columns, add missing ones with value null
    Map<String, String> diff = Maps.transformValues(mapWithMaxColumns, Functions.constant((String) null));
    for (Map<String, String> map : implicitColumns) {
        map.putAll(Maps.difference(map, diff).entriesOnlyOnRight());
    }
    return new ScanBatch(scan, context, oContext, readers.iterator(), implicitColumns);
}
Also used: ImplicitColumnExplorer (org.apache.drill.exec.store.ImplicitColumnExplorer), ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException), RecordReader (org.apache.drill.exec.store.RecordReader), CompleteFileWork (org.apache.drill.exec.store.schedule.CompleteFileWork), IOException (java.io.IOException), DrillFileSystem (org.apache.drill.exec.store.dfs.DrillFileSystem), OperatorContext (org.apache.drill.exec.ops.OperatorContext), ScanBatch (org.apache.drill.exec.physical.impl.ScanBatch), Map (java.util.Map)
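
The last loop in the method above is a compact Guava trick: take the widest implicit-column map, turn it into a template whose values are all null, and use Maps.difference to add the missing keys to every narrower map so all readers expose the same column set. Below is a standalone sketch of just that padding step; the filename/dirname keys are made-up sample data, while Maps.transformValues, Functions.constant, and Maps.difference are the standard Guava calls used verbatim in the code above.

import com.google.common.base.Functions;
import com.google.common.collect.Maps;
import java.util.LinkedHashMap;
import java.util.Map;

public class ImplicitColumnPaddingSketch {
    public static void main(String[] args) {
        Map<String, String> widest = new LinkedHashMap<>();
        widest.put("filename", "a.csv");
        widest.put("dirname", "/data");

        Map<String, String> partial = new LinkedHashMap<>();
        partial.put("filename", "b.csv");

        // Template with the widest key set, every value mapped to null.
        Map<String, String> template = Maps.transformValues(widest, Functions.constant((String) null));

        // Add the keys missing from the narrower map, with null values, so all maps share one schema.
        partial.putAll(Maps.difference(partial, template).entriesOnlyOnRight());

        System.out.println(partial); // {filename=b.csv, dirname=null}
    }
}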

Aggregations

ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException): 94 usages
IOException (java.io.IOException): 43 usages
ScanBatch (org.apache.drill.exec.physical.impl.ScanBatch): 26 usages
SchemaPath (org.apache.drill.common.expression.SchemaPath): 25 usages
RecordReader (org.apache.drill.exec.store.RecordReader): 24 usages
SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException): 22 usages
LinkedList (java.util.LinkedList): 16 usages
Map (java.util.Map): 14 usages
MaterializedField (org.apache.drill.exec.record.MaterializedField): 13 usages
ExecutionException (java.util.concurrent.ExecutionException): 10 usages
DrillRuntimeException (org.apache.drill.common.exceptions.DrillRuntimeException): 10 usages
OperatorContext (org.apache.drill.exec.ops.OperatorContext): 8 usages
UserException (org.apache.drill.common.exceptions.UserException): 7 usages
MajorType (org.apache.drill.common.types.TypeProtos.MajorType): 7 usages
JobConf (org.apache.hadoop.mapred.JobConf): 7 usages
HashMap (java.util.HashMap): 6 usages
List (java.util.List): 6 usages
OutOfMemoryException (org.apache.drill.exec.exception.OutOfMemoryException): 6 usages
VectorContainerWriter (org.apache.drill.exec.vector.complex.impl.VectorContainerWriter): 6 usages
Path (org.apache.hadoop.fs.Path): 6 usages