Example 71 with ExecutionSetupException

use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by axbaretto.

the class KuduScanBatchCreator method getBatch.

@Override
public ScanBatch getBatch(ExecutorFragmentContext context, KuduSubScan subScan, List<RecordBatch> children) throws ExecutionSetupException {
    Preconditions.checkArgument(children.isEmpty());
    List<RecordReader> readers = new LinkedList<>();
    List<SchemaPath> columns = null;
    for (KuduSubScan.KuduSubScanSpec scanSpec : subScan.getTabletScanSpecList()) {
        try {
            // Fall back to a full-column projection when no columns were pushed down.
            if ((columns = subScan.getColumns()) == null) {
                columns = GroupScan.ALL_COLUMNS;
            }
            readers.add(new KuduRecordReader(subScan.getStorageEngine().getClient(), scanSpec, columns));
        } catch (Exception e1) {
            // Any failure while constructing a reader aborts fragment setup with the cause attached.
            throw new ExecutionSetupException(e1);
        }
    }
    return new ScanBatch(subScan, context, readers);
}
Also used : ExecutionSetupException(org.apache.drill.common.exceptions.ExecutionSetupException) SchemaPath(org.apache.drill.common.expression.SchemaPath) RecordReader(org.apache.drill.exec.store.RecordReader) ScanBatch(org.apache.drill.exec.physical.impl.ScanBatch) LinkedList(java.util.LinkedList)
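
Examples 71 and 72 share one shape: iterate the sub-scan's tablet specs, fall back to GroupScan.ALL_COLUMNS when no projection was pushed down, and wrap any reader-construction failure in ExecutionSetupException so fragment setup fails fast with the original cause attached. Below is a minimal, self-contained sketch of that wrap-and-rethrow shape; SetupException, Reader, ScanSpec, and buildReader are hypothetical stand-ins, not Drill classes.

import java.util.LinkedList;
import java.util.List;

// Hypothetical checked exception playing the role of ExecutionSetupException.
class SetupException extends Exception {
    SetupException(Throwable cause) { super(cause); }
}

interface Reader { }

interface ScanSpec { }

class ReaderListBuilder {
    // Build one reader per scan spec; the first failure aborts setup, cause attached.
    static List<Reader> createReaders(List<ScanSpec> specs) throws SetupException {
        List<Reader> readers = new LinkedList<>();
        for (ScanSpec spec : specs) {
            try {
                readers.add(buildReader(spec));
            } catch (Exception e) {
                throw new SetupException(e);
            }
        }
        return readers;
    }

    private static Reader buildReader(ScanSpec spec) {
        // Placeholder construction; a real creator would open a storage client here.
        return new Reader() { };
    }
}

Failing on the first bad spec, rather than collecting errors, matches the creators above: a scan with any unbuildable reader cannot produce a correct batch anyway.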

Example 72 with ExecutionSetupException

use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by axbaretto.

the class OpenTSDBBatchCreator method getBatch.

@Override
public CloseableRecordBatch getBatch(ExecutorFragmentContext context, OpenTSDBSubScan subScan, List<RecordBatch> children) throws ExecutionSetupException {
    List<RecordReader> readers = new LinkedList<>();
    List<SchemaPath> columns;
    for (OpenTSDBSubScan.OpenTSDBSubScanSpec scanSpec : subScan.getTabletScanSpecList()) {
        try {
            if ((columns = subScan.getColumns()) == null) {
                columns = GroupScan.ALL_COLUMNS;
            }
            readers.add(new OpenTSDBRecordReader(subScan.getStorageEngine().getClient(), scanSpec, columns));
        } catch (Exception e) {
            throw new ExecutionSetupException(e);
        }
    }
    return new ScanBatch(subScan, context, readers);
}
Also used : ExecutionSetupException(org.apache.drill.common.exceptions.ExecutionSetupException) SchemaPath(org.apache.drill.common.expression.SchemaPath) RecordReader(org.apache.drill.exec.store.RecordReader) ScanBatch(org.apache.drill.exec.physical.impl.ScanBatch) LinkedList(java.util.LinkedList)

Example 73 with ExecutionSetupException

use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by axbaretto.

the class DynamicRootSchema method loadSchemaFactory.

/**
 * Loads the schema factory (storage plugin) for the given schema name.
 * @param schemaName name of the schema to load; may be a dotted two-level name such as "dfs.tmp"
 * @param caseSensitive whether the schema name lookup is case sensitive
 */
public void loadSchemaFactory(String schemaName, boolean caseSensitive) {
    try {
        SchemaPlus thisPlus = this.plus();
        StoragePlugin plugin = getSchemaFactories().getPlugin(schemaName);
        if (plugin != null) {
            plugin.registerSchemas(schemaConfig, thisPlus);
            return;
        }
        // No plugin matches schemaName directly; it could be a dotted name such as `dfs.tmp`, a second-level schema under 'dfs'.
        String[] paths = schemaName.split("\\.");
        if (paths.length == 2) {
            plugin = getSchemaFactories().getPlugin(paths[0]);
            if (plugin == null) {
                return;
            }
            // Found the storage plugin for the first part (e.g. 'dfs') of the schema name (e.g. 'dfs.tmp');
            // register that plugin's schemas under this root.
            plugin.registerSchemas(schemaConfig, thisPlus);
            // Then load the second-level schemas for this storage plugin.
            final SchemaPlus firstlevelSchema = thisPlus.getSubSchema(paths[0]);
            final List<SchemaPlus> secondLevelSchemas = Lists.newArrayList();
            for (String secondLevelSchemaName : firstlevelSchema.getSubSchemaNames()) {
                secondLevelSchemas.add(firstlevelSchema.getSubSchema(secondLevelSchemaName));
            }
            for (SchemaPlus schema : secondLevelSchemas) {
                org.apache.drill.exec.store.AbstractSchema drillSchema;
                try {
                    drillSchema = schema.unwrap(org.apache.drill.exec.store.AbstractSchema.class);
                } catch (ClassCastException e) {
                    throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
                }
                SubSchemaWrapper wrapper = new SubSchemaWrapper(drillSchema);
                thisPlus.add(wrapper.getName(), wrapper);
            }
        }
    } catch (ExecutionSetupException | IOException ex) {
        logger.warn("Failed to load schema for \"" + schemaName + "\"!", ex);
    }
}
Also used : ExecutionSetupException(org.apache.drill.common.exceptions.ExecutionSetupException) SubSchemaWrapper(org.apache.drill.exec.store.SubSchemaWrapper) SchemaPlus(org.apache.calcite.schema.SchemaPlus) IOException(java.io.IOException) StoragePlugin(org.apache.drill.exec.store.StoragePlugin) AbstractSchema(org.apache.calcite.schema.impl.AbstractSchema)
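
The fallback branch is the heart of this method: when no plugin matches the full name, it splits on '.' and retries with the first segment, then re-exposes that plugin's second-level schemas under the root. Here is a runnable sketch of just the name-resolution step, with a plain Map standing in for the plugin registry; lookupPlugin and the registry contents are illustrative assumptions, not Drill API.

import java.util.Map;
import java.util.Optional;

class SchemaNameResolver {
    // Hypothetical registry mapping top-level plugin names to plugin handles.
    private final Map<String, String> registry = Map.of("dfs", "dfs-plugin", "cp", "cp-plugin");

    // Resolve "dfs" directly, or a two-level name such as "dfs.tmp" via its first segment.
    Optional<String> lookupPlugin(String schemaName) {
        String plugin = registry.get(schemaName);
        if (plugin != null) {
            return Optional.of(plugin);
        }
        String[] paths = schemaName.split("\\.");
        if (paths.length == 2) {
            return Optional.ofNullable(registry.get(paths[0]));
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        SchemaNameResolver r = new SchemaNameResolver();
        System.out.println(r.lookupPlugin("dfs"));     // Optional[dfs-plugin]
        System.out.println(r.lookupPlugin("dfs.tmp")); // Optional[dfs-plugin]
        System.out.println(r.lookupPlugin("mongo.x")); // Optional.empty
    }
}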

Example 74 with ExecutionSetupException

use of org.apache.drill.common.exceptions.ExecutionSetupException in project drill by axbaretto.

the class MockRecordReader method setup.

@Override
public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException {
    try {
        // Nothing to set up when the mock config declares no columns; check
        // before estimating the row size so the estimate never sees null.
        if (config.getTypes() == null) {
            return;
        }
        final int estimateRowSize = getEstimatedRecordSize(config.getTypes());
        valueVectors = new ValueVector[config.getTypes().length];
        // Size each batch to roughly 250 KB worth of rows.
        batchRecordCount = 250000 / estimateRowSize;
        for (int i = 0; i < config.getTypes().length; i++) {
            final MajorType type = config.getTypes()[i].getMajorType();
            final MaterializedField field = getVector(config.getTypes()[i].getName(), type);
            final Class<? extends ValueVector> vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode());
            valueVectors[i] = output.addField(field, vvClass);
        }
    } catch (SchemaChangeException e) {
        throw new ExecutionSetupException("Failure while setting up fields", e);
    }
}
Also used : ExecutionSetupException(org.apache.drill.common.exceptions.ExecutionSetupException) SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) MajorType(org.apache.drill.common.types.TypeProtos.MajorType) MaterializedField(org.apache.drill.exec.record.MaterializedField)
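
The sizing logic above is plain arithmetic: the reader targets roughly 250,000 bytes per batch, so the record count per batch is that byte budget divided by the estimated row width. A small sketch of the calculation follows; the 250,000-byte budget comes from the code above, while recordsPerBatch and the example field widths are made-up illustration values.

class BatchSizer {
    // Byte budget per batch, as used in MockRecordReader.setup above.
    private static final int BATCH_BYTE_BUDGET = 250_000;

    static int recordsPerBatch(int[] fieldWidthsBytes) {
        int rowSize = 0;
        for (int w : fieldWidthsBytes) {
            rowSize += w;
        }
        if (rowSize == 0) {
            throw new IllegalArgumentException("row has no width");
        }
        return BATCH_BYTE_BUDGET / rowSize;
    }

    public static void main(String[] args) {
        // An INT (4 bytes) plus a BIGINT (8 bytes): 250000 / 12 = 20833 records per batch.
        System.out.println(recordsPerBatch(new int[] { 4, 8 }));
    }
}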

Aggregations

Types co-occurring with ExecutionSetupException, by usage count:

ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException): 94
IOException (java.io.IOException): 43
ScanBatch (org.apache.drill.exec.physical.impl.ScanBatch): 26
SchemaPath (org.apache.drill.common.expression.SchemaPath): 25
RecordReader (org.apache.drill.exec.store.RecordReader): 24
SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException): 22
LinkedList (java.util.LinkedList): 16
Map (java.util.Map): 14
MaterializedField (org.apache.drill.exec.record.MaterializedField): 13
ExecutionException (java.util.concurrent.ExecutionException): 10
DrillRuntimeException (org.apache.drill.common.exceptions.DrillRuntimeException): 10
OperatorContext (org.apache.drill.exec.ops.OperatorContext): 8
UserException (org.apache.drill.common.exceptions.UserException): 7
MajorType (org.apache.drill.common.types.TypeProtos.MajorType): 7
JobConf (org.apache.hadoop.mapred.JobConf): 7
HashMap (java.util.HashMap): 6
List (java.util.List): 6
OutOfMemoryException (org.apache.drill.exec.exception.OutOfMemoryException): 6
VectorContainerWriter (org.apache.drill.exec.vector.complex.impl.VectorContainerWriter): 6
Path (org.apache.hadoop.fs.Path): 6