Use of org.apache.drill.exec.store.StoragePlugin in project drill by axbaretto.
The class DescribeSchemaHandler, method getPlan.
@Override
public PhysicalPlan getPlan(SqlNode sqlNode) {
  SqlIdentifier schema = ((SqlDescribeSchema) sqlNode).getSchema();
  SchemaPlus drillSchema = SchemaUtilites.findSchema(config.getConverter().getDefaultSchema(), schema.names);
  if (drillSchema != null) {
    StoragePlugin storagePlugin;
    try {
      storagePlugin = context.getStorage().getPlugin(schema.names.get(0));
    } catch (ExecutionSetupException e) {
      throw new DrillRuntimeException("Failure while retrieving storage plugin", e);
    }
    String properties;
    try {
      final Map configMap = mapper.convertValue(storagePlugin.getConfig(), Map.class);
      if (storagePlugin instanceof FileSystemPlugin) {
        transformWorkspaces(schema.names, configMap);
      }
      properties = mapper.writeValueAsString(configMap);
    } catch (JsonProcessingException e) {
      throw new DrillRuntimeException("Error while trying to convert storage config to json string", e);
    }
    return DirectPlan.createDirectPlan(context, new DescribeSchemaResult(Joiner.on(".").join(schema.names), properties));
  }
  throw UserException.validationError()
      .message(String.format("Invalid schema name [%s]", Joiner.on(".").join(schema.names)))
      .build(logger);
}
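The core of the handler is converting the storage plugin's config bean into a JSON properties string via Jackson. Below is a minimal standalone sketch of that convertValue + writeValueAsString pattern; the ExamplePluginConfig bean is a hypothetical stand-in, not a Drill class.

import java.util.Map;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ConfigToJsonSketch {

  // Hypothetical config bean standing in for a StoragePluginConfig subclass.
  static class ExamplePluginConfig {
    public String connection = "file:///";
    public boolean enabled = true;
  }

  public static void main(String[] args) throws JsonProcessingException {
    ObjectMapper mapper = new ObjectMapper();
    // Same pattern as getPlan(): config bean -> Map -> (optional tweaks) -> JSON string.
    Map<?, ?> configMap = mapper.convertValue(new ExamplePluginConfig(), Map.class);
    String properties = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(configMap);
    System.out.println(properties);
  }
}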
Use of org.apache.drill.exec.store.StoragePlugin in project drill by apache.
The class DynamicRootSchema, method loadSchemaFactory.
/**
 * Loads schema factory (storage plugin) for specified {@code schemaName}
 * @param schemaName the name of the schema
 * @param caseSensitive whether matching for the schema name is case sensitive
 */
private void loadSchemaFactory(String schemaName, boolean caseSensitive) {
  try {
    SchemaPlus schemaPlus = this.plus();
    StoragePlugin plugin = storages.getPlugin(schemaName);
    if (plugin != null) {
      plugin.registerSchemas(schemaConfig, schemaPlus);
      return;
    }
    // Could not find the plugin of schemaName. The schemaName could be `dfs.tmp`, a 2nd level schema under 'dfs'
    List<String> paths = SchemaUtilites.getSchemaPathAsList(schemaName);
    if (paths.size() == 2) {
      plugin = storages.getPlugin(paths.get(0));
      if (plugin == null) {
        return;
      }
      // Looking for the SchemaPlus for the top level (e.g. 'dfs') of schemaName (e.g. 'dfs.tmp')
      SchemaPlus firstLevelSchema = schemaPlus.getSubSchema(paths.get(0));
      if (firstLevelSchema == null) {
        // register schema for this storage plugin to 'this'.
        plugin.registerSchemas(schemaConfig, schemaPlus);
        firstLevelSchema = schemaPlus.getSubSchema(paths.get(0));
      }
      // Load second level schemas for this storage plugin
      List<SchemaPlus> secondLevelSchemas = new ArrayList<>();
      for (String secondLevelSchemaName : firstLevelSchema.getSubSchemaNames()) {
        secondLevelSchemas.add(firstLevelSchema.getSubSchema(secondLevelSchemaName));
      }
      for (SchemaPlus schema : secondLevelSchemas) {
        org.apache.drill.exec.store.AbstractSchema drillSchema;
        try {
          drillSchema = schema.unwrap(AbstractSchema.class);
        } catch (ClassCastException e) {
          throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
        }
        SubSchemaWrapper wrapper = new SubSchemaWrapper(drillSchema);
        schemaPlus.add(wrapper.getName(), wrapper);
      }
    }
  } catch (PluginException | IOException ex) {
    logger.warn("Failed to load schema for \"" + schemaName + "\"!", ex);
    // We can't proceed further without a schema, throw a runtime exception.
    UserException.Builder exceptBuilder = UserException.resourceError(ex)
        .message("Failed to load schema for \"" + schemaName + "\"!")
        .addContext(ex.getClass().getName() + ": " + ex.getMessage())
        .addContext(UserExceptionUtils.getUserHint(ex)); // Provide hint if it exists
    throw exceptBuilder.build(logger);
  }
}
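The key control flow here is: try the full schema name first, then fall back to splitting a two-part name such as `dfs.tmp` and resolving its root part. A minimal standalone sketch of that resolution order follows, using a plain Map in place of the plugin registry; the registry contents and names are illustrative assumptions, not Drill APIs.

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class SchemaResolutionSketch {

  // Stand-in for the storage plugin registry: schema name -> plugin marker.
  static final Map<String, String> REGISTRY = Map.of("dfs", "FileSystemPlugin", "cp", "ClasspathPlugin");

  static String resolve(String schemaName) {
    // 1) Exact match on the full name.
    String plugin = REGISTRY.get(schemaName);
    if (plugin != null) {
      return plugin;
    }
    // 2) Two-part name: fall back to the root part (e.g. "dfs" for "dfs.tmp").
    List<String> paths = Arrays.asList(schemaName.split("\\."));
    if (paths.size() == 2) {
      return REGISTRY.get(paths.get(0));
    }
    return null;
  }

  public static void main(String[] args) {
    System.out.println(resolve("dfs"));      // FileSystemPlugin
    System.out.println(resolve("dfs.tmp"));  // FileSystemPlugin, via the two-level fallback
    System.out.println(resolve("mongo"));    // null: unknown schema
  }
}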
Use of org.apache.drill.exec.store.StoragePlugin in project drill by apache.
The class DrillStatsTable, method readStatistics.
private TableStatistics readStatistics(DrillTable drillTable, Path path) throws IOException {
  final Object selection = drillTable.getSelection();
  if (selection instanceof FormatSelection) {
    StoragePlugin storagePlugin = drillTable.getPlugin();
    FormatSelection formatSelection = (FormatSelection) selection;
    FormatPluginConfig formatConfig = formatSelection.getFormat();
    if (storagePlugin instanceof FileSystemPlugin && (formatConfig instanceof ParquetFormatConfig)) {
      FormatPlugin fmtPlugin = storagePlugin.getFormatPlugin(formatConfig);
      if (fmtPlugin.supportsStatistics()) {
        return fmtPlugin.readStatistics(fs, path);
      }
    }
  }
  return null;
}
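The method gates the statistics read behind capability checks: the plugin and format types are verified with instanceof, then supportsStatistics() is probed before delegating. A standalone sketch of that guard-then-delegate pattern is shown below; the interfaces and classes are placeholders, not Drill's types.

public class StatsGuardSketch {

  interface FormatPlugin {
    boolean supportsStatistics();
    String readStatistics(String path);
  }

  // Placeholder format plugin that does support statistics.
  static class ParquetLikePlugin implements FormatPlugin {
    public boolean supportsStatistics() { return true; }
    public String readStatistics(String path) { return "stats for " + path; }
  }

  static String readStatisticsIfSupported(Object plugin, String path) {
    // Only delegate when the plugin has the right type and advertises the capability.
    if (plugin instanceof FormatPlugin) {
      FormatPlugin fmt = (FormatPlugin) plugin;
      if (fmt.supportsStatistics()) {
        return fmt.readStatistics(path);
      }
    }
    return null;  // Mirrors the original: no statistics available.
  }

  public static void main(String[] args) {
    System.out.println(readStatisticsIfSupported(new ParquetLikePlugin(), "/tmp/table"));
    System.out.println(readStatisticsIfSupported("not a plugin", "/tmp/table"));
  }
}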
Use of org.apache.drill.exec.store.StoragePlugin in project drill by axbaretto.
The class DynamicRootSchema, method loadSchemaFactory.
/**
 * Loads schema factory (storage plugin) for the given {@code schemaName}.
 * @param schemaName the name of the schema
 * @param caseSensitive whether matching for the schema name is case sensitive
 */
public void loadSchemaFactory(String schemaName, boolean caseSensitive) {
  try {
    SchemaPlus thisPlus = this.plus();
    StoragePlugin plugin = getSchemaFactories().getPlugin(schemaName);
    if (plugin != null) {
      plugin.registerSchemas(schemaConfig, thisPlus);
      return;
    }
    // Could not find the plugin of schemaName. The schemaName could be `dfs.tmp`, a 2nd level schema under 'dfs'
    String[] paths = schemaName.split("\\.");
    if (paths.length == 2) {
      plugin = getSchemaFactories().getPlugin(paths[0]);
      if (plugin == null) {
        return;
      }
      // Found the storage plugin for first part (e.g. 'dfs') of schemaName (e.g. 'dfs.tmp');
      // register schema for this storage plugin to 'this'.
      plugin.registerSchemas(schemaConfig, thisPlus);
      // Load second level schemas for this storage plugin
      final SchemaPlus firstLevelSchema = thisPlus.getSubSchema(paths[0]);
      final List<SchemaPlus> secondLevelSchemas = Lists.newArrayList();
      for (String secondLevelSchemaName : firstLevelSchema.getSubSchemaNames()) {
        secondLevelSchemas.add(firstLevelSchema.getSubSchema(secondLevelSchemaName));
      }
      for (SchemaPlus schema : secondLevelSchemas) {
        org.apache.drill.exec.store.AbstractSchema drillSchema;
        try {
          drillSchema = schema.unwrap(org.apache.drill.exec.store.AbstractSchema.class);
        } catch (ClassCastException e) {
          throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
        }
        SubSchemaWrapper wrapper = new SubSchemaWrapper(drillSchema);
        thisPlus.add(wrapper.getName(), wrapper);
      }
    }
  } catch (ExecutionSetupException | IOException ex) {
    logger.warn("Failed to load schema for \"" + schemaName + "\"!", ex);
  }
}
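Unlike the apache variant above, this older axbaretto variant only logs a warning on failure instead of raising a UserException. The promotion of second-level schemas to root level is the same in both: each sub-schema of the plugin's root (e.g. 'dfs') is re-exposed at the top under a dotted name via SubSchemaWrapper. A standalone sketch of that idea, with plain maps standing in for Calcite's SchemaPlus (the names below are illustrative, not Drill or Calcite APIs):

import java.util.LinkedHashMap;
import java.util.Map;

public class SubSchemaPromotionSketch {

  public static void main(String[] args) {
    // Root schema and the sub-schemas registered by a plugin named "dfs".
    Map<String, Object> root = new LinkedHashMap<>();
    Map<String, Object> dfsSubSchemas = new LinkedHashMap<>();
    dfsSubSchemas.put("tmp", new Object());
    dfsSubSchemas.put("root", new Object());
    root.put("dfs", dfsSubSchemas);

    // Promote each second-level schema to the root under a dotted name,
    // mirroring what SubSchemaWrapper achieves for 'dfs.tmp', 'dfs.root', etc.
    for (Map.Entry<String, Object> sub : dfsSubSchemas.entrySet()) {
      root.put("dfs." + sub.getKey(), sub.getValue());
    }

    System.out.println(root.keySet());  // [dfs, dfs.tmp, dfs.root]
  }
}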