Search in sources:

Example 6 with FormatPluginConfig

use of org.apache.drill.common.logical.FormatPluginConfig in project drill by apache.

Source: class TestParquetWriter, method testFormatConfigOpts.

@Test
public void testFormatConfigOpts() throws Exception {
    // Snapshot the current parquet format config so it can be restored afterwards.
    FileSystemConfig dfsConfig = (FileSystemConfig) cluster.storageRegistry().copyConfig("dfs");
    FormatPluginConfig savedParquetConfig = dfsConfig.getFormats().get("parquet");
    // Install a parquet format with every option overridden.
    cluster.defineFormat("dfs", "parquet",
        new ParquetFormatConfig(false, true, 123, 456, true, "snappy", "binary", true, "v2"));
    try {
        // The serialized JSON plan must echo each overridden option back verbatim.
        queryBuilder()
            .sql("select * from dfs.`parquet/int96_dict_change`")
            .jsonPlanMatcher()
            .include("\"autoCorrectCorruptDates\" : false")
            .include("\"enableStringsSignedMinMax\" : true")
            .include("\"blockSize\" : 123")
            .include("\"pageSize\" : 456")
            .include("\"useSingleFSBlock\" : true")
            .include("\"writerCompressionType\" : \"snappy\"")
            .include("\"writerUsePrimitivesForDecimals\" : true")
            .include("\"writerFormatVersion\" : \"v2\"")
            .match();
    } finally {
        // Always restore the original config, even if the assertion fails.
        cluster.defineFormat("dfs", "parquet", savedParquetConfig);
    }
}
Also used : FormatPluginConfig(org.apache.drill.common.logical.FormatPluginConfig) FileSystemConfig(org.apache.drill.exec.store.dfs.FileSystemConfig) ParquetFormatConfig(org.apache.drill.exec.store.parquet.ParquetFormatConfig) ClusterTest(org.apache.drill.test.ClusterTest) SlowTest(org.apache.drill.categories.SlowTest) ParquetTest(org.apache.drill.categories.ParquetTest) Test(org.junit.Test) UnlikelyTest(org.apache.drill.categories.UnlikelyTest)

Example 7 with FormatPluginConfig

use of org.apache.drill.common.logical.FormatPluginConfig in project drill by apache.

Source: class TestTextWriter, method setup.

@BeforeClass
public static void setup() throws Exception {
    // Bring up the test cluster before registering any formats.
    startCluster(ClusterFixture.builder(dirTestWatcher));
    // Register three text formats on "dfs": standard CSV, TSV, and one with
    // deliberately unusual delimiters to exercise custom configuration.
    Map<String, FormatPluginConfig> textFormats = new HashMap<>();
    textFormats.put("csv", new TextFormatConfig(
        Collections.singletonList("csv"),
        "\n",    // line delimiter
        ",",     // field delimiter
        "\"",    // quote
        "\"",    // escape
        null,    // comment
        false,   // skip first line
        true));  // extract header
    textFormats.put("tsv", new TextFormatConfig(
        Collections.singletonList("tsv"),
        "\n",    // line delimiter
        "\t",    // field delimiter
        "\"",    // quote
        "\"",    // escape
        null,    // comment
        false,   // skip first line
        true));  // extract header
    textFormats.put("custom", new TextFormatConfig(
        Collections.singletonList("custom"),
        "!",     // line delimiter
        "_",     // field delimiter
        "$",     // quote
        "^",     // escape
        null,    // comment
        false,   // skip first line
        true));  // extract header
    cluster.defineFormats("dfs", textFormats);
}
Also used : HashMap(java.util.HashMap) FormatPluginConfig(org.apache.drill.common.logical.FormatPluginConfig) TextFormatConfig(org.apache.drill.exec.store.easy.text.TextFormatPlugin.TextFormatConfig) ClusterFixtureBuilder(org.apache.drill.test.ClusterFixtureBuilder) BeforeClass(org.junit.BeforeClass)

Example 8 with FormatPluginConfig

use of org.apache.drill.common.logical.FormatPluginConfig in project drill by apache.

Source: class ClusterFixture, method defineWorkspace.

private void defineWorkspace(Drillbit drillbit, String pluginName, String schemaName, String path, String defaultFormat, FormatPluginConfig format, boolean writable) throws PluginException {
    // Look up the stored file-system plugin config for the named plugin.
    final StoragePluginRegistry registry = drillbit.getContext().getStorage();
    final FileSystemConfig fsConfig = (FileSystemConfig) registry.getStoredConfig(pluginName);
    // Copy any existing workspaces, then add (or overwrite) the requested one.
    final Map<String, WorkspaceConfig> workspaces = new HashMap<>();
    if (fsConfig.getWorkspaces() != null) {
        workspaces.putAll(fsConfig.getWorkspaces());
    }
    workspaces.put(schemaName, new WorkspaceConfig(path, writable, defaultFormat, false));
    // Copy any existing formats; register the supplied format under the default
    // format name only when a format config was actually provided.
    final Map<String, FormatPluginConfig> formats = new HashMap<>();
    if (fsConfig.getFormats() != null) {
        formats.putAll(fsConfig.getFormats());
    }
    if (format != null) {
        formats.put(defaultFormat, format);
    }
    updatePlugin(registry, pluginName, fsConfig, workspaces, formats);
}
Also used : StoragePluginRegistry(org.apache.drill.exec.store.StoragePluginRegistry) HashMap(java.util.HashMap) FormatPluginConfig(org.apache.drill.common.logical.FormatPluginConfig) WorkspaceConfig(org.apache.drill.exec.store.dfs.WorkspaceConfig) FileSystemConfig(org.apache.drill.exec.store.dfs.FileSystemConfig)

Example 9 with FormatPluginConfig

use of org.apache.drill.common.logical.FormatPluginConfig in project drill by apache.

Source: class FormatPluginOptionsDescriptor, method createConfigForTable.

/**
   * Creates an instance of the FormatPluginConfig based on the passed parameters.
   * The first table-function parameter must be the "type" string matching this
   * descriptor's type name; each remaining non-null parameter is written onto the
   * corresponding public field of the config instance via reflection.
   * @param t the signature and the parameters passed to the table function
   * @return the corresponding config
   * @throws UserException (parse error) if the signature does not match this
   *         descriptor, the config cannot be instantiated, or a field cannot be set
   */
FormatPluginConfig createConfigForTable(TableInstance t) {
    // Per the constructor, the first param is always "type"
    TableParamDef typeParamDef = t.sig.params.get(0);
    Object typeParam = t.params.get(0);
    if (!typeParamDef.name.equals("type") || typeParamDef.type != String.class || !(typeParam instanceof String) || !typeName.equalsIgnoreCase((String) typeParam)) {
        // if we reach here, there's a bug as all signatures generated start with a type parameter
        throw UserException.parseError().message("This function signature is not supported: %s\n" + "expecting %s", t.presentParams(), this.presentParams()).addContext("table", t.sig.name).build(logger);
    }
    FormatPluginConfig config;
    try {
        // Class.newInstance() is deprecated: it propagates checked constructor
        // exceptions undeclared. Invoke the no-arg constructor explicitly instead;
        // ReflectiveOperationException covers all four possible reflective failures.
        config = pluginConfigClass.getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException e) {
        throw UserException.parseError(e).message("configuration for format of type %s can not be created (class: %s)", this.typeName, pluginConfigClass.getName()).addContext("table", t.sig.name).build(logger);
    }
    for (int i = 1; i < t.params.size(); i++) {
        Object param = t.params.get(i);
        if (param == null) {
            // when null is passed, we leave the default defined in the config class
            continue;
        }
        if (param instanceof String) {
            // normalize Java literals, ex: \t, \n, \r
            param = StringEscapeUtils.unescapeJava((String) param);
        }
        // Validate the provided parameter against the expected definition by name and type.
        TableParamDef paramDef = t.sig.params.get(i);
        TableParamDef expectedParamDef = this.functionParamsByName.get(paramDef.name);
        if (expectedParamDef == null || expectedParamDef.type != paramDef.type) {
            throw UserException.parseError().message("The parameters provided are not applicable to the type specified:\n" + "provided: %s\nexpected: %s", t.presentParams(), this.presentParams()).addContext("table", t.sig.name).build(logger);
        }
        try {
            Field field = pluginConfigClass.getField(paramDef.name);
            field.setAccessible(true);
            // char fields arrive from SQL as one-character strings; unwrap them.
            if (field.getType() == char.class && param instanceof String) {
                String stringParam = (String) param;
                if (stringParam.length() != 1) {
                    throw UserException.parseError().message("Expected single character but was String: %s", stringParam).addContext("table", t.sig.name).addContext("parameter", paramDef.name).build(logger);
                }
                param = stringParam.charAt(0);
            }
            field.set(config, param);
        } catch (IllegalAccessException | NoSuchFieldException | SecurityException e) {
            throw UserException.parseError(e).message("can not set value %s to parameter %s: %s", param, paramDef.name, paramDef.type).addContext("table", t.sig.name).addContext("parameter", paramDef.name).build(logger);
        }
    }
    return config;
}
Also used : Field(java.lang.reflect.Field) TableParamDef(org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory.TableParamDef) FormatPluginConfig(org.apache.drill.common.logical.FormatPluginConfig)

Example 10 with FormatPluginConfig

use of org.apache.drill.common.logical.FormatPluginConfig in project drill by apache.

Source: class TestLogReaderIssue, method setup.

@BeforeClass
public static void setup() throws Exception {
    // Delegate to the shared base setup with both flags disabled.
    BaseCsvTest.setup(false, false);
    // Create the partition directory and write the same mock log content
    // under two different extensions (.log and .log2).
    File partitionDir = new File(testDir, PART_DIR);
    partitionDir.mkdir();
    for (String fileName : new String[] {"issue7853.log", "issue7853.log2"}) {
        buildFile(new File(partitionDir, fileName), mock_issue7853);
    }
    // Register a distinct log-format config for each extension.
    Map<String, FormatPluginConfig> logFormats = new HashMap<>();
    logFormats.put("log", issue7853Config());
    logFormats.put("log2", issue7853UseValidDatetimeFormatConfig());
    cluster.defineFormats("dfs", logFormats);
}
Also used : HashMap(java.util.HashMap) FormatPluginConfig(org.apache.drill.common.logical.FormatPluginConfig) File(java.io.File) BeforeClass(org.junit.BeforeClass)

Aggregations

FormatPluginConfig (org.apache.drill.common.logical.FormatPluginConfig)16 FileSystemConfig (org.apache.drill.exec.store.dfs.FileSystemConfig)7 HashMap (java.util.HashMap)6 ParquetFormatConfig (org.apache.drill.exec.store.parquet.ParquetFormatConfig)6 DrillFileSystem (org.apache.drill.exec.store.dfs.DrillFileSystem)4 FileSystemPlugin (org.apache.drill.exec.store.dfs.FileSystemPlugin)4 FormatSelection (org.apache.drill.exec.store.dfs.FormatSelection)4 NamedFormatPluginConfig (org.apache.drill.exec.store.dfs.NamedFormatPluginConfig)4 Path (org.apache.hadoop.fs.Path)4 BeforeClass (org.junit.BeforeClass)4 Table (org.apache.calcite.schema.Table)3 SchemaPath (org.apache.drill.common.expression.SchemaPath)3 DrillTable (org.apache.drill.exec.planner.logical.DrillTable)3 IOException (java.io.IOException)2 Field (java.lang.reflect.Field)2 SchemaPlus (org.apache.calcite.schema.SchemaPlus)2 StoragePluginConfig (org.apache.drill.common.logical.StoragePluginConfig)2 SqlRefreshMetadata (org.apache.drill.exec.planner.sql.parser.SqlRefreshMetadata)2 StoragePluginRegistry (org.apache.drill.exec.store.StoragePluginRegistry)2 FileSelection (org.apache.drill.exec.store.dfs.FileSelection)2