use of org.apache.drill.exec.store.easy.text.TextFormatPlugin.TextFormatConfig in project drill by apache.
the class BaseCsvTest method setup.
protected static void setup(boolean skipFirstLine, boolean extractHeader, int maxParallelization) throws Exception {
  startCluster(ClusterFixture.builder(dirTestWatcher).maxParallelization(maxParallelization));
  // Set up the CSV storage plugin using headers.
  TextFormatConfig csvFormat = new TextFormatConfig(
      null,            // extensions
      null,            // line delimiter
      null,            // field delimiter
      null,            // quote
      null,            // escape
      null,            // comment
      skipFirstLine,   // skip first line
      extractHeader);  // extract header
  testDir = cluster.makeDataDir("data", "csv", csvFormat);
}
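For context, a subclass typically drives this helper from its own @BeforeClass method. A minimal sketch, assuming a hypothetical test class name and illustrative argument values (not taken from the Drill sources):

import org.junit.BeforeClass;

public class ExampleCsvQueryTest extends BaseCsvTest {

  @BeforeClass
  public static void init() throws Exception {
    // skipFirstLine = false, extractHeader = true, maxParallelization = 2 (illustrative values)
    setup(false, true, 2);
  }
}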
use of org.apache.drill.exec.store.easy.text.TextFormatPlugin.TextFormatConfig in project drill by apache.
the class TestTextWriter method setup.
@BeforeClass
public static void setup() throws Exception {
  ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
  startCluster(builder);
  Map<String, FormatPluginConfig> formats = new HashMap<>();
  formats.put("csv", new TextFormatConfig(
      Collections.singletonList("csv"),  // extensions
      "\n",    // line delimiter
      ",",     // field delimiter
      "\"",    // quote
      "\"",    // escape
      null,    // comment
      false,   // skip first line
      true));  // extract header
  formats.put("tsv", new TextFormatConfig(
      Collections.singletonList("tsv"),  // extensions
      "\n",    // line delimiter
      "\t",    // field delimiter
      "\"",    // quote
      "\"",    // escape
      null,    // comment
      false,   // skip first line
      true));  // extract header
  formats.put("custom", new TextFormatConfig(
      Collections.singletonList("custom"),  // extensions
      "!",     // line delimiter
      "_",     // field delimiter
      "$",     // quote
      "^",     // escape
      null,    // comment
      false,   // skip first line
      true));  // extract header
  cluster.defineFormats("dfs", formats);
}
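As a usage sketch, a test in this class can then pick one of the formats defined above as the writer output and create a table with it, in the same way testLineDelimiterLengthLimit below does; the table name here is an illustrative assumption:

client.alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "tsv");
String fullTableName = "dfs.tmp.`tsv_output_table`";  // illustrative name
tablesToDrop.add(fullTableName);
queryBuilder().sql("create table %s as select 1 as id from (values(1))", fullTableName).run();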
use of org.apache.drill.exec.store.easy.text.TextFormatPlugin.TextFormatConfig in project drill by apache.
the class StoragePluginTestUtils method configureFormatPlugins.
public static void configureFormatPlugins(StoragePluginRegistry pluginRegistry, String storagePlugin) throws PluginException {
  FileSystemConfig fileSystemConfig = (FileSystemConfig) pluginRegistry.getStoredConfig(storagePlugin);
  Map<String, FormatPluginConfig> newFormats = new HashMap<>();
  Optional.ofNullable(fileSystemConfig.getFormats()).ifPresent(newFormats::putAll);
  // Text format arguments: extensions, line delimiter, field delimiter, quote, escape, comment, skip first line, extract header.
  newFormats.put("txt", new TextFormatConfig(ImmutableList.of("txt"), null, "\u0000", null, null, null, null, null));
  newFormats.put("ssv", new TextFormatConfig(ImmutableList.of("ssv"), null, " ", null, null, null, null, null));
  newFormats.put("psv", new TextFormatConfig(ImmutableList.of("tbl"), null, "|", null, null, null, null, null));
  SequenceFileFormatConfig seqConfig = new SequenceFileFormatConfig(ImmutableList.of("seq"));
  newFormats.put("sequencefile", seqConfig);
  newFormats.put("csvh-test", new TextFormatConfig(ImmutableList.of("csvh-test"), null, ",", null, null, null, true, true));
  FileSystemConfig newFileSystemConfig = new FileSystemConfig(
      fileSystemConfig.getConnection(),
      fileSystemConfig.getConfig(),
      fileSystemConfig.getWorkspaces(),
      newFormats,
      PlainCredentialsProvider.EMPTY_CREDENTIALS_PROVIDER);
  newFileSystemConfig.setEnabled(fileSystemConfig.isEnabled());
  pluginRegistry.put(storagePlugin, newFileSystemConfig);
}
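Once the "psv" entry above is registered, pipe-delimited files with the .tbl extension become readable through the plugin. A minimal sketch from a test that has a cluster fixture, assuming an illustrative file path and using the columns array that Drill exposes for text files read without headers:

queryBuilder()
    .sql("select columns[0] as r_regionkey, columns[1] as r_name from dfs.`/path/to/region.tbl`")
    .run();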
use of org.apache.drill.exec.store.easy.text.TextFormatPlugin.TextFormatConfig in project drill by apache.
the class TestTextWriter method testLineDelimiterLengthLimit.
@Test
public void testLineDelimiterLengthLimit() throws Exception {
  TextFormatConfig incorrect = new TextFormatConfig(
      null,    // extensions
      "end",   // line delimiter
      null,    // field delimiter
      null,    // quote
      null,    // escape
      null,    // comment
      false,   // skip first line
      false);  // extract header
  cluster.defineFormat("dfs", "incorrect", incorrect);
  client.alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "incorrect");
  String tableName = "incorrect_line_delimiter_table";
  String fullTableName = String.format("dfs.tmp.`%s`", tableName);
  tablesToDrop.add(fullTableName);
  // univocity-parsers allows only one- or two-character line separators
  thrown.expect(UserException.class);
  thrown.expectMessage("Invalid line separator");
  queryBuilder().sql("create table %s as select 1 as id from (values(1))", fullTableName).run();
}
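For contrast, a sketch of a configuration that passes the same check because its line delimiter is at most two characters; the "crlf" format name is an illustrative assumption:

TextFormatConfig correct = new TextFormatConfig(
    null,      // extensions
    "\r\n",    // line delimiter (two characters, accepted by univocity-parsers)
    null,      // field delimiter
    null,      // quote
    null,      // escape
    null,      // comment
    false,     // skip first line
    false);    // extract header
cluster.defineFormat("dfs", "crlf", correct);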