Use of org.apache.flink.table.sources.CsvTableSource in project flink by apache.
The class TpcdsTestProgram, method prepareTableEnv.
/**
 * Prepare the TableEnvironment used to run the TPC-DS queries.
 *
 * @param sourceTablePath directory containing the generated TPC-DS data files
 * @param useTableStats whether to register table and column statistics for the optimizer
 * @return a batch TableEnvironment with all TPC-DS tables registered
 */
private static TableEnvironment prepareTableEnv(String sourceTablePath, Boolean useTableStats) {
    // init Table Env
    EnvironmentSettings environmentSettings = EnvironmentSettings.inBatchMode();
    TableEnvironment tEnv = TableEnvironment.create(environmentSettings);

    // config Optimizer parameters
    // TODO use the default shuffle mode of batch runtime mode once FLINK-23470 is implemented
    tEnv.getConfig()
            .getConfiguration()
            .setString(
                    ExecutionConfigOptions.TABLE_EXEC_SHUFFLE_MODE,
                    GlobalStreamExchangeMode.POINTWISE_EDGES_PIPELINED.toString());
    tEnv.getConfig()
            .getConfiguration()
            .setLong(OptimizerConfigOptions.TABLE_OPTIMIZER_BROADCAST_JOIN_THRESHOLD, 10 * 1024 * 1024);
    tEnv.getConfig()
            .getConfiguration()
            .setBoolean(OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED, true);

    // register TPC-DS tables
    TPCDS_TABLES.forEach(
            table -> {
                TpcdsSchema schema = TpcdsSchemaProvider.getTableSchema(table);
                CsvTableSource.Builder builder = CsvTableSource.builder();
                builder.path(sourceTablePath + FILE_SEPARATOR + table + DATA_SUFFIX);
                for (int i = 0; i < schema.getFieldNames().size(); i++) {
                    builder.field(
                            schema.getFieldNames().get(i),
                            TypeConversions.fromDataTypeToLegacyInfo(schema.getFieldTypes().get(i)));
                }
                builder.fieldDelimiter(COL_DELIMITER);
                builder.emptyColumnAsNull();
                builder.lineDelimiter("\n");
                CsvTableSource tableSource = builder.build();
                ConnectorCatalogTable catalogTable = ConnectorCatalogTable.source(tableSource, true);
                tEnv.getCatalog(tEnv.getCurrentCatalog())
                        .ifPresent(
                                catalog -> {
                                    try {
                                        catalog.createTable(
                                                new ObjectPath(tEnv.getCurrentDatabase(), table),
                                                catalogTable,
                                                false);
                                    } catch (Exception e) {
                                        throw new RuntimeException(e);
                                    }
                                });
            });

    // register statistics info
    if (useTableStats) {
        TpcdsStatsProvider.registerTpcdsStats(tEnv);
    }
    return tEnv;
}
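For context, a minimal sketch of how the prepared environment might be used to run a single TPC-DS query; the call site, the hard-coded data path, and the literal query string are illustrative assumptions and not part of the original class.

// Hypothetical usage sketch: run one query on the prepared batch environment
// and print the result. In the real program the query text is loaded from the
// TPC-DS query files rather than written inline.
TableEnvironment tEnv = prepareTableEnv("/path/to/tpcds/data", true);
String query = "SELECT COUNT(*) FROM store_sales";
TableResult result = tEnv.executeSql(query);
result.print();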
Use of org.apache.flink.table.sources.CsvTableSource in project flink by apache.
The class CsvTableSinkFactoryTest, method testBatchTableSourceFactory.
@Test
public void testBatchTableSourceFactory() {
    DescriptorProperties descriptor = createDescriptor(testingSchema);
    TableSource source = createTableSource(descriptor);

    assertTrue(source instanceof CsvTableSource);
    assertEquals(testingSchema.toRowDataType(), source.getProducedDataType());
}
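The createDescriptor helper is not shown on this page. Below is a rough sketch of what it could look like, assuming the legacy filesystem/CSV descriptor property keys (connector.type, connector.path, format.type) and DescriptorProperties.putTableSchema; the exact keys, path, and helper signature are assumptions, not the project's actual code.

// Hypothetical sketch of the createDescriptor helper used by the tests above.
// The property keys follow Flink's legacy CSV/filesystem descriptor format and
// may differ from the real helper in CsvTableSinkFactoryTest.
private static DescriptorProperties createDescriptor(TableSchema schema) {
    DescriptorProperties descriptor = new DescriptorProperties();
    descriptor.putString("connector.type", "filesystem");
    descriptor.putString("connector.path", "/tmp/does-not-matter.csv");
    descriptor.putString("format.type", "csv");
    // register the field names and types of the test schema under the "schema" prefix
    descriptor.putTableSchema("schema", schema);
    return descriptor;
}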
Use of org.apache.flink.table.sources.CsvTableSource in project flink by apache.
The class CsvTableSinkFactoryTest, method testAppendTableSourceFactory.
@Test
public void testAppendTableSourceFactory() {
    DescriptorProperties descriptor = createDescriptor(testingSchema);
    descriptor.putString("update-mode", "append");
    TableSource source = createTableSource(descriptor);

    assertTrue(source instanceof CsvTableSource);
    assertEquals(testingSchema.toRowDataType(), source.getProducedDataType());
}
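Similarly, createTableSource is presumably a thin wrapper around table factory discovery; a sketch under that assumption follows. The use of TableFactoryService and the deprecated map-based createTableSource call are assumptions about the helper's body.

// Hypothetical sketch of the createTableSource helper: look up a matching
// TableSourceFactory for the descriptor's properties and let it build the source.
private static TableSource<?> createTableSource(DescriptorProperties descriptor) {
    Map<String, String> properties = descriptor.asMap();
    return TableFactoryService.find(TableSourceFactory.class, properties)
            .createTableSource(properties);
}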