Use of org.apache.flink.table.descriptors.DescriptorProperties in project flink by apache.
The class DataGenTableSourceFactoryTest, method testWrongKey:
@Test
public void testWrongKey() {
    try {
        DescriptorProperties descriptor = new DescriptorProperties();
        descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen");
        // an option key that no factory declares
        descriptor.putLong("wrong-rows-per-second", 1);
        createTableSource(
                ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())),
                descriptor.asMap());
    } catch (ValidationException e) {
        // the thrown exception wraps the actual validation failure
        Throwable cause = e.getCause();
        Assert.assertTrue(cause.toString(), cause instanceof ValidationException);
        Assert.assertTrue(
                cause.getMessage(),
                cause.getMessage().contains("Unsupported options:\n\nwrong-rows-per-second"));
        return;
    }
    Assert.fail("Should fail by ValidationException.");
}
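The test above works because DescriptorProperties is a typed facade over a flat String-to-String map: typed puts such as putLong are stored as strings, and asMap() exposes the flattened view that table factories consume. A minimal standalone sketch of that round trip (the class name DescriptorPropertiesDemo and the keys are illustrative, not from the Flink tests):

import java.util.Map;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class DescriptorPropertiesDemo {
    public static void main(String[] args) {
        DescriptorProperties descriptor = new DescriptorProperties();
        descriptor.putString("connector", "datagen");
        descriptor.putLong("rows-per-second", 1);

        // every typed put is stored as a plain string; asMap() exposes the
        // flattened key/value view that table factories consume
        Map<String, String> map = descriptor.asMap();
        System.out.println(map); // e.g. {connector=datagen, rows-per-second=1}
    }
}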
Use of org.apache.flink.table.descriptors.DescriptorProperties in project flink by apache.
The class CsvTableSinkFactoryTest, method testBatchTableSourceFactory:
@Test
public void testBatchTableSourceFactory() {
    DescriptorProperties descriptor = createDescriptor(testingSchema);
    TableSource source = createTableSource(descriptor);
    assertTrue(source instanceof CsvTableSource);
    assertEquals(testingSchema.toRowDataType(), source.getProducedDataType());
}
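The createDescriptor helper (shown further below) relies on putTableSchema to flatten a TableSchema into indexed string keys. A hedged round-trip sketch, assuming the literal prefix "schema" (the SCHEMA constant used in the test) and a hypothetical class name:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class SchemaRoundTripDemo {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.BIGINT())
                .field("name", DataTypes.STRING())
                .build();

        DescriptorProperties descriptor = new DescriptorProperties(true);
        // flattens the schema into indexed keys under the "schema." prefix
        descriptor.putTableSchema("schema", schema);

        // the schema can be rebuilt from the flattened properties
        TableSchema restored = descriptor.getTableSchema("schema");
        System.out.println(schema.equals(restored)); // expected: true
    }
}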
Use of org.apache.flink.table.descriptors.DescriptorProperties in project flink by apache.
The class CsvTableSinkFactoryTest, method testAppendTableSourceFactory:
@Test
public void testAppendTableSourceFactory() {
    DescriptorProperties descriptor = createDescriptor(testingSchema);
    // request an append-only source explicitly
    descriptor.putString("update-mode", "append");
    TableSource source = createTableSource(descriptor);
    assertTrue(source instanceof CsvTableSource);
    assertEquals(testingSchema.toRowDataType(), source.getProducedDataType());
}
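In the legacy descriptor stack, update-mode declares how a streaming table exchanges changes; besides append, the stream descriptor validator also accepts retract and upsert. The batch variant above omits the key entirely.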
Use of org.apache.flink.table.descriptors.DescriptorProperties in project flink by apache.
The class CsvTableSinkFactoryTest, method createDescriptor:
private DescriptorProperties createDescriptor(TableSchema schema) {
    Map<String, String> properties = new HashMap<>();
    // connector properties
    properties.put("connector.type", "filesystem");
    properties.put("connector.property-version", "1");
    properties.put("connector.path", "/path/to/csv");
    // format properties
    properties.put("format.type", "csv");
    properties.put("format.property-version", "1");
    properties.put("format.field-delimiter", ";");

    DescriptorProperties descriptor = new DescriptorProperties(true);
    descriptor.putProperties(properties);
    descriptor.putTableSchema(SCHEMA, schema);
    if (deriveSchema == TernaryBoolean.TRUE) {
        descriptor.putBoolean("format.derive-schema", true);
    } else if (deriveSchema == TernaryBoolean.FALSE) {
        descriptor.putTableSchema(FORMAT_FIELDS, testingSchema);
    }
    // nothing to put for UNDEFINED
    return descriptor;
}
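The createTableSource(descriptor) helper these tests call is not shown here; presumably it resolves a matching factory from the flattened properties. A hedged sketch of that lookup using the legacy TableFactoryService API (this helper body is an assumption, not the test's actual code):

import java.util.Map;
import org.apache.flink.table.descriptors.DescriptorProperties;
import org.apache.flink.table.factories.TableFactoryService;
import org.apache.flink.table.factories.TableSourceFactory;
import org.apache.flink.table.sources.TableSource;

private static TableSource<?> createTableSource(DescriptorProperties descriptor) {
    Map<String, String> props = descriptor.asMap();
    // find a TableSourceFactory whose declared context matches the properties,
    // then let it build the source from the same map (deprecated legacy API)
    return TableFactoryService.find(TableSourceFactory.class, props)
            .createTableSource(props);
}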
Use of org.apache.flink.table.descriptors.DescriptorProperties in project flink by apache.
The class CsvTableSourceFactoryBase, method createTableSource:
protected CsvTableSource createTableSource(Boolean isStreaming, Map<String, String> properties) {
    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(properties);

    // validate
    new FileSystemValidator().validate(params);
    new OldCsvValidator().validate(params);
    new SchemaValidator(isStreaming, false, false).validate(params);

    // build
    CsvTableSource.Builder csvTableSourceBuilder = new CsvTableSource.Builder();
    TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));

    // if a format schema is defined, the declared table schema is still used
    // (whether or not derive-schema is set), but both must agree on field types
    final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
    if (hasSchema) {
        TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
        // ignore conversion classes in DataType
        if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
            throw new TableException(
                    String.format(
                            "Encodings that differ from the schema are not supported yet for"
                                    + " CsvTableSource, format schema is '%s', but table schema is '%s'.",
                            formatSchema, tableSchema));
        }
    }

    params.getOptionalString(CONNECTOR_PATH).ifPresent(csvTableSourceBuilder::path);
    params.getOptionalString(FORMAT_FIELD_DELIMITER).ifPresent(csvTableSourceBuilder::fieldDelimiter);
    params.getOptionalString(FORMAT_LINE_DELIMITER).ifPresent(csvTableSourceBuilder::lineDelimiter);
    for (int i = 0; i < tableSchema.getFieldCount(); ++i) {
        csvTableSourceBuilder.field(tableSchema.getFieldNames()[i], tableSchema.getFieldDataTypes()[i]);
    }
    params.getOptionalCharacter(FORMAT_QUOTE_CHARACTER).ifPresent(csvTableSourceBuilder::quoteCharacter);
    params.getOptionalString(FORMAT_COMMENT_PREFIX).ifPresent(csvTableSourceBuilder::commentPrefix);
    params.getOptionalBoolean(FORMAT_IGNORE_FIRST_LINE).ifPresent(flag -> {
        if (flag) {
            csvTableSourceBuilder.ignoreFirstLine();
        }
    });
    params.getOptionalBoolean(FORMAT_IGNORE_PARSE_ERRORS).ifPresent(flag -> {
        if (flag) {
            csvTableSourceBuilder.ignoreParseErrors();
        }
    });
    return csvTableSourceBuilder.build();
}
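For comparison, the same source can be assembled without the descriptor machinery by driving the builder directly, using only the builder methods that appear in the factory above; a minimal sketch (class name, path, and field names are made up):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.sources.CsvTableSource;

public class CsvSourceBuilderDemo {
    public static void main(String[] args) {
        CsvTableSource source = new CsvTableSource.Builder()
                .path("/path/to/data.csv") // hypothetical file
                .field("id", DataTypes.BIGINT())
                .field("name", DataTypes.STRING())
                .fieldDelimiter(";")
                .ignoreFirstLine()
                .ignoreParseErrors()
                .build();
        System.out.println(source.explainSource());
    }
}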