Search in sources :

Example 21 with DescriptorProperties

Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

From the class DataGenTableSourceFactoryTest, method testWrongKey.

@Test
public void testWrongKey() {
    try {
        // Configure a datagen table but misspell the rows-per-second option key.
        DescriptorProperties props = new DescriptorProperties();
        props.putString(FactoryUtil.CONNECTOR.key(), "datagen");
        props.putLong("wrong-rows-per-second", 1);
        createTableSource(
                ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), props.asMap());
    } catch (ValidationException e) {
        // The factory wraps the root failure; check both the cause type and its message.
        Throwable cause = e.getCause();
        Assert.assertTrue(cause.toString(), cause instanceof ValidationException);
        Assert.assertTrue(
                cause.getMessage(),
                cause.getMessage().contains("Unsupported options:\n\nwrong-rows-per-second"));
        return;
    }
    // Reaching this point means no exception was thrown — the unknown key was accepted.
    Assert.fail("Should fail by ValidationException.");
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) DataGeneratorSourceTest(org.apache.flink.streaming.api.functions.source.datagen.DataGeneratorSourceTest) Test(org.junit.Test)

Example 22 with DescriptorProperties

Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

From the class CsvTableSinkFactoryTest, method testBatchTableSourceFactory.

@Test
public void testBatchTableSourceFactory() {
    // Without an "update-mode" property the factory produces a batch CSV source.
    final DescriptorProperties props = createDescriptor(testingSchema);
    final TableSource source = createTableSource(props);
    assertTrue(source instanceof CsvTableSource);
    assertEquals(testingSchema.toRowDataType(), source.getProducedDataType());
}
Also used : TableSource(org.apache.flink.table.sources.TableSource) CsvTableSource(org.apache.flink.table.sources.CsvTableSource) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) CsvTableSource(org.apache.flink.table.sources.CsvTableSource) Test(org.junit.Test)

Example 23 with DescriptorProperties

Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

From the class CsvTableSinkFactoryTest, method testAppendTableSourceFactory.

@Test
public void testAppendTableSourceFactory() {
    // Explicitly requesting append mode must still yield a CSV source with the
    // same produced data type as the testing schema.
    final DescriptorProperties props = createDescriptor(testingSchema);
    props.putString("update-mode", "append");
    final TableSource source = createTableSource(props);
    assertTrue(source instanceof CsvTableSource);
    assertEquals(testingSchema.toRowDataType(), source.getProducedDataType());
}
Also used : TableSource(org.apache.flink.table.sources.TableSource) CsvTableSource(org.apache.flink.table.sources.CsvTableSource) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) CsvTableSource(org.apache.flink.table.sources.CsvTableSource) Test(org.junit.Test)

Example 24 with DescriptorProperties

Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

From the class CsvTableSinkFactoryTest, method createDescriptor.

/**
 * Builds the legacy descriptor properties for a filesystem/CSV table with the
 * given schema, honoring the test's tri-state {@code deriveSchema} flag.
 */
private DescriptorProperties createDescriptor(TableSchema schema) {
    // Connector section of the legacy property map.
    final Map<String, String> baseProperties = new HashMap<>();
    baseProperties.put("connector.type", "filesystem");
    baseProperties.put("connector.property-version", "1");
    baseProperties.put("connector.path", "/path/to/csv");
    // Format section.
    baseProperties.put("format.type", "csv");
    baseProperties.put("format.property-version", "1");
    baseProperties.put("format.field-delimiter", ";");

    final DescriptorProperties descriptor = new DescriptorProperties(true);
    descriptor.putProperties(baseProperties);
    descriptor.putTableSchema(SCHEMA, schema);

    // TRUE -> derive the format schema from the table schema;
    // FALSE -> spell out the format fields explicitly;
    // UNDEFINED -> add nothing.
    if (deriveSchema == TernaryBoolean.TRUE) {
        descriptor.putBoolean("format.derive-schema", true);
    } else if (deriveSchema == TernaryBoolean.FALSE) {
        descriptor.putTableSchema(FORMAT_FIELDS, testingSchema);
    }
    return descriptor;
}
Also used : HashMap(java.util.HashMap) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties)

Example 25 with DescriptorProperties

Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

From the class CsvTableSourceFactoryBase, method createTableSource.

/**
 * Creates a {@link CsvTableSource} from legacy string properties: validates the
 * connector/format/schema sections, then transfers every recognized option onto
 * the builder.
 */
protected CsvTableSource createTableSource(Boolean isStreaming, Map<String, String> properties) {
    final DescriptorProperties descriptorProps = new DescriptorProperties();
    descriptorProps.putProperties(properties);

    // Run all legacy validators before reading any option.
    new FileSystemValidator().validate(descriptorProps);
    new OldCsvValidator().validate(descriptorProps);
    new SchemaValidator(isStreaming, false, false).validate(descriptorProps);

    final CsvTableSource.Builder builder = new CsvTableSource.Builder();
    final TableSchema tableSchema =
            TableSchemaUtils.getPhysicalSchema(descriptorProps.getTableSchema(SCHEMA));

    // If an explicit format schema is present it must match the table schema
    // (conversion classes in the DataTypes are ignored for the comparison);
    // the defined table schema always wins regardless of derive-schema.
    if (descriptorProps.hasPrefix(FORMAT_FIELDS)) {
        final TableSchema formatSchema = descriptorProps.getTableSchema(FORMAT_FIELDS);
        if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
            throw new TableException(String.format("Encodings that differ from the schema are not supported yet for" + " CsvTableSource, format schema is '%s', but table schema is '%s'.", formatSchema, tableSchema));
        }
    }

    descriptorProps.getOptionalString(CONNECTOR_PATH).ifPresent(builder::path);
    descriptorProps.getOptionalString(FORMAT_FIELD_DELIMITER).ifPresent(builder::fieldDelimiter);
    descriptorProps.getOptionalString(FORMAT_LINE_DELIMITER).ifPresent(builder::lineDelimiter);

    // Register the physical fields on the builder in schema order.
    for (int idx = 0; idx < tableSchema.getFieldCount(); idx++) {
        builder.field(tableSchema.getFieldNames()[idx], tableSchema.getFieldDataTypes()[idx]);
    }

    descriptorProps.getOptionalCharacter(FORMAT_QUOTE_CHARACTER).ifPresent(builder::quoteCharacter);
    descriptorProps.getOptionalString(FORMAT_COMMENT_PREFIX).ifPresent(builder::commentPrefix);
    // Boolean flags: only a present-and-true value toggles the builder.
    if (descriptorProps.getOptionalBoolean(FORMAT_IGNORE_FIRST_LINE).orElse(false)) {
        builder.ignoreFirstLine();
    }
    if (descriptorProps.getOptionalBoolean(FORMAT_IGNORE_PARSE_ERRORS).orElse(false)) {
        builder.ignoreParseErrors();
    }
    return builder.build();
}
Also used : TableException(org.apache.flink.table.api.TableException) TableSchema(org.apache.flink.table.api.TableSchema) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) OldCsvValidator(org.apache.flink.table.descriptors.OldCsvValidator) FileSystemValidator(org.apache.flink.table.descriptors.FileSystemValidator) SchemaValidator(org.apache.flink.table.descriptors.SchemaValidator)

Aggregations

DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties)32 Test (org.junit.Test)22 ValidationException (org.apache.flink.table.api.ValidationException)13 DataGeneratorSourceTest (org.apache.flink.streaming.api.functions.source.datagen.DataGeneratorSourceTest)9 HashMap (java.util.HashMap)6 TableSchema (org.apache.flink.table.api.TableSchema)6 GenericRowData (org.apache.flink.table.data.GenericRowData)3 RowData (org.apache.flink.table.data.RowData)3 ArrayList (java.util.ArrayList)2 TableException (org.apache.flink.table.api.TableException)2 DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource)2 FileSystemValidator (org.apache.flink.table.descriptors.FileSystemValidator)2 OldCsvValidator (org.apache.flink.table.descriptors.OldCsvValidator)2 SchemaValidator (org.apache.flink.table.descriptors.SchemaValidator)2 CsvTableSink (org.apache.flink.table.sinks.CsvTableSink)2 TableSink (org.apache.flink.table.sinks.TableSink)2 CsvTableSource (org.apache.flink.table.sources.CsvTableSource)2 TableSource (org.apache.flink.table.sources.TableSource)2 DataType (org.apache.flink.table.types.DataType)2 IOException (java.io.IOException)1