Example 16 with DescriptorProperties

Use of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

The class CatalogTableImplTest, method testToProperties.

@Test
public void testToProperties() {
    TableSchema schema = createTableSchema();
    Map<String, String> prop = createProperties();
    CatalogTable table = new CatalogTableImpl(schema, createPartitionKeys(), prop, TEST);
    DescriptorProperties descriptorProperties = new DescriptorProperties(false);
    descriptorProperties.putProperties(table.toProperties());
    assertEquals(schema, descriptorProperties.getTableSchema(Schema.SCHEMA));
}
Also used: TableSchema (org.apache.flink.table.api.TableSchema), DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties), Test (org.junit.Test)
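
As a stand-alone illustration of the round trip this test relies on, here is a minimal sketch, assuming the legacy (pre-FLIP-129) descriptor API. The field names and the literal "schema" key are illustrative; the test uses the Schema.SCHEMA constant, which resolves to the same string.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class SchemaRoundTripSketch {
    public static void main(String[] args) {
        // Build a simple schema; field names and types are illustrative.
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.BIGINT())
                .field("name", DataTypes.STRING())
                .build();
        // 'false' turns off key normalization, matching the test above.
        DescriptorProperties props = new DescriptorProperties(false);
        props.putTableSchema("schema", schema);
        // Reading back under the same key should yield an equal schema.
        TableSchema restored = props.getTableSchema("schema");
        System.out.println(schema.equals(restored)); // expected: true
    }
}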

Example 17 with DescriptorProperties

Use of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

The class CsvTableSinkFactoryBase, method createTableSink.

protected CsvTableSink createTableSink(Boolean isStreaming, Map<String, String> properties) {
    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(properties);
    // validate
    new FileSystemValidator().validate(params);
    new OldCsvValidator().validate(params);
    new SchemaValidator(isStreaming, false, false).validate(params);
    // build
    TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));
    // If a format schema is defined, it must match the table schema,
    // regardless of the derive-schema setting.
    final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
    if (hasSchema) {
        TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
        if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
            throw new TableException(String.format("Encodings that differ from the schema are not supported yet for" + " CsvTableSink, format schema is '%s', but table schema is '%s'.", formatSchema, tableSchema));
        }
    }
    String path = params.getString(CONNECTOR_PATH);
    String fieldDelimiter = params.getOptionalString(FORMAT_FIELD_DELIMITER).orElse(",");
    Optional<String> writeModeParam = params.getOptionalString(FORMAT_WRITE_MODE);
    FileSystem.WriteMode writeMode = (writeModeParam.isPresent()) ? FileSystem.WriteMode.valueOf(writeModeParam.get()) : null;
    int numFiles = params.getOptionalInt(FORMAT_NUM_FILES).orElse(-1);
    // bridge to java.sql.Timestamp/Time/Date
    DataType[] dataTypes = Arrays.stream(tableSchema.getFieldDataTypes()).map(dt -> {
        switch(dt.getLogicalType().getTypeRoot()) {
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                return dt.bridgedTo(Timestamp.class);
            case TIME_WITHOUT_TIME_ZONE:
                return dt.bridgedTo(Time.class);
            case DATE:
                return dt.bridgedTo(Date.class);
            default:
                return dt;
        }
    }).toArray(DataType[]::new);
    return new CsvTableSink(path, fieldDelimiter, numFiles, writeMode, tableSchema.getFieldNames(), dataTypes);
}
Also used: DataType (org.apache.flink.table.types.DataType), FORMAT_FIELDS (org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_FIELDS), Arrays (java.util.Arrays), CONNECTOR_PROPERTY_VERSION (org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_PROPERTY_VERSION), TableFactoryService (org.apache.flink.table.factories.TableFactoryService), Time (java.sql.Time), CONNECTOR_PATH (org.apache.flink.table.descriptors.FileSystemValidator.CONNECTOR_PATH), FileSystemValidator (org.apache.flink.table.descriptors.FileSystemValidator), HashMap (java.util.HashMap), FORMAT_NUM_FILES (org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_NUM_FILES), FORMAT_WRITE_MODE (org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_WRITE_MODE), ArrayList (java.util.ArrayList), FORMAT_FIELD_DELIMITER (org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_FIELD_DELIMITER), CONNECTOR_TYPE (org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_TYPE), Map (java.util.Map), SchemaValidator (org.apache.flink.table.descriptors.SchemaValidator), TableFactory (org.apache.flink.table.factories.TableFactory), CsvTableSourceFactoryBase.getFieldLogicalTypes (org.apache.flink.table.sources.CsvTableSourceFactoryBase.getFieldLogicalTypes), CONNECTOR_TYPE_VALUE (org.apache.flink.table.descriptors.FileSystemValidator.CONNECTOR_TYPE_VALUE), SCHEMA (org.apache.flink.table.descriptors.Schema.SCHEMA), Timestamp (java.sql.Timestamp), COMMENT (org.apache.flink.table.descriptors.DescriptorProperties.COMMENT), TableException (org.apache.flink.table.api.TableException), TableSchema (org.apache.flink.table.api.TableSchema), Date (java.sql.Date), OldCsvValidator (org.apache.flink.table.descriptors.OldCsvValidator), List (java.util.List), FileSystem (org.apache.flink.core.fs.FileSystem), DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties), Optional (java.util.Optional), Internal (org.apache.flink.annotation.Internal), TableSchemaUtils (org.apache.flink.table.utils.TableSchemaUtils), FORMAT_TYPE_VALUE (org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_TYPE_VALUE)
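
The type-bridging loop at the end is the subtle part: it keeps each logical type but rebinds the default java.time conversion classes to the java.sql ones the legacy CSV sink expects. A minimal sketch of that idiom in isolation, using standard DataTypes factory methods (the precision value is illustrative):

import java.sql.Timestamp;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class BridgingSketch {
    public static void main(String[] args) {
        // TIMESTAMP columns convert to java.time.LocalDateTime by default.
        DataType defaultType = DataTypes.TIMESTAMP(3);
        // bridgedTo() keeps the logical type but swaps the conversion class.
        DataType bridged = defaultType.bridgedTo(Timestamp.class);
        System.out.println(defaultType.getConversionClass()); // java.time.LocalDateTime
        System.out.println(bridged.getConversionClass());     // java.sql.Timestamp
    }
}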

Example 18 with DescriptorProperties

Use of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

The class CatalogTableImpl, method toProperties.

@Override
public Map<String, String> toProperties() {
    DescriptorProperties descriptor = new DescriptorProperties(false);
    descriptor.putTableSchema(SCHEMA, getSchema());
    descriptor.putPartitionKeys(getPartitionKeys());
    Map<String, String> properties = new HashMap<>(getOptions());
    descriptor.putProperties(properties);
    return descriptor.asMap();
}
Also used: DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties), HashMap (java.util.HashMap)
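
A hedged usage sketch of the method above: construct a CatalogTableImpl and flatten it. The option key is made up for illustration, and the exact "schema.N.*" key layout is an implementation detail of DescriptorProperties, not a stable contract.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogTableImpl;

public class ToPropertiesSketch {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                .field("f0", DataTypes.INT())
                .build();
        Map<String, String> options = new HashMap<>();
        options.put("connector", "datagen"); // illustrative option
        CatalogTableImpl table = new CatalogTableImpl(
                schema, Collections.emptyList(), options, "a comment");
        // The flattened map mixes schema keys (e.g. schema.0.name) with the options.
        for (Map.Entry<String, String> e : table.toProperties().entrySet()) {
            System.out.println(e.getKey() + " = " + e.getValue());
        }
    }
}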

Example 19 with DescriptorProperties

Use of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

The class CatalogTableImpl, method removeRedundant.

/**
 * Removes the schema and partition-key entries that {@link #toProperties()} adds,
 * leaving only the table's original options.
 */
public static Map<String, String> removeRedundant(Map<String, String> properties, TableSchema schema, List<String> partitionKeys) {
    Map<String, String> ret = new HashMap<>(properties);
    DescriptorProperties descriptorProperties = new DescriptorProperties(false);
    descriptorProperties.putTableSchema(SCHEMA, schema);
    descriptorProperties.putPartitionKeys(partitionKeys);
    descriptorProperties.asMap().keySet().forEach(ret::remove);
    return ret;
}
Also used: HashMap (java.util.HashMap), DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties)
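
Taken together, toProperties() and removeRedundant form a round trip: flattening a table and then stripping the schema and partition-key entries should recover the original options. A minimal sketch under that assumption (the option keys are illustrative):

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogTableImpl;

public class RemoveRedundantSketch {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                .field("f0", DataTypes.INT())
                .build();
        List<String> partitionKeys = Collections.emptyList();
        Map<String, String> options = new HashMap<>();
        options.put("connector", "filesystem"); // illustrative options
        options.put("path", "/tmp/data");

        CatalogTableImpl table = new CatalogTableImpl(schema, partitionKeys, options, "");
        Map<String, String> flattened = table.toProperties();
        // Remove the schema/partition-key entries that toProperties() added.
        Map<String, String> recovered =
                CatalogTableImpl.removeRedundant(flattened, schema, partitionKeys);
        System.out.println(recovered.equals(options)); // expected: true
    }
}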

Example 20 with DescriptorProperties

Use of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.

The class DataGenTableSourceFactoryTest, method testLackEndForSequence.

@Test
public void testLackEndForSequence() {
    try {
        DescriptorProperties descriptor = new DescriptorProperties();
        descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen");
        descriptor.putString(DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.SEQUENCE);
        descriptor.putLong(DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.START, 0);
        createTableSource(ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), descriptor.asMap());
    } catch (ValidationException e) {
        Throwable cause = e.getCause();
        Assert.assertTrue(cause.toString(), cause instanceof ValidationException);
        Assert.assertTrue(cause.getMessage(), cause.getMessage().contains("Could not find required property 'fields.f0.end' for sequence generator."));
        return;
    }
    Assert.fail("Should fail by ValidationException.");
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties), DataGeneratorSourceTest (org.apache.flink.streaming.api.functions.source.datagen.DataGeneratorSourceTest), Test (org.junit.Test)
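
For contrast, a sketch of a descriptor that should pass this validation: the sequence generator requires both bounds, so 'fields.f0.end' is supplied alongside 'fields.f0.start'. The literal keys mirror the FactoryUtil and DataGenConnectorOptionsUtil constants used in the test and are written out here only for readability.

import java.util.Map;

import org.apache.flink.table.descriptors.DescriptorProperties;

public class SequenceOptionsSketch {
    public static void main(String[] args) {
        DescriptorProperties descriptor = new DescriptorProperties();
        descriptor.putString("connector", "datagen");
        descriptor.putString("fields.f0.kind", "sequence");
        descriptor.putLong("fields.f0.start", 0);
        // Supplying the end bound avoids the ValidationException above.
        descriptor.putLong("fields.f0.end", 100);
        Map<String, String> options = descriptor.asMap();
        options.forEach((k, v) -> System.out.println(k + " = " + v));
    }
}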

Aggregations

DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties): 32
Test (org.junit.Test): 22
ValidationException (org.apache.flink.table.api.ValidationException): 13
DataGeneratorSourceTest (org.apache.flink.streaming.api.functions.source.datagen.DataGeneratorSourceTest): 9
HashMap (java.util.HashMap): 6
TableSchema (org.apache.flink.table.api.TableSchema): 6
GenericRowData (org.apache.flink.table.data.GenericRowData): 3
RowData (org.apache.flink.table.data.RowData): 3
ArrayList (java.util.ArrayList): 2
TableException (org.apache.flink.table.api.TableException): 2
DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource): 2
FileSystemValidator (org.apache.flink.table.descriptors.FileSystemValidator): 2
OldCsvValidator (org.apache.flink.table.descriptors.OldCsvValidator): 2
SchemaValidator (org.apache.flink.table.descriptors.SchemaValidator): 2
CsvTableSink (org.apache.flink.table.sinks.CsvTableSink): 2
TableSink (org.apache.flink.table.sinks.TableSink): 2
CsvTableSource (org.apache.flink.table.sources.CsvTableSource): 2
TableSource (org.apache.flink.table.sources.TableSource): 2
DataType (org.apache.flink.table.types.DataType): 2
IOException (java.io.IOException): 1