
Example 46 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class UpsertKafkaDynamicTableFactoryTest, method testTableSource.

@Test
public void testTableSource() {
    final DataType producedDataType = SOURCE_SCHEMA.toPhysicalRowDataType();
    // Construct table source using options and table source factory
    final DynamicTableSource actualSource = createTableSource(SOURCE_SCHEMA, getFullSourceOptions());
    final KafkaDynamicSource expectedSource = createExpectedScanSource(producedDataType, keyDecodingFormat, valueDecodingFormat, SOURCE_KEY_FIELDS, SOURCE_VALUE_FIELDS, null, SOURCE_TOPIC, UPSERT_KAFKA_SOURCE_PROPERTIES);
    assertEquals(actualSource, expectedSource);
    final KafkaDynamicSource actualUpsertKafkaSource = (KafkaDynamicSource) actualSource;
    ScanTableSource.ScanRuntimeProvider provider = actualUpsertKafkaSource.getScanRuntimeProvider(ScanRuntimeProviderContext.INSTANCE);
    assertKafkaSource(provider);
}
Also used: ScanTableSource (org.apache.flink.table.connector.source.ScanTableSource), DataType (org.apache.flink.table.types.DataType), AtomicDataType (org.apache.flink.table.types.AtomicDataType), DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource), Test (org.junit.Test)
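
SOURCE_SCHEMA, the key/value format mocks, and the field index arrays are constants defined elsewhere in UpsertKafkaDynamicTableFactoryTest. As a minimal sketch of where such a produced DataType comes from, assuming a hypothetical ResolvedSchema (the column names below are illustrative, not the ones used by the Flink test):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.types.DataType;

// Hypothetical schema standing in for SOURCE_SCHEMA.
ResolvedSchema schema =
        ResolvedSchema.of(
                Column.physical("window_start", DataTypes.STRING()),
                Column.physical("region", DataTypes.STRING()),
                Column.physical("view_count", DataTypes.BIGINT()));

// Flattens the physical columns into a single ROW DataType,
// e.g. ROW<`window_start` STRING, `region` STRING, `view_count` BIGINT>.
DataType producedDataType = schema.toPhysicalRowDataType();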

Example 47 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class KafkaDynamicTableFactoryTest, method testTableSourceWithKeyValueAndMetadata.

@Test
public void testTableSourceWithKeyValueAndMetadata() {
    final Map<String, String> options = getKeyValueOptions();
    options.put("value.test-format.readable-metadata", "metadata_1:INT, metadata_2:STRING");
    final DynamicTableSource actualSource = createTableSource(SCHEMA_WITH_METADATA, options);
    final KafkaDynamicSource actualKafkaSource = (KafkaDynamicSource) actualSource;
    // initialize stateful testing formats
    actualKafkaSource.applyReadableMetadata(Arrays.asList("timestamp", "value.metadata_2"), SCHEMA_WITH_METADATA.toSourceRowDataType());
    actualKafkaSource.getScanRuntimeProvider(ScanRuntimeProviderContext.INSTANCE);
    final DecodingFormatMock expectedKeyFormat = new DecodingFormatMock("#", false, ChangelogMode.insertOnly(), Collections.emptyMap());
    expectedKeyFormat.producedDataType = DataTypes.ROW(DataTypes.FIELD(NAME, DataTypes.STRING())).notNull();
    final Map<String, DataType> expectedReadableMetadata = new HashMap<>();
    expectedReadableMetadata.put("metadata_1", DataTypes.INT());
    expectedReadableMetadata.put("metadata_2", DataTypes.STRING());
    final DecodingFormatMock expectedValueFormat = new DecodingFormatMock("|", false, ChangelogMode.insertOnly(), expectedReadableMetadata);
    expectedValueFormat.producedDataType = DataTypes.ROW(DataTypes.FIELD(COUNT, DataTypes.DECIMAL(38, 18)), DataTypes.FIELD("metadata_2", DataTypes.STRING())).notNull();
    expectedValueFormat.metadataKeys = Collections.singletonList("metadata_2");
    final KafkaDynamicSource expectedKafkaSource = createExpectedScanSource(SCHEMA_WITH_METADATA.toPhysicalRowDataType(), expectedKeyFormat, expectedValueFormat, new int[] { 0 }, new int[] { 1 }, null, Collections.singletonList(TOPIC), null, KAFKA_FINAL_SOURCE_PROPERTIES, StartupMode.GROUP_OFFSETS, Collections.emptyMap(), 0);
    expectedKafkaSource.producedDataType = SCHEMA_WITH_METADATA.toSourceRowDataType();
    expectedKafkaSource.metadataKeys = Collections.singletonList("timestamp");
    assertThat(actualSource).isEqualTo(expectedKafkaSource);
}
Also used: HashMap (java.util.HashMap), DecodingFormatMock (org.apache.flink.table.factories.TestFormatFactory.DecodingFormatMock), DataType (org.apache.flink.table.types.DataType), DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource), Test (org.junit.jupiter.api.Test), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
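
Here applyReadableMetadata receives SCHEMA_WITH_METADATA.toSourceRowDataType() rather than the physical row type, because the source row type also carries the metadata columns the source is expected to produce. A rough sketch of the difference between the two, using an assumed schema with one metadata column (illustrative names, not the test's actual SCHEMA_WITH_METADATA):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.types.DataType;

// Assumed schema: two physical columns plus one metadata column.
ResolvedSchema schema =
        ResolvedSchema.of(
                Column.physical("name", DataTypes.STRING()),
                Column.physical("count", DataTypes.DECIMAL(38, 18)),
                Column.metadata("timestamp", DataTypes.TIMESTAMP(3), "timestamp", false));

// Physical row type: ROW<`name` STRING, `count` DECIMAL(38, 18)>
DataType physical = schema.toPhysicalRowDataType();

// Source row type additionally contains the metadata column:
// ROW<`name` STRING, `count` DECIMAL(38, 18), `timestamp` TIMESTAMP(3)>
DataType source = schema.toSourceRowDataType();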

Example 48 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class KafkaConnectorOptionsUtilTest, method testMissingKeyFormatProjection.

@Test
public void testMissingKeyFormatProjection() {
    final DataType dataType = ROW(FIELD("id", INT()));
    final Map<String, String> options = createTestOptions();
    final Configuration config = Configuration.fromMap(options);
    try {
        createKeyFormatProjection(config, dataType);
        fail();
    } catch (ValidationException e) {
        assertThat(e, hasMessage(equalTo("A key format 'key.format' requires the declaration of one or more " + "of key fields using 'key.fields'.")));
    }
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), Configuration (org.apache.flink.configuration.Configuration), DataType (org.apache.flink.table.types.DataType), Test (org.junit.Test)
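
For contrast, a hedged sketch of the passing case: once 'key.fields' names at least one physical column, createKeyFormatProjection returns an index projection instead of throwing. The option values below are assumptions (the real createTestOptions() configures its own formats), and createKeyFormatProjection is resolved the same way as in KafkaConnectorOptionsUtilTest:

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.types.DataType;
import static org.apache.flink.table.api.DataTypes.FIELD;
import static org.apache.flink.table.api.DataTypes.INT;
import static org.apache.flink.table.api.DataTypes.ROW;
import static org.apache.flink.table.api.DataTypes.STRING;

Map<String, String> options = new HashMap<>();
options.put("key.format", "test-format"); // hypothetical format name
options.put("key.fields", "id");
Configuration config = Configuration.fromMap(options);

DataType dataType = ROW(FIELD("id", INT()), FIELD("name", STRING()));
// Maps the key format onto the physical row; for the single key field "id" this should be {0}.
int[] keyProjection = createKeyFormatProjection(config, dataType);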

Example 49 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class KafkaConnectorOptionsUtilTest, method testInvalidKeyFormatFieldProjection.

@Test
public void testInvalidKeyFormatFieldProjection() {
    final DataType dataType = ROW(FIELD("id", INT()), FIELD("name", STRING()));
    final Map<String, String> options = createTestOptions();
    options.put("key.fields", "non_existing");
    final Configuration config = Configuration.fromMap(options);
    try {
        createKeyFormatProjection(config, dataType);
        fail();
    } catch (ValidationException e) {
        assertThat(e, hasMessage(equalTo("Could not find the field 'non_existing' in the table schema for " + "usage in the key format. A key field must be a regular, " + "physical column. The following columns can be selected " + "in the 'key.fields' option:\n" + "[id, name]")));
    }
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), Configuration (org.apache.flink.configuration.Configuration), DataType (org.apache.flink.table.types.DataType), Test (org.junit.Test)
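
The newer tests in this module (see Example 47 above) favor AssertJ over the try/catch-and-fail pattern. A sketch of the same check in that style, assuming the identical setup and helpers; the message fragment is taken from the assertion above:

import static org.assertj.core.api.Assertions.assertThatThrownBy;

// Same setup as in the test: 'key.fields' points at a non-existing column.
assertThatThrownBy(() -> createKeyFormatProjection(config, dataType))
        .isInstanceOf(ValidationException.class)
        .hasMessageContaining("Could not find the field 'non_existing' in the table schema");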

Example 50 with DataType

Use of org.apache.flink.table.types.DataType in project flink by apache.

The class KafkaConnectorOptionsUtilTest, method testInvalidValueFormatProjection.

@Test
public void testInvalidValueFormatProjection() {
    final DataType dataType = ROW(FIELD("k_id", INT()), FIELD("id", STRING()));
    final Map<String, String> options = createTestOptions();
    options.put("key.fields", "k_id");
    options.put("key.fields-prefix", "k_");
    final Configuration config = Configuration.fromMap(options);
    try {
        createValueFormatProjection(config, dataType);
        fail();
    } catch (ValidationException e) {
        assertThat(e, hasMessage(equalTo("A key prefix is not allowed when option 'value.fields-include' " + "is set to 'ALL'. Set it to 'EXCEPT_KEY' instead to avoid field overlaps.")));
    }
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), Configuration (org.apache.flink.configuration.Configuration), DataType (org.apache.flink.table.types.DataType), Test (org.junit.Test)
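
The failure here comes from the default 'value.fields-include' = 'ALL', which would copy the prefixed key fields into the value format. A hedged sketch of the valid combination the error message points to, reusing options and dataType from the test above (the expected projection is an assumption based on EXCEPT_KEY semantics, not an assertion from the Flink test):

// Exclude the key fields (prefix "k_") from the value format so the prefix is allowed.
options.put("value.fields-include", "EXCEPT_KEY");
final Configuration validConfig = Configuration.fromMap(options);
// For ROW<k_id INT, id STRING> with key field "k_id", only "id" remains in the value,
// so the projection should be {1}.
final int[] valueProjection = createValueFormatProjection(validConfig, dataType);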

Aggregations

DataType (org.apache.flink.table.types.DataType): 260
Test (org.junit.Test): 72
RowType (org.apache.flink.table.types.logical.RowType): 59
LogicalType (org.apache.flink.table.types.logical.LogicalType): 58
RowData (org.apache.flink.table.data.RowData): 54
List (java.util.List): 38
FieldsDataType (org.apache.flink.table.types.FieldsDataType): 32
ValidationException (org.apache.flink.table.api.ValidationException): 31
ArrayList (java.util.ArrayList): 29
Collectors (java.util.stream.Collectors): 24
AtomicDataType (org.apache.flink.table.types.AtomicDataType): 24
Map (java.util.Map): 23
Internal (org.apache.flink.annotation.Internal): 23
TableException (org.apache.flink.table.api.TableException): 23
HashMap (java.util.HashMap): 22
GenericRowData (org.apache.flink.table.data.GenericRowData): 22
Row (org.apache.flink.types.Row): 22
TableSchema (org.apache.flink.table.api.TableSchema): 20
TypeConversions.fromLogicalToDataType (org.apache.flink.table.types.utils.TypeConversions.fromLogicalToDataType): 19
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 18