Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.
From the class CatalogTableImplTest, method testToProperties:
@Test
public void testToProperties() {
    // Round-trip check: serializing a catalog table to properties and reading
    // them back through DescriptorProperties must preserve the table schema.
    TableSchema originalSchema = createTableSchema();
    CatalogTable table =
            new CatalogTableImpl(originalSchema, createPartitionKeys(), createProperties(), TEST);

    DescriptorProperties roundTripped = new DescriptorProperties(false);
    roundTripped.putProperties(table.toProperties());

    assertEquals(originalSchema, roundTripped.getTableSchema(Schema.SCHEMA));
}
Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.
From the class CsvTableSinkFactoryBase, method createTableSink:
/**
 * Builds a {@link CsvTableSink} from the given descriptor properties.
 *
 * <p>Validates the connector, CSV format, and schema sections, rejects format
 * schemas whose field types diverge from the table schema, and bridges
 * temporal field types to the legacy {@code java.sql} classes expected by the
 * sink's row serialization.
 */
protected CsvTableSink createTableSink(Boolean isStreaming, Map<String, String> properties) {
    final DescriptorProperties descriptor = new DescriptorProperties();
    descriptor.putProperties(properties);

    // Validate connector, format, and schema sections before reading values.
    new FileSystemValidator().validate(descriptor);
    new OldCsvValidator().validate(descriptor);
    new SchemaValidator(isStreaming, false, false).validate(descriptor);

    final TableSchema tableSchema =
            TableSchemaUtils.getPhysicalSchema(descriptor.getTableSchema(SCHEMA));

    // If a format schema is explicitly defined (regardless of derive-schema),
    // its logical field types must match the table schema exactly.
    if (descriptor.hasPrefix(FORMAT_FIELDS)) {
        final TableSchema formatSchema = descriptor.getTableSchema(FORMAT_FIELDS);
        if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
            throw new TableException(String.format("Encodings that differ from the schema are not supported yet for" + " CsvTableSink, format schema is '%s', but table schema is '%s'.", formatSchema, tableSchema));
        }
    }

    final String path = descriptor.getString(CONNECTOR_PATH);
    final String fieldDelimiter = descriptor.getOptionalString(FORMAT_FIELD_DELIMITER).orElse(",");
    // Missing write mode maps to null, which lets the sink fall back to its default.
    final FileSystem.WriteMode writeMode =
            descriptor.getOptionalString(FORMAT_WRITE_MODE)
                    .map(FileSystem.WriteMode::valueOf)
                    .orElse(null);
    final int numFiles = descriptor.getOptionalInt(FORMAT_NUM_FILES).orElse(-1);

    // Bridge temporal types to java.sql.Timestamp/Time/Date for the sink.
    final DataType[] fieldTypes = tableSchema.getFieldDataTypes();
    final DataType[] bridgedTypes = new DataType[fieldTypes.length];
    for (int i = 0; i < fieldTypes.length; i++) {
        final DataType fieldType = fieldTypes[i];
        switch (fieldType.getLogicalType().getTypeRoot()) {
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                bridgedTypes[i] = fieldType.bridgedTo(Timestamp.class);
                break;
            case TIME_WITHOUT_TIME_ZONE:
                bridgedTypes[i] = fieldType.bridgedTo(Time.class);
                break;
            case DATE:
                bridgedTypes[i] = fieldType.bridgedTo(Date.class);
                break;
            default:
                bridgedTypes[i] = fieldType;
        }
    }

    return new CsvTableSink(path, fieldDelimiter, numFiles, writeMode, tableSchema.getFieldNames(), bridgedTypes);
}
Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.
From the class CatalogTableImpl, method toProperties:
@Override
public Map<String, String> toProperties() {
    // Flatten the schema, partition keys, and user options into a single
    // string-to-string property map.
    final DescriptorProperties props = new DescriptorProperties(false);
    props.putTableSchema(SCHEMA, getSchema());
    props.putPartitionKeys(getPartitionKeys());
    props.putProperties(new HashMap<>(getOptions()));
    return props.asMap();
}
Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.
From the class CatalogTableImpl, method removeRedundant:
/**
 * Construct catalog table properties from {@link #toProperties()}.
 *
 * <p>Strips every key that {@link #toProperties()} derives from the schema and
 * partition keys, leaving only the user-provided options.
 */
public static Map<String, String> removeRedundant(Map<String, String> properties, TableSchema schema, List<String> partitionKeys) {
    // Rebuild the schema/partition-key portion of the property map so its
    // keys can be removed from the input.
    final DescriptorProperties schemaProps = new DescriptorProperties(false);
    schemaProps.putTableSchema(SCHEMA, schema);
    schemaProps.putPartitionKeys(partitionKeys);

    final Map<String, String> cleaned = new HashMap<>(properties);
    for (String redundantKey : schemaProps.asMap().keySet()) {
        cleaned.remove(redundantKey);
    }
    return cleaned;
}
Usage of org.apache.flink.table.descriptors.DescriptorProperties in the Apache Flink project.
From the class DataGenTableSourceFactoryTest, method testLackEndForSequence:
@Test
public void testLackEndForSequence() {
    // A sequence generator with a start but no end must be rejected with a
    // ValidationException naming the missing 'fields.f0.end' property.
    try {
        final DescriptorProperties descriptor = new DescriptorProperties();
        descriptor.putString(FactoryUtil.CONNECTOR.key(), "datagen");
        descriptor.putString(DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.KIND, DataGenConnectorOptionsUtil.SEQUENCE);
        descriptor.putLong(DataGenConnectorOptionsUtil.FIELDS + ".f0." + DataGenConnectorOptionsUtil.START, 0);

        createTableSource(ResolvedSchema.of(Column.physical("f0", DataTypes.BIGINT())), descriptor.asMap());
    } catch (ValidationException e) {
        // The factory wraps the real validation failure; inspect the cause.
        final Throwable cause = e.getCause();
        Assert.assertTrue(cause.toString(), cause instanceof ValidationException);
        Assert.assertTrue(cause.getMessage(), cause.getMessage().contains("Could not find required property 'fields.f0.end' for sequence generator."));
        return;
    }
    Assert.fail("Should fail by ValidationException.");
}
Aggregations