Example 51 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class HiveOutputFormatFactoryTest, method testCreateOutputFormat:

@Test
public void testCreateOutputFormat() {
    TableSchema schema = TableSchema.builder().field("x", DataTypes.INT()).build();
    SerDeInfo serDeInfo = new SerDeInfo("name", LazySimpleSerDe.class.getName(), Collections.emptyMap());
    HiveWriterFactory writerFactory = new HiveWriterFactory(
            new JobConf(),
            VerifyURIOutputFormat.class,
            serDeInfo,
            schema,
            new String[0],
            new Properties(),
            HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion()),
            false);
    HiveOutputFormatFactory factory = new HiveOutputFormatFactory(writerFactory);
    org.apache.flink.core.fs.Path path = new org.apache.flink.core.fs.Path(TEST_URI_SCHEME, TEST_URI_AUTHORITY, "/foo/path");
    // verification happens inside VerifyURIOutputFormat, which checks the URI it receives
    factory.createOutputFormat(path);
}
Also used: Path(org.apache.hadoop.fs.Path) TableSchema(org.apache.flink.table.api.TableSchema) LazySimpleSerDe(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) HiveOutputFormatFactory(org.apache.flink.connectors.hive.write.HiveOutputFormatFactory) Properties(java.util.Properties) HiveWriterFactory(org.apache.flink.connectors.hive.write.HiveWriterFactory) JobConf(org.apache.hadoop.mapred.JobConf) Test(org.junit.Test)
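
As a point of reference, here is a minimal standalone sketch of the same TableSchema builder API with several fields. The class name, field names, and types are illustrative, and flink-table-api-java is assumed on the classpath.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.types.DataType;

public class TableSchemaBuilderSketch {
    public static void main(String[] args) {
        // build a schema with several physical columns
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.BIGINT())
                .field("name", DataTypes.STRING())
                .field("ts", DataTypes.TIMESTAMP(3))
                .build();
        // field names and data types come back as parallel arrays
        String[] names = schema.getFieldNames();
        DataType[] types = schema.getFieldDataTypes();
        for (int i = 0; i < names.length; i++) {
            System.out.println(names[i] + ": " + types[i]);
        }
    }
}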

Example 52 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project zeppelin by apache.

From the class FlinkSqlInterpreter, method callDescribe:

private void callDescribe(String name, InterpreterContext context) throws IOException {
    TableSchema schema = tbenv.scan(name.split("\\.")).getSchema();
    StringBuilder builder = new StringBuilder();
    builder.append("Column\tType\n");
    for (int i = 0; i < schema.getFieldCount(); ++i) {
        builder.append(schema.getFieldName(i).get() + "\t" + schema.getFieldDataType(i).get() + "\n");
    }
    // Zeppelin renders output prefixed with %table as a table
    context.out.write("%table\n" + builder.toString());
}
Also used: TableSchema(org.apache.flink.table.api.TableSchema)
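
The Optional-returning accessors in that loop are empty only for an out-of-range index, so the bare get() calls are safe there. A minimal standalone sketch of the same describe logic, with illustrative class and field names:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;

public class DescribeSketch {
    // formats a schema as the tab-separated Column/Type listing used above
    static String describe(TableSchema schema) {
        StringBuilder builder = new StringBuilder("Column\tType\n");
        for (int i = 0; i < schema.getFieldCount(); i++) {
            builder.append(schema.getFieldName(i).get())
                    .append('\t')
                    .append(schema.getFieldDataType(i).get())
                    .append('\n');
        }
        return builder.toString();
    }

    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.BIGINT())
                .field("name", DataTypes.STRING())
                .build();
        System.out.print(describe(schema));
    }
}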

Example 53 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class CatalogTableImpTest, method testToProperties:

@Test
public void testToProperties() {
    TableSchema schema = createTableSchema();
    Map<String, String> prop = createProperties();
    CatalogTable table = new CatalogTableImpl(schema, createPartitionKeys(), prop, TEST);
    DescriptorProperties descriptorProperties = new DescriptorProperties(false);
    descriptorProperties.putProperties(table.toProperties());
    // the schema must survive the round trip through string properties
    assertEquals(schema, descriptorProperties.getTableSchema(Schema.SCHEMA));
}
Also used: TableSchema(org.apache.flink.table.api.TableSchema) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) Test(org.junit.Test)
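
The assertion above depends on a TableSchema surviving a round trip through flat string properties. A minimal sketch of that round trip, assuming the same legacy descriptor API; the literal "schema" key mirrors the Schema.SCHEMA constant used in the test:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;

public class SchemaRoundTripSketch {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder()
                .field("x", DataTypes.INT())
                .build();
        // write the schema under the "schema" key prefix, then read it back
        DescriptorProperties props = new DescriptorProperties(false);
        props.putTableSchema("schema", schema);
        TableSchema restored = props.getTableSchema("schema");
        System.out.println(schema.equals(restored)); // expected: true
    }
}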

Example 54 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class CatalogTableImpTest, method testNullComment:

@Test
public void testNullComment() {
    TableSchema schema = createTableSchema();
    Map<String, String> prop = createProperties();
    CatalogTable table = new CatalogTableImpl(schema, createPartitionKeys(), prop, null);
    // a null comment is normalized to the empty string
    assertEquals("", table.getComment());
    assertEquals(Optional.of(""), table.getDescription());
}
Also used: TableSchema(org.apache.flink.table.api.TableSchema) Test(org.junit.Test)
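
For contrast, a minimal sketch showing the null-comment normalization next to an explicit comment. The class name, empty partition keys, properties, and comment text are illustrative:

import java.util.Collections;
import java.util.HashMap;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.CatalogTableImpl;

public class CommentSketch {
    public static void main(String[] args) {
        TableSchema schema = TableSchema.builder().field("x", DataTypes.INT()).build();
        // a null comment comes back as the empty string...
        CatalogTable withoutComment = new CatalogTableImpl(
                schema, Collections.emptyList(), new HashMap<>(), null);
        System.out.println("'" + withoutComment.getComment() + "'"); // ''
        // ...while an explicit comment is kept verbatim
        CatalogTable withComment = new CatalogTableImpl(
                schema, Collections.emptyList(), new HashMap<>(), "user table");
        System.out.println(withComment.getComment()); // user table
    }
}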

Example 55 with TableSchema

Use of org.apache.flink.table.api.TableSchema in project flink by apache.

From the class CsvTableSinkFactoryBase, method createTableSink:

protected CsvTableSink createTableSink(Boolean isStreaming, Map<String, String> properties) {
    DescriptorProperties params = new DescriptorProperties();
    params.putProperties(properties);
    // validate
    new FileSystemValidator().validate(params);
    new OldCsvValidator().validate(params);
    new SchemaValidator(isStreaming, false, false).validate(params);
    // build
    TableSchema tableSchema = TableSchemaUtils.getPhysicalSchema(params.getTableSchema(SCHEMA));
    // if a format schema is defined, it is used regardless of whether derive-schema is set
    final boolean hasSchema = params.hasPrefix(FORMAT_FIELDS);
    if (hasSchema) {
        TableSchema formatSchema = params.getTableSchema(FORMAT_FIELDS);
        if (!getFieldLogicalTypes(formatSchema).equals(getFieldLogicalTypes(tableSchema))) {
            throw new TableException(String.format("Encodings that differ from the schema are not supported yet for" + " CsvTableSink, format schema is '%s', but table schema is '%s'.", formatSchema, tableSchema));
        }
    }
    String path = params.getString(CONNECTOR_PATH);
    String fieldDelimiter = params.getOptionalString(FORMAT_FIELD_DELIMITER).orElse(",");
    Optional<String> writeModeParam = params.getOptionalString(FORMAT_WRITE_MODE);
    FileSystem.WriteMode writeMode = (writeModeParam.isPresent()) ? FileSystem.WriteMode.valueOf(writeModeParam.get()) : null;
    int numFiles = params.getOptionalInt(FORMAT_NUM_FILES).orElse(-1);
    // bridge to java.sql.Timestamp/Time/Date
    DataType[] dataTypes = Arrays.stream(tableSchema.getFieldDataTypes()).map(dt -> {
        switch(dt.getLogicalType().getTypeRoot()) {
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                return dt.bridgedTo(Timestamp.class);
            case TIME_WITHOUT_TIME_ZONE:
                return dt.bridgedTo(Time.class);
            case DATE:
                return dt.bridgedTo(Date.class);
            default:
                return dt;
        }
    }).toArray(DataType[]::new);
    return new CsvTableSink(path, fieldDelimiter, numFiles, writeMode, tableSchema.getFieldNames(), dataTypes);
}
Also used: DataType(org.apache.flink.table.types.DataType) FORMAT_FIELDS(org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_FIELDS) Arrays(java.util.Arrays) CONNECTOR_PROPERTY_VERSION(org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_PROPERTY_VERSION) TableFactoryService(org.apache.flink.table.factories.TableFactoryService) Time(java.sql.Time) CONNECTOR_PATH(org.apache.flink.table.descriptors.FileSystemValidator.CONNECTOR_PATH) FileSystemValidator(org.apache.flink.table.descriptors.FileSystemValidator) HashMap(java.util.HashMap) FORMAT_NUM_FILES(org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_NUM_FILES) FORMAT_WRITE_MODE(org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_WRITE_MODE) ArrayList(java.util.ArrayList) FORMAT_FIELD_DELIMITER(org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_FIELD_DELIMITER) CONNECTOR_TYPE(org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_TYPE) Map(java.util.Map) SchemaValidator(org.apache.flink.table.descriptors.SchemaValidator) TableFactory(org.apache.flink.table.factories.TableFactory) CsvTableSourceFactoryBase.getFieldLogicalTypes(org.apache.flink.table.sources.CsvTableSourceFactoryBase.getFieldLogicalTypes) CONNECTOR_TYPE_VALUE(org.apache.flink.table.descriptors.FileSystemValidator.CONNECTOR_TYPE_VALUE) SCHEMA(org.apache.flink.table.descriptors.Schema.SCHEMA) Timestamp(java.sql.Timestamp) COMMENT(org.apache.flink.table.descriptors.DescriptorProperties.COMMENT) TableException(org.apache.flink.table.api.TableException) TableSchema(org.apache.flink.table.api.TableSchema) Date(java.sql.Date) OldCsvValidator(org.apache.flink.table.descriptors.OldCsvValidator) List(java.util.List) FileSystem(org.apache.flink.core.fs.FileSystem) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) TableSchemaUtils(org.apache.flink.table.utils.TableSchemaUtils) FORMAT_TYPE_VALUE(org.apache.flink.table.descriptors.OldCsvValidator.FORMAT_TYPE_VALUE)
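
The bridging step at the end of createTableSink swaps a type's conversion class without changing its logical type. A minimal sketch of that idea for TIMESTAMP, assuming flink-table-common on the classpath; the class name is illustrative:

import java.sql.Timestamp;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class BridgingSketch {
    public static void main(String[] args) {
        // TIMESTAMP(3) defaults to java.time.LocalDateTime as its conversion class;
        // bridgedTo swaps in java.sql.Timestamp while the logical type stays TIMESTAMP(3)
        DataType dt = DataTypes.TIMESTAMP(3).bridgedTo(Timestamp.class);
        System.out.println(dt.getLogicalType());     // TIMESTAMP(3)
        System.out.println(dt.getConversionClass()); // class java.sql.Timestamp
    }
}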

Aggregations

TableSchema (org.apache.flink.table.api.TableSchema): 86
Test (org.junit.Test): 54
HashMap (java.util.HashMap): 26
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 21
SqlNode (org.apache.calcite.sql.SqlNode): 19
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 19
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 18
DataType (org.apache.flink.table.types.DataType): 16
ValidationException (org.apache.flink.table.api.ValidationException): 14
TableColumn (org.apache.flink.table.api.TableColumn): 10
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 10
ArrayList (java.util.ArrayList): 9
List (java.util.List): 9
Map (java.util.Map): 9
FeatureOption (org.apache.flink.sql.parser.ddl.SqlTableLike.FeatureOption): 9
MergingStrategy (org.apache.flink.sql.parser.ddl.SqlTableLike.MergingStrategy): 9
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 8
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 8
Arrays (java.util.Arrays): 7
Configuration (org.apache.flink.configuration.Configuration): 7