Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.
From the class HiveCatalogUdfITCase, method testFlinkUdf.
@Test
public void testFlinkUdf() throws Exception {
    final TableSchema schema =
            TableSchema.builder().field("name", DataTypes.STRING()).field("age", DataTypes.INT()).build();

    // Legacy filesystem/CSV connector options for the source table.
    final Map<String, String> sourceOptions = new HashMap<>();
    sourceOptions.put("connector.type", "filesystem");
    sourceOptions.put("connector.path", getClass().getResource("/csv/test.csv").getPath());
    sourceOptions.put("format.type", "csv");

    CatalogTable source = new CatalogTableImpl(schema, sourceOptions, "Comment.");
    hiveCatalog.createTable(new ObjectPath(HiveCatalog.DEFAULT_DB, sourceTableName), source, false);

    // Register a Hive simple UDF, a generic UDF, a UDTF, and a UDAF as catalog functions.
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "myudf"), new CatalogFunctionImpl(TestHiveSimpleUDF.class.getCanonicalName()), false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "mygenericudf"), new CatalogFunctionImpl(TestHiveGenericUDF.class.getCanonicalName()), false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "myudtf"), new CatalogFunctionImpl(TestHiveUDTF.class.getCanonicalName()), false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "myudaf"), new CatalogFunctionImpl(GenericUDAFSum.class.getCanonicalName()), false);

    testUdf(true);
    testUdf(false);
}
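The testUdf(boolean) helper is not shown here. As a rough sketch (not the actual helper), once the HiveCatalog is registered with a TableEnvironment the functions created above can be called from SQL; the table and function names come from the snippet, everything else is assumed:

// Hedged sketch, not the real testUdf(boolean) helper: query the source table
// through the catalog functions registered above.
TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
tableEnv.registerCatalog("myhive", hiveCatalog);
tableEnv.useCatalog("myhive");
// myudtf and myudaf can be exercised the same way.
Table result = tableEnv.sqlQuery(
        "SELECT mygenericudf(myudf(age), 1) AS a FROM " + sourceTableName);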
Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.
From the class HiveCatalogDataTypeTest, method createCatalogTable.
private CatalogTable createCatalogTable(DataType[] types) {
    // Derive a unique column name from each type, e.g. "int_1".
    String[] colNames = new String[types.length];
    for (int i = 0; i < types.length; i++) {
        colNames[i] = String.format("%s_%d", types[i].toString().toLowerCase(), i);
    }
    TableSchema schema = TableSchema.builder().fields(colNames, types).build();
    return new CatalogTableImpl(
            schema,
            new HashMap<String, String>() {
                {
                    put("is_streaming", "false");
                    put(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
                }
            },
            "");
}
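A hedged sketch of how such a helper could be used; hiveCatalog and the table name below are assumptions, not taken from the test:

// Hypothetical usage of createCatalogTable(...): round-trip a table with a few
// data types through the catalog and read it back.
DataType[] types = new DataType[] {DataTypes.TINYINT(), DataTypes.INT(), DataTypes.DOUBLE(), DataTypes.STRING()};
CatalogTable table = createCatalogTable(types);
ObjectPath path = new ObjectPath(HiveCatalog.DEFAULT_DB, "datatype_test");
hiveCatalog.createTable(path, table, false);
// The retrieved schema should report the same logical types that were written.
CatalogBaseTable retrieved = hiveCatalog.getTable(path);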
Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.
From the class HiveCatalogTest, method testAlterFlinkNonManagedTableToHiveTable.
@Test
public void testAlterFlinkNonManagedTableToHiveTable() throws Exception {
    // Create a Flink (non-managed) table backed by the datagen connector.
    Map<String, String> originOptions = Collections.singletonMap(FactoryUtil.CONNECTOR.key(), DataGenTableSourceFactory.IDENTIFIER);
    CatalogTable originTable = new CatalogTableImpl(schema, originOptions, "Flink non-managed table");
    hiveCatalog.createTable(tablePath, originTable, false);

    // Try to alter it into a Hive table; the catalog must reject the type change.
    Map<String, String> newOptions = getLegacyFileSystemConnectorOptions("/test_path");
    newOptions.put(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
    CatalogTable newTable = new CatalogTableImpl(schema, newOptions, "Hive table");
    assertThatThrownBy(() -> hiveCatalog.alterTable(tablePath, newTable, false))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining(
                    "Changing catalog table type is not allowed. "
                            + "Existing table type is 'FLINK_NON_MANAGED_TABLE', but new table type is 'HIVE_TABLE'");
}
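The getLegacyFileSystemConnectorOptions helper is defined elsewhere in the test class and is not shown in these snippets. A plausible sketch of it, based on the legacy filesystem options used in the first example above (the real helper may differ):

// Hypothetical reconstruction of the helper used above; the actual implementation
// in HiveCatalogTest may differ.
private static Map<String, String> getLegacyFileSystemConnectorOptions(String path) {
    Map<String, String> options = new HashMap<>();
    options.put("connector.type", "filesystem");
    options.put("connector.path", path);
    options.put("format.type", "csv");
    return options;
}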
Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.
From the class HiveCatalogTest, method testRetrieveFlinkProperties.
@Test
public void testRetrieveFlinkProperties() throws Exception {
    ObjectPath hiveObjectPath = new ObjectPath(HiveCatalog.DEFAULT_DB, "testRetrieveProperties");

    Map<String, String> options = getLegacyFileSystemConnectorOptions("/test_path");
    options.put(CONNECTOR.key(), "jdbc");
    options.put("url", "jdbc:clickhouse://host:port/testUrl1");
    // A user option that already carries the "flink." prefix must also survive the round trip.
    options.put("flink.url", "jdbc:clickhouse://host:port/testUrl2");

    hiveCatalog.createTable(hiveObjectPath, new CatalogTableImpl(schema, options, null), false);

    CatalogBaseTable hiveTable = hiveCatalog.getTable(hiveObjectPath);
    assertThat(hiveTable.getOptions()).containsEntry("url", "jdbc:clickhouse://host:port/testUrl1");
    assertThat(hiveTable.getOptions()).containsEntry("flink.url", "jdbc:clickhouse://host:port/testUrl2");
}
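Building on the assertions above, a hedged follow-up sketch that checks both options in one step and cleans up afterwards (containsAllEntriesOf and dropTable are standard AssertJ / Catalog calls; the rest is assumed):

// Hedged sketch: verify both options in one assertion, then drop the table so
// the shared catalog stays clean between tests.
Map<String, String> expected = new HashMap<>();
expected.put("url", "jdbc:clickhouse://host:port/testUrl1");
expected.put("flink.url", "jdbc:clickhouse://host:port/testUrl2");
assertThat(hiveTable.getOptions()).containsAllEntriesOf(expected);
hiveCatalog.dropTable(hiveObjectPath, false);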
Use of org.apache.flink.table.catalog.CatalogTableImpl in project flink by apache.
From the class HiveCatalogTest, method testAlterHiveTableToFlinkNonManagedTable.
@Test
public void testAlterHiveTableToFlinkNonManagedTable() throws Exception {
    // Create a Hive table through the catalog.
    Map<String, String> originOptions = getLegacyFileSystemConnectorOptions("/test_path");
    originOptions.put(FactoryUtil.CONNECTOR.key(), SqlCreateHiveTable.IDENTIFIER);
    CatalogTable originTable = new CatalogTableImpl(schema, originOptions, "Hive table");
    hiveCatalog.createTable(tablePath, originTable, false);

    // Try to alter it into a Flink (non-managed) datagen table; the type change must be rejected.
    Map<String, String> newOptions = Collections.singletonMap(FactoryUtil.CONNECTOR.key(), DataGenTableSourceFactory.IDENTIFIER);
    CatalogTable newTable = new CatalogTableImpl(schema, newOptions, "Flink managed table");
    assertThatThrownBy(() -> hiveCatalog.alterTable(tablePath, newTable, false))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining(
                    "Changing catalog table type is not allowed. "
                            + "Existing table type is 'HIVE_TABLE', but new table type is 'FLINK_NON_MANAGED_TABLE'");
}
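By contrast, an alteration that keeps the table type is expected to be accepted. A hedged sketch along the lines of the test above (the reuse of originOptions and the new comment are assumptions, not part of the original test):

// Hedged sketch: altering the table while keeping the Hive connector identifier
// (and therefore the table type) should not throw.
Map<String, String> sameTypeOptions = new HashMap<>(originOptions);
CatalogTable sameTypeTable = new CatalogTableImpl(schema, sameTypeOptions, "Hive table with a new comment");
hiveCatalog.alterTable(tablePath, sameTypeTable, false);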