Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.
Class TestHiveIcebergStorageHandlerNoScan, method testCreateDropTableNonDefaultCatalog.
@Test
public void testCreateDropTableNonDefaultCatalog() {
  TableIdentifier identifier = TableIdentifier.of("default", "customers");
  String catalogName = "nondefaultcatalog";
  testTables.properties().entrySet()
      .forEach(e -> shell.setHiveSessionValue(e.getKey().replace(testTables.catalog, catalogName), e.getValue()));
  String createSql = "CREATE EXTERNAL TABLE " + identifier +
      " (customer_id BIGINT, first_name STRING COMMENT 'This is first name'," +
      " last_name STRING COMMENT 'This is last name')" +
      " STORED BY ICEBERG " +
      testTables.locationForCreateTableSQL(identifier) +
      testTables.propertiesForCreateTableSQL(ImmutableMap.of());
  shell.executeStatement(createSql);
  Table icebergTable = testTables.loadTable(identifier);
  Assert.assertEquals(HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA.asStruct(),
      icebergTable.schema().asStruct());
  shell.executeStatement("DROP TABLE default.customers");
  // Check if the table was really dropped even from the Catalog
  AssertHelpers.assertThrows("should throw exception", NoSuchTableException.class,
      "Table does not exist", () -> {
        testTables.loadTable(identifier);
      });
}
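The test concatenates the TableIdentifier straight into the CREATE statement, relying on its dotted string form. A minimal sketch of that API from org.apache.iceberg.catalog, with illustrative values rather than anything taken from the test harness:

TableIdentifier id = TableIdentifier.of("default", "customers");
Namespace ns = id.namespace();     // Namespace.of("default")
String table = id.name();          // "customers"
String rendered = id.toString();   // "default.customers" -- the form embedded in the SQL above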
Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.
Class TestHiveIcebergStorageHandlerNoScan, method testDropHiveTableWithoutUnderlyingTable.
@Test
public void testDropHiveTableWithoutUnderlyingTable() throws IOException {
  Assume.assumeFalse("Not relevant for HiveCatalog",
      testTableType.equals(TestTables.TestTableType.HIVE_CATALOG));
  TableIdentifier identifier = TableIdentifier.of("default", "customers");
  // Create the Iceberg table in non-HiveCatalog
  testTables.createIcebergTable(shell.getHiveConf(), identifier.name(),
      HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA, FileFormat.PARQUET,
      Collections.emptyMap(), HiveIcebergStorageHandlerTestUtils.CUSTOMER_RECORDS);
  // Create Hive table on top
  String tableLocation = testTables.locationForCreateTableSQL(identifier);
  shell.executeStatement(testTables.createHiveTableSQL(identifier,
      ImmutableMap.of(InputFormatConfig.EXTERNAL_TABLE_PURGE, "TRUE")));
  // Drop the Iceberg table
  Properties properties = new Properties();
  properties.put(Catalogs.NAME, identifier.toString());
  properties.put(Catalogs.LOCATION, tableLocation);
  Catalogs.dropTable(shell.getHiveConf(), properties);
  // Finally drop the Hive table as well
  shell.executeStatement("DROP TABLE " + identifier);
}
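The Catalogs facade here is keyed entirely by Properties. As a sketch that is not part of the test, the same NAME/LOCATION keys can be used to load the table back before it is dropped; LOCATION matters for location-based catalogs:

Properties props = new Properties();
props.put(Catalogs.NAME, identifier.toString());   // "default.customers"
props.put(Catalogs.LOCATION, tableLocation);       // consulted when the table is resolved by location
Table loaded = Catalogs.loadTable(shell.getHiveConf(), props);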
Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.
Class TestHiveIcebergStorageHandlerNoScan, method testCreateTableError.
@Test
public void testCreateTableError() {
  TableIdentifier identifier = TableIdentifier.of("default", "withShell2");
  // Wrong schema
  AssertHelpers.assertThrows("should throw exception", IllegalArgumentException.class,
      "Unrecognized token 'WrongSchema'", () -> {
        shell.executeStatement("CREATE EXTERNAL TABLE withShell2 " +
            "STORED BY ICEBERG " + testTables.locationForCreateTableSQL(identifier) +
            "TBLPROPERTIES ('" + InputFormatConfig.TABLE_SCHEMA + "'='WrongSchema'" +
            ",'" + InputFormatConfig.CATALOG_NAME + "'='" + testTables.catalogName() + "')");
      });
  // Missing schema, we try to get the schema from the table and fail
  AssertHelpers.assertThrows("should throw exception", IllegalArgumentException.class,
      "Please provide ", () -> {
        shell.executeStatement("CREATE EXTERNAL TABLE withShell2 " +
            "STORED BY ICEBERG " + testTables.locationForCreateTableSQL(identifier) +
            testTables.propertiesForCreateTableSQL(ImmutableMap.of()));
      });
  if (!testTables.locationForCreateTableSQL(identifier).isEmpty()) {
    // Only test this if the location is required
    AssertHelpers.assertThrows("should throw exception", IllegalArgumentException.class,
        "Table location not set", () -> {
          shell.executeStatement("CREATE EXTERNAL TABLE withShell2 " +
              "STORED BY ICEBERG " +
              "TBLPROPERTIES ('" + InputFormatConfig.TABLE_SCHEMA + "'='" +
              SchemaParser.toJson(HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA) + "','" +
              InputFormatConfig.CATALOG_NAME + "'='" + testTables.catalogName() + "')");
        });
  }
}
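For contrast, a hypothetical counterpart to the failing statements above (not part of the test): supplying the location, a valid schema JSON from SchemaParser.toJson and the catalog name together should let the CREATE succeed.

shell.executeStatement("CREATE EXTERNAL TABLE withShell2 " +
    "STORED BY ICEBERG " + testTables.locationForCreateTableSQL(identifier) +
    "TBLPROPERTIES ('" + InputFormatConfig.TABLE_SCHEMA + "'='" +
    SchemaParser.toJson(HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA) + "','" +
    InputFormatConfig.CATALOG_NAME + "'='" + testTables.catalogName() + "')");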
Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.
Class TestHiveIcebergStorageHandlerNoScan, method testAlterTableReplaceColumnsFailsWhenNotOnlyDropping.
@Test
public void testAlterTableReplaceColumnsFailsWhenNotOnlyDropping() {
  TableIdentifier identifier = TableIdentifier.of("default", "customers");
  Schema schema = new Schema(
      optional(1, "customer_id", Types.IntegerType.get()),
      optional(2, "first_name", Types.StringType.get(), "This is first name"),
      optional(3, "last_name", Types.StringType.get(), "This is last name"),
      optional(4, "address", Types.StructType.of(
          optional(5, "city", Types.StringType.get()),
          optional(6, "street", Types.StringType.get())), null));
  testTables.createTable(shell, identifier.name(), schema, SPEC, FileFormat.PARQUET, ImmutableList.of());
  // check unsupported operations
  String[] commands = {
      // type promotion
      "ALTER TABLE default.customers REPLACE COLUMNS (customer_id bigint, first_name string COMMENT 'This is " +
          "first name', last_name string COMMENT 'This is last name', address struct<city:string,street:string>)",
      // delete a comment
      "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, first_name string, " +
          "last_name string COMMENT 'This is last name', address struct<city:string,street:string>)",
      // change a comment
      "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, first_name string COMMENT 'New docs', " +
          "last_name string COMMENT 'This is last name', address struct<city:string,street:string>)",
      // reorder columns
      "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, last_name string COMMENT 'This is " +
          "last name', first_name string COMMENT 'This is first name', address struct<city:string,street:string>)",
      // add new column
      "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, first_name string COMMENT 'This is " +
          "first name', last_name string COMMENT 'This is last name', address struct<city:string,street:string>, " +
          "new_col timestamp)",
      // dropping a column + reordering columns
      "ALTER TABLE default.customers REPLACE COLUMNS (last_name string COMMENT 'This is " +
          "last name', first_name string COMMENT 'This is first name', address struct<city:string,street:string>)"
  };
  for (String command : commands) {
    AssertHelpers.assertThrows("", IllegalArgumentException.class,
        "Unsupported operation to use REPLACE COLUMNS", () -> shell.executeStatement(command));
  }
  // check no-op case too
  String command = "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, first_name string COMMENT 'This" +
      " is first name', last_name string COMMENT 'This is last name', address struct<city:string,street:string>)";
  AssertHelpers.assertThrows("", IllegalArgumentException.class,
      "No schema change detected", () -> shell.executeStatement(command));
}
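The test name implies that only pure column drops are supported through REPLACE COLUMNS on Iceberg-backed tables. A sketch of that supported form (not part of the test), dropping the address column from the schema created above:

shell.executeStatement("ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, " +
    "first_name string COMMENT 'This is first name', last_name string COMMENT 'This is last name')");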
Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.
Class TestHiveIcebergStorageHandlerWithMultipleCatalogs, method testCTASFromOtherCatalogFailureRollback.
@Test
public void testCTASFromOtherCatalogFailureRollback() throws IOException {
  // force an execution error by passing in a committer class that Tez won't be able to load
  shell.setHiveSessionValue("hive.tez.mapreduce.output.committer.class", "org.apache.NotExistingClass");
  TableIdentifier target = TableIdentifier.of("default", "target");
  testTables2.createTable(shell, "source", HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA, fileFormat2,
      HiveIcebergStorageHandlerTestUtils.CUSTOMER_RECORDS);
  AssertHelpers.assertThrows("Should fail while loading non-existent output committer class.",
      IllegalArgumentException.class, "org.apache.NotExistingClass",
      () -> shell.executeStatement(String.format(
          "CREATE TABLE target STORED BY ICEBERG TBLPROPERTIES ('%s'='%s') AS SELECT * FROM source",
          InputFormatConfig.CATALOG_NAME, HIVECATALOGNAME)));
  // CTAS table should have been dropped by the lifecycle hook
  Assert.assertThrows(NoSuchTableException.class, () -> testTables1.loadTable(target));
}
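As a hypothetical success path (an assumption, not covered by this test): with the bogus committer override reset to its default, the same CTAS statement should complete and the target table become loadable from the Hive catalog.

shell.executeStatement(String.format(
    "CREATE TABLE target STORED BY ICEBERG TBLPROPERTIES ('%s'='%s') AS SELECT * FROM source",
    InputFormatConfig.CATALOG_NAME, HIVECATALOGNAME));
Table created = testTables1.loadTable(TableIdentifier.of("default", "target"));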