
Example 56 with TableIdentifier

Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.

From the class TestHiveIcebergStorageHandlerNoScan, method testCreateDropTableNonDefaultCatalog.

@Test
public void testCreateDropTableNonDefaultCatalog() {
    TableIdentifier identifier = TableIdentifier.of("default", "customers");
    String catalogName = "nondefaultcatalog";
    testTables.properties().entrySet().forEach(e ->
        shell.setHiveSessionValue(e.getKey().replace(testTables.catalog, catalogName), e.getValue()));
    String createSql = "CREATE EXTERNAL TABLE " + identifier +
        " (customer_id BIGINT, first_name STRING COMMENT 'This is first name'," +
        " last_name STRING COMMENT 'This is last name')" +
        " STORED BY ICEBERG " +
        testTables.locationForCreateTableSQL(identifier) +
        testTables.propertiesForCreateTableSQL(ImmutableMap.of());
    shell.executeStatement(createSql);
    Table icebergTable = testTables.loadTable(identifier);
    Assert.assertEquals(HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA.asStruct(), icebergTable.schema().asStruct());
    shell.executeStatement("DROP TABLE default.customers");
    // Check if the table was really dropped even from the Catalog
    AssertHelpers.assertThrows("should throw exception", NoSuchTableException.class, "Table does not exist", () -> {
        testTables.loadTable(identifier);
    });
}
Also used: TableIdentifier (org.apache.iceberg.catalog.TableIdentifier), BaseTable (org.apache.iceberg.BaseTable), Table (org.apache.iceberg.Table), Test (org.junit.Test)
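
For reference, a minimal sketch (outside the Hive test harness above, with illustrative values) of how the TableIdentifier used in this test is composed and rendered into the SQL statements:

import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;

public class TableIdentifierSketch {
    public static void main(String[] args) {
        // same identifier the test embeds in its CREATE and DROP statements
        TableIdentifier identifier = TableIdentifier.of("default", "customers");
        System.out.println(identifier);              // default.customers
        System.out.println(identifier.namespace());  // default
        System.out.println(identifier.name());       // customers
        // equivalent construction from an explicit Namespace
        TableIdentifier same = TableIdentifier.of(Namespace.of("default"), "customers");
        System.out.println(identifier.equals(same)); // true
    }
}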

Example 57 with TableIdentifier

Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.

From the class TestHiveIcebergStorageHandlerNoScan, method testDropHiveTableWithoutUnderlyingTable.

@Test
public void testDropHiveTableWithoutUnderlyingTable() throws IOException {
    Assume.assumeFalse("Not relevant for HiveCatalog", testTableType.equals(TestTables.TestTableType.HIVE_CATALOG));
    TableIdentifier identifier = TableIdentifier.of("default", "customers");
    // Create the Iceberg table in non-HiveCatalog
    testTables.createIcebergTable(shell.getHiveConf(), identifier.name(),
        HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA, FileFormat.PARQUET,
        Collections.emptyMap(), HiveIcebergStorageHandlerTestUtils.CUSTOMER_RECORDS);
    // Create Hive table on top
    String tableLocation = testTables.locationForCreateTableSQL(identifier);
    shell.executeStatement(testTables.createHiveTableSQL(identifier, ImmutableMap.of(InputFormatConfig.EXTERNAL_TABLE_PURGE, "TRUE")));
    // Drop the Iceberg table
    Properties properties = new Properties();
    properties.put(Catalogs.NAME, identifier.toString());
    properties.put(Catalogs.LOCATION, tableLocation);
    Catalogs.dropTable(shell.getHiveConf(), properties);
    // Finally drop the Hive table as well
    shell.executeStatement("DROP TABLE " + identifier);
}
Also used: TableIdentifier (org.apache.iceberg.catalog.TableIdentifier), TableProperties (org.apache.iceberg.TableProperties), Properties (java.util.Properties), Test (org.junit.Test)
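
The drop above bypasses Hive DDL and goes through the iceberg-mr Catalogs utility; a minimal sketch of that call, assuming a plain Hadoop Configuration and a hypothetical table location:

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.mr.Catalogs;

public class DropIcebergTableSketch {
    public static void main(String[] args) {
        // in the test, the configuration comes from shell.getHiveConf()
        Configuration conf = new Configuration();
        Properties properties = new Properties();
        properties.put(Catalogs.NAME, "default.customers");
        // hypothetical location; only relevant for catalogs that resolve tables by path
        properties.put(Catalogs.LOCATION, "file:///tmp/iceberg/default/customers");
        // removes the Iceberg table itself, leaving any Hive table defined on top untouched
        Catalogs.dropTable(conf, properties);
    }
}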

Example 58 with TableIdentifier

Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.

From the class TestHiveIcebergStorageHandlerNoScan, method testCreateTableError.

@Test
public void testCreateTableError() {
    TableIdentifier identifier = TableIdentifier.of("default", "withShell2");
    // Wrong schema
    AssertHelpers.assertThrows("should throw exception", IllegalArgumentException.class, "Unrecognized token 'WrongSchema'", () -> {
        shell.executeStatement("CREATE EXTERNAL TABLE withShell2 " + "STORED BY ICEBERG " + testTables.locationForCreateTableSQL(identifier) + "TBLPROPERTIES ('" + InputFormatConfig.TABLE_SCHEMA + "'='WrongSchema'" + ",'" + InputFormatConfig.CATALOG_NAME + "'='" + testTables.catalogName() + "')");
    });
    // Missing schema, we try to get the schema from the table and fail
    AssertHelpers.assertThrows("should throw exception", IllegalArgumentException.class, "Please provide ", () -> {
        shell.executeStatement("CREATE EXTERNAL TABLE withShell2 " + "STORED BY ICEBERG " + testTables.locationForCreateTableSQL(identifier) + testTables.propertiesForCreateTableSQL(ImmutableMap.of()));
    });
    if (!testTables.locationForCreateTableSQL(identifier).isEmpty()) {
        // Only test this if the location is required
        AssertHelpers.assertThrows("should throw exception", IllegalArgumentException.class, "Table location not set", () -> {
            shell.executeStatement("CREATE EXTERNAL TABLE withShell2 " + "STORED BY ICEBERG " + "TBLPROPERTIES ('" + InputFormatConfig.TABLE_SCHEMA + "'='" + SchemaParser.toJson(HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA) + "','" + InputFormatConfig.CATALOG_NAME + "'='" + testTables.catalogName() + "')");
        });
    }
}
Also used: TableIdentifier (org.apache.iceberg.catalog.TableIdentifier), Test (org.junit.Test)
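
The 'WrongSchema' value above fails JSON parsing; the valid form of that property is a schema serialized with SchemaParser. A minimal sketch, using an illustrative schema rather than the test's CUSTOMER_SCHEMA:

import org.apache.iceberg.Schema;
import org.apache.iceberg.SchemaParser;
import org.apache.iceberg.types.Types;
import static org.apache.iceberg.types.Types.NestedField.optional;

public class SchemaJsonSketch {
    public static void main(String[] args) {
        Schema schema = new Schema(
            optional(1, "customer_id", Types.LongType.get()),
            optional(2, "first_name", Types.StringType.get()),
            optional(3, "last_name", Types.StringType.get()));
        // JSON string suitable as the InputFormatConfig.TABLE_SCHEMA table property value
        String json = SchemaParser.toJson(schema);
        System.out.println(json);
        // and it round-trips back into an equivalent schema
        Schema parsed = SchemaParser.fromJson(json);
        System.out.println(parsed.asStruct().equals(schema.asStruct())); // true
    }
}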

Example 59 with TableIdentifier

Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.

From the class TestHiveIcebergStorageHandlerNoScan, method testAlterTableReplaceColumnsFailsWhenNotOnlyDropping.

@Test
public void testAlterTableReplaceColumnsFailsWhenNotOnlyDropping() {
    TableIdentifier identifier = TableIdentifier.of("default", "customers");
    Schema schema = new Schema(
        optional(1, "customer_id", Types.IntegerType.get()),
        optional(2, "first_name", Types.StringType.get(), "This is first name"),
        optional(3, "last_name", Types.StringType.get(), "This is last name"),
        optional(4, "address", Types.StructType.of(
            optional(5, "city", Types.StringType.get()),
            optional(6, "street", Types.StringType.get())), null));
    testTables.createTable(shell, identifier.name(), schema, SPEC, FileFormat.PARQUET, ImmutableList.of());
    // check unsupported operations
    String[] commands = {
        // type promotion
        "ALTER TABLE default.customers REPLACE COLUMNS (customer_id bigint, first_name string COMMENT 'This is " +
            "first name', last_name string COMMENT 'This is last name', address struct<city:string,street:string>)",
        // delete a comment
        "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, first_name string, " +
            "last_name string COMMENT 'This is last name', address struct<city:string,street:string>)",
        // change a comment
        "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, first_name string COMMENT 'New docs', " +
            "last_name string COMMENT 'This is last name', address struct<city:string,street:string>)",
        // reorder columns
        "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, last_name string COMMENT 'This is " +
            "last name', first_name string COMMENT 'This is first name', address struct<city:string,street:string>)",
        // add new column
        "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, first_name string COMMENT 'This is " +
            "first name', last_name string COMMENT 'This is last name', address struct<city:string,street:string>, " +
            "new_col timestamp)",
        // dropping a column + reordering columns
        "ALTER TABLE default.customers REPLACE COLUMNS (last_name string COMMENT 'This is " +
            "last name', first_name string COMMENT 'This is first name', address struct<city:string,street:string>)" };
    for (String command : commands) {
        AssertHelpers.assertThrows("", IllegalArgumentException.class, "Unsupported operation to use REPLACE COLUMNS", () -> shell.executeStatement(command));
    }
    // check no-op case too
    String command = "ALTER TABLE default.customers REPLACE COLUMNS (customer_id int, first_name string COMMENT 'This" + " is first name', last_name string COMMENT 'This is last name', address struct<city:string,street:string>)";
    AssertHelpers.assertThrows("", IllegalArgumentException.class, "No schema change detected", () -> shell.executeStatement(command));
}
Also used: TableIdentifier (org.apache.iceberg.catalog.TableIdentifier), UpdateSchema (org.apache.iceberg.UpdateSchema), Schema (org.apache.iceberg.Schema), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), Test (org.junit.Test)
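
For contrast with the rejected HiveQL statements above, the same kinds of schema changes are expressed natively through Iceberg's UpdateSchema API; a minimal sketch, assuming a Table handle already loaded from a catalog:

import org.apache.iceberg.Table;
import org.apache.iceberg.types.Types;

public class UpdateSchemaSketch {
    // 'table' stands in for a handle such as the one returned by testTables.loadTable(identifier)
    static void evolve(Table table) {
        table.updateSchema()
            .updateColumn("customer_id", Types.LongType.get())    // int -> bigint type promotion
            .updateColumnDoc("first_name", "New docs")            // change a column comment
            .addColumn("new_col", Types.TimestampType.withZone()) // add a new column
            .deleteColumn("address")                              // the only change REPLACE COLUMNS accepts
            .commit();
    }
}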

Example 60 with TableIdentifier

Use of org.apache.iceberg.catalog.TableIdentifier in project hive by apache.

From the class TestHiveIcebergStorageHandlerWithMultipleCatalogs, method testCTASFromOtherCatalogFailureRollback.

@Test
public void testCTASFromOtherCatalogFailureRollback() throws IOException {
    // force an execution error by passing in a committer class that Tez won't be able to load
    shell.setHiveSessionValue("hive.tez.mapreduce.output.committer.class", "org.apache.NotExistingClass");
    TableIdentifier target = TableIdentifier.of("default", "target");
    testTables2.createTable(shell, "source", HiveIcebergStorageHandlerTestUtils.CUSTOMER_SCHEMA,
        fileFormat2, HiveIcebergStorageHandlerTestUtils.CUSTOMER_RECORDS);
    AssertHelpers.assertThrows("Should fail while loading non-existent output committer class.",
        IllegalArgumentException.class, "org.apache.NotExistingClass",
        () -> shell.executeStatement(String.format(
            "CREATE TABLE target STORED BY ICEBERG TBLPROPERTIES ('%s'='%s') AS SELECT * FROM source",
            InputFormatConfig.CATALOG_NAME, HIVECATALOGNAME)));
    // CTAS table should have been dropped by the lifecycle hook
    Assert.assertThrows(NoSuchTableException.class, () -> testTables1.loadTable(target));
}
Also used: TableIdentifier (org.apache.iceberg.catalog.TableIdentifier), Test (org.junit.Test)
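
The final assertion above confirms the rollback by expecting NoSuchTableException from loadTable; with direct access to a Catalog the same check can be written either way. A minimal sketch, assuming a configured Catalog instance (construction omitted):

import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.exceptions.NoSuchTableException;

public class RollbackCheckSketch {
    // 'catalog' stands in for whatever Catalog the test harness (testTables1) wraps
    static void assertDropped(Catalog catalog, TableIdentifier target) {
        // existence check, no exception handling required
        if (catalog.tableExists(target)) {
            throw new AssertionError("CTAS target should have been rolled back: " + target);
        }
        // equivalent: loading a missing table throws NoSuchTableException
        try {
            catalog.loadTable(target);
            throw new AssertionError("expected NoSuchTableException for " + target);
        } catch (NoSuchTableException expected) {
            // table is gone, as the failure hook guarantees
        }
    }
}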

Aggregations

TableIdentifier (org.apache.iceberg.catalog.TableIdentifier): 87
Test (org.junit.Test): 69
Table (org.apache.iceberg.Table): 56
PartitionSpec (org.apache.iceberg.PartitionSpec): 27
Schema (org.apache.iceberg.Schema): 25
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 16
BaseTable (org.apache.iceberg.BaseTable): 15
UpdateSchema (org.apache.iceberg.UpdateSchema): 15
List (java.util.List): 13
NoSuchTableException (org.apache.iceberg.exceptions.NoSuchTableException): 13
ArrayList (java.util.ArrayList): 11
ImmutableList (org.apache.iceberg.relocated.com.google.common.collect.ImmutableList): 11
IOException (java.io.IOException): 10
Map (java.util.Map): 10
Types (org.apache.iceberg.types.Types): 10
HashMap (java.util.HashMap): 9
Path (org.apache.hadoop.fs.Path): 9
TableProperties (org.apache.iceberg.TableProperties): 9
Collections (java.util.Collections): 8
Properties (java.util.Properties): 8