Search in sources :

Example 11 with Type

Use of org.apache.hadoop.hive.metastore.api.Type in project hive by apache.

The class TestAcidTableSetup, method testTransactionalValidation.

/**
 * Verifies metastore-side validation of the "transactional" table property:
 * <ul>
 *   <li>CREATE TABLE must reject an empty or non-"true" value;</li>
 *   <li>a transactional table must use an ACID-compliant format (ORC);</li>
 *   <li>ALTER TABLE may not unset "transactional" once it is "true";</li>
 *   <li>ALTER TABLE to "transactional"="true" enforces the same format rules.</li>
 * </ul>
 * Each negative case asserts the exact MetaException message so a message
 * regression is caught, and each positive case asserts the stored property.
 */
@Test
public void testTransactionalValidation() throws Throwable {
    String dbName = "acidDb";
    silentDropDatabase(dbName);
    Database db = new Database();
    db.setName(dbName);
    client.createDatabase(db);
    String tblName = "acidTable";
    Map<String, String> fields = new HashMap<>();
    fields.put("name", ColumnType.STRING_TYPE_NAME);
    fields.put("income", ColumnType.INT_TYPE_NAME);
    Type type = createType("Person1", fields);
    Map<String, String> params = new HashMap<>();
    params.put("transactional", "");
    // Fail - "transactional" is present but empty
    try {
        Table t = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setTableParams(params)
            .setCols(type.getFields())
            .build();
        client.createTable(t);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("'transactional' property of TBLPROPERTIES may only have value 'true': acidDb.acidTable", e.getMessage());
    }
    // Fail - "transactional" property is set to an invalid value
    try {
        params.clear();
        params.put("transactional", "foobar");
        Table t = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setTableParams(params)
            .setCols(type.getFields())
            .build();
        client.createTable(t);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("'transactional' property of TBLPROPERTIES may only have value 'true': acidDb.acidTable", e.getMessage());
    }
    // Fail - "transactional" is set to true, but the table is not bucketed
    try {
        params.clear();
        params.put("transactional", "true");
        Table t = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setTableParams(params)
            .setCols(type.getFields())
            .build();
        client.createTable(t);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("The table must be stored using an ACID compliant format (such as ORC): acidDb.acidTable", e.getMessage());
    }
    List<String> bucketCols = new ArrayList<>();
    bucketCols.add("income");
    // Fail - "transactional" is set to true, and the table is bucketed, but doesn't use ORC
    try {
        params.clear();
        params.put("transactional", "true");
        Table t = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setTableParams(params)
            .setCols(type.getFields())
            .setBucketCols(bucketCols)
            .build();
        client.createTable(t);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("The table must be stored using an ACID compliant format (such as ORC): acidDb.acidTable", e.getMessage());
    }
    // Succeed - "transactional" is set to true, and the table is bucketed, and uses ORC
    params.clear();
    params.put("transactional", "true");
    Table t = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tblName)
        .setTableParams(params)
        .setCols(type.getFields())
        .setBucketCols(bucketCols)
        .setInputFormat("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat")
        .setOutputFormat("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat")
        .build();
    client.createTable(t);
    // assertEquals (rather than assertTrue on equals) reports the actual value on failure.
    assertEquals("CREATE TABLE should succeed", "true",
        t.getParameters().get(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL));
    // Fail - trying to set "transactional" to "false" is not allowed
    try {
        params.clear();
        params.put("transactional", "false");
        t = new Table();
        t.setParameters(params);
        t.setDbName(dbName);
        t.setTableName(tblName);
        client.alter_table(dbName, tblName, t);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("TBLPROPERTIES with 'transactional'='true' cannot be unset: acidDb.acidTable", e.getMessage());
    }
    // Fail - trying to set "transactional" to "true" but doesn't satisfy bucketing and Input/OutputFormat requirement
    try {
        tblName += "1";
        params.clear();
        // Create a non-ORC table first, then attempt to flip it to transactional via ALTER.
        t = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setCols(type.getFields())
            .setInputFormat("org.apache.hadoop.mapred.FileInputFormat")
            .build();
        client.createTable(t);
        params.put("transactional", "true");
        t.setParameters(params);
        client.alter_table(dbName, tblName, t);
        fail("Expected exception");
    } catch (MetaException e) {
        assertEquals("The table must be stored using an ACID compliant format (such as ORC): acidDb.acidTable1", e.getMessage());
    }
    // Succeed - trying to set "transactional" to "true", and satisfies bucketing and Input/OutputFormat requirement
    tblName += "2";
    params.clear();
    t = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tblName)
        .setCols(type.getFields())
        .setNumBuckets(1)
        .setBucketCols(bucketCols)
        .setInputFormat("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat")
        .setOutputFormat("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat")
        .build();
    client.createTable(t);
    params.put("transactional", "true");
    t.setParameters(params);
    client.alter_table(dbName, tblName, t);
    assertEquals("ALTER TABLE should succeed", "true",
        t.getParameters().get(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL));
}
Also used : Type(org.apache.hadoop.hive.metastore.api.Type) Table(org.apache.hadoop.hive.metastore.api.Table) HashMap(java.util.HashMap) Database(org.apache.hadoop.hive.metastore.api.Database) ArrayList(java.util.ArrayList) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test)

Example 12 with Type

Use of org.apache.hadoop.hive.metastore.api.Type in project hive by apache.

The class TestHiveMetaStore, method testComplexTable.

/**
 * Creates a partitioned, bucketed table whose columns come from a registered
 * metastore {@link Type}, then verifies the round-tripped table metadata:
 * storage descriptor defaults, bucket columns, partition keys, and the
 * column lists returned by getFields/getSchema. Cleans up table, type and
 * database in a finally block regardless of outcome.
 */
@Test
public void testComplexTable() throws Exception {
    String dbName = "compdb";
    String tblName = "comptbl";
    String typeName = "Person";
    try {
        client.dropTable(dbName, tblName);
        silentDropDatabase(dbName);
        Database db = new Database();
        db.setName(dbName);
        client.createDatabase(db);
        client.dropType(typeName);
        Type typ1 = new Type();
        typ1.setName(typeName);
        typ1.setFields(new ArrayList<>(2));
        typ1.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
        typ1.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
        client.createType(typ1);
        Table tbl = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .setCols(typ1.getFields())
            .addPartCol("ds", ColumnType.DATE_TYPE_NAME)
            .addPartCol("hr", ColumnType.INT_TYPE_NAME)
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .build();
        client.createTable(tbl);
        Table tbl2 = client.getTable(dbName, tblName);
        // JUnit assertEquals takes (expected, actual); keep the expected value first
        // so failure messages read correctly.
        assertEquals(dbName, tbl2.getDbName());
        assertEquals(tblName, tbl2.getTableName());
        assertEquals(typ1.getFields().size(), tbl2.getSd().getCols().size());
        assertFalse(tbl2.getSd().isCompressed());
        assertFalse(tbl2.getSd().isStoredAsSubDirectories());
        assertEquals(1, tbl2.getSd().getNumBuckets());
        assertEquals("Use this for comments etc", tbl2.getSd().getParameters().get("test_param_1"));
        assertEquals("name", tbl2.getSd().getBucketCols().get(0));
        assertNotNull(tbl2.getPartitionKeys());
        assertEquals(2, tbl2.getPartitionKeys().size());
        assertEquals(ColumnType.DATE_TYPE_NAME, tbl2.getPartitionKeys().get(0).getType());
        assertEquals(ColumnType.INT_TYPE_NAME, tbl2.getPartitionKeys().get(1).getType());
        assertEquals("ds", tbl2.getPartitionKeys().get(0).getName());
        assertEquals("hr", tbl2.getPartitionKeys().get(1).getName());
        // getFields returns only the data columns
        List<FieldSchema> fieldSchemas = client.getFields(dbName, tblName);
        assertNotNull(fieldSchemas);
        assertEquals(tbl.getSd().getCols().size(), fieldSchemas.size());
        for (FieldSchema fs : tbl.getSd().getCols()) {
            assertTrue(fieldSchemas.contains(fs));
        }
        // getSchema returns data columns plus partition columns
        List<FieldSchema> fieldSchemasFull = client.getSchema(dbName, tblName);
        assertNotNull(fieldSchemasFull);
        assertEquals(tbl.getSd().getCols().size() + tbl.getPartitionKeys().size(), fieldSchemasFull.size());
        for (FieldSchema fs : tbl.getSd().getCols()) {
            assertTrue(fieldSchemasFull.contains(fs));
        }
        for (FieldSchema fs : tbl.getPartitionKeys()) {
            assertTrue(fieldSchemasFull.contains(fs));
        }
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testComplexTable() failed.");
        throw e;
    } finally {
        client.dropTable(dbName, tblName);
        boolean ret = client.dropType(typeName);
        assertTrue("Unable to drop type " + typeName, ret);
        client.dropDatabase(dbName);
    }
}
Also used : Type(org.apache.hadoop.hive.metastore.api.Type) ResourceType(org.apache.hadoop.hive.metastore.api.ResourceType) FunctionType(org.apache.hadoop.hive.metastore.api.FunctionType) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Database(org.apache.hadoop.hive.metastore.api.Database) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Test(org.junit.Test)

Example 13 with Type

Use of org.apache.hadoop.hive.metastore.api.Type in project hive by apache.

The class TestHiveMetaStore, method createType.

/**
 * Builds a metastore {@link Type} named {@code typeName} with one
 * {@link FieldSchema} per entry of {@code fields} (field name -> column type,
 * empty comment), registers it via the metastore client, and returns it.
 *
 * @param typeName name under which the type is registered
 * @param fields   map of field name to column type name
 * @return the registered Type (fields in the map's iteration order)
 * @throws Throwable if the metastore call fails
 */
private Type createType(String typeName, Map<String, String> fields) throws Throwable {
    Type typ1 = new Type();
    typ1.setName(typeName);
    typ1.setFields(new ArrayList<>(fields.size()));
    // Iterate entries directly instead of keySet() plus a get() lookup per key.
    for (Map.Entry<String, String> field : fields.entrySet()) {
        typ1.getFields().add(new FieldSchema(field.getKey(), field.getValue(), ""));
    }
    client.createType(typ1);
    return typ1;
}
Also used : Type(org.apache.hadoop.hive.metastore.api.Type) ResourceType(org.apache.hadoop.hive.metastore.api.ResourceType) FunctionType(org.apache.hadoop.hive.metastore.api.FunctionType) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema)

Example 14 with Type

Use of org.apache.hadoop.hive.metastore.api.Type in project hive by apache.

The class TestHiveMetaStore, method testSimpleTypeApi.

/**
 * Exercises the basic Type lifecycle through the metastore client:
 * create a type, fetch it back, drop it, and confirm a subsequent
 * fetch raises NoSuchObjectException.
 */
@Test
public void testSimpleTypeApi() throws Exception {
    try {
        // Start from a clean slate in case a previous run left the type behind.
        client.dropType(ColumnType.INT_TYPE_NAME);
        Type intType = new Type();
        intType.setName(ColumnType.INT_TYPE_NAME);
        assertTrue("Unable to create type", client.createType(intType));
        // The round-tripped type must carry the same name.
        Type fetched = client.getType(ColumnType.INT_TYPE_NAME);
        assertNotNull(fetched);
        assertEquals(intType.getName(), fetched.getName());
        assertTrue("unable to drop type integer", client.dropType(ColumnType.INT_TYPE_NAME));
        // After the drop, lookups must raise NoSuchObjectException.
        boolean sawNoSuchObject = false;
        try {
            client.getType(ColumnType.INT_TYPE_NAME);
        } catch (NoSuchObjectException expected) {
            sawNoSuchObject = true;
        }
        assertTrue("Expected NoSuchObjectException", sawNoSuchObject);
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testSimpleTypeApi() failed.");
        throw e;
    }
}
Also used : Type(org.apache.hadoop.hive.metastore.api.Type) ResourceType(org.apache.hadoop.hive.metastore.api.ResourceType) FunctionType(org.apache.hadoop.hive.metastore.api.FunctionType) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Test(org.junit.Test)

Aggregations

Type (org.apache.hadoop.hive.metastore.api.Type)14 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)10 FunctionType (org.apache.hadoop.hive.metastore.api.FunctionType)10 PrincipalType (org.apache.hadoop.hive.metastore.api.PrincipalType)10 ResourceType (org.apache.hadoop.hive.metastore.api.ResourceType)10 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)8 Table (org.apache.hadoop.hive.metastore.api.Table)8 Database (org.apache.hadoop.hive.metastore.api.Database)7 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)7 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)7 SQLException (java.sql.SQLException)6 ArrayList (java.util.ArrayList)6 ConfigValSecurityException (org.apache.hadoop.hive.metastore.api.ConfigValSecurityException)6 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)6 UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException)6 TException (org.apache.thrift.TException)6 Test (org.junit.Test)6 IOException (java.io.IOException)5 HashMap (java.util.HashMap)4 StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)4