
Example 36 with HCatException

use of org.apache.hive.hcatalog.common.HCatException in project hive by apache.

the class HCatPartition method toHivePartition.

// For use from HCatClient.addPartitions(), to construct from user-input.
Partition toHivePartition() throws HCatException {
    Partition hivePtn = new Partition();
    hivePtn.setDbName(dbName);
    hivePtn.setTableName(tableName);
    hivePtn.setValues(values);
    hivePtn.setParameters(parameters);
    if (sd.getLocation() == null) {
        LOG.warn("Partition location is not set! Attempting to construct default partition location.");
        try {
            String partName = Warehouse.makePartName(HCatSchemaUtils.getFieldSchemas(hcatTable.getPartCols()), values);
            sd.setLocation(new Path(hcatTable.getSd().getLocation(), partName).toString());
        } catch (MetaException exception) {
            // Chain the MetaException as the cause instead of swallowing it.
            throw new HCatException("Could not construct default partition-path for " + hcatTable.getDbName() + "." + hcatTable.getTableName() + "[" + values + "]", exception);
        }
    }
    hivePtn.setSd(sd);
    hivePtn.setCreateTime((int) (System.currentTimeMillis() / 1000));
    hivePtn.setLastAccessTimeIsSet(false);
    return hivePtn;
}
Also used : Path(org.apache.hadoop.fs.Path) Partition(org.apache.hadoop.hive.metastore.api.Partition) HCatException(org.apache.hive.hcatalog.common.HCatException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
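
toHivePartition() is the bridge to the metastore's Partition type when partitions are registered through HCatClient. Below is a minimal sketch of that caller side. It assumes the HCatPartition(HCatTable, Map, String) constructor, HCatAddPartitionDesc.create(HCatPartition), and HCatClient.addPartition() from the same API (none of which appear in the snippet above), and all table, column, and value names are placeholders.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.hcatalog.api.HCatAddPartitionDesc;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.api.HCatPartition;
import org.apache.hive.hcatalog.api.HCatTable;

public class AddPartitionSketch {
    public static void main(String[] args) throws Exception {
        HCatClient client = HCatClient.create(new Configuration());
        try {
            // Fetch the table whose partition columns drive the default-location logic above.
            HCatTable table = client.getTable("default", "my_partitioned_table");

            // Partition-key values; keys must match the table's partition columns.
            Map<String, String> partitionKeyValues = new HashMap<String, String>();
            partitionKeyValues.put("dt", "2023-01-01");

            // Location is left null, so toHivePartition() should fall back to the
            // default <table-location>/<partition-name> path, as in the snippet above.
            HCatPartition partition = new HCatPartition(table, partitionKeyValues, null);
            client.addPartition(HCatAddPartitionDesc.create(partition).build());
        } finally {
            client.close();
        }
    }
}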

Example 37 with HCatException

use of org.apache.hive.hcatalog.common.HCatException in project hive by apache.

the class HCatTable method toHiveTable.

Table toHiveTable() throws HCatException {
    Table newTable = new Table();
    newTable.setDbName(dbName);
    newTable.setTableName(tableName);
    if (tblProps != null) {
        newTable.setParameters(tblProps);
    }
    if (isExternal) {
        newTable.putToParameters("EXTERNAL", "TRUE");
        newTable.setTableType(TableType.EXTERNAL_TABLE.toString());
    } else {
        newTable.setTableType(TableType.MANAGED_TABLE.toString());
    }
    if (StringUtils.isNotBlank(this.comment)) {
        newTable.putToParameters("comment", comment);
    }
    newTable.setSd(sd);
    if (partCols != null) {
        ArrayList<FieldSchema> hivePtnCols = new ArrayList<FieldSchema>();
        for (HCatFieldSchema fs : partCols) {
            hivePtnCols.add(HCatSchemaUtils.getFieldSchema(fs));
        }
        newTable.setPartitionKeys(hivePtnCols);
    }
    newTable.setCreateTime((int) (System.currentTimeMillis() / 1000));
    newTable.setLastAccessTimeIsSet(false);
    try {
        // TODO: Verify that this works for systems using UGI.doAs() (e.g. Oozie).
        newTable.setOwner(owner == null ? getConf().getUser() : owner);
    } catch (Exception exception) {
        // Chain the original exception so the root cause of the owner lookup failure is not lost.
        throw new HCatException("Unable to determine owner of table (" + dbName + "." + tableName + ") from HiveConf.", exception);
    }
    return newTable;
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) HCatFieldSchema(org.apache.hive.hcatalog.data.schema.HCatFieldSchema) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) HCatException(org.apache.hive.hcatalog.common.HCatException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)
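
toHiveTable() is presumably what runs under the hood when a table descriptor is submitted through HCatClient.createTable(). A minimal sketch of that path, using only calls that appear in the test examples further down; the table and column names are placeholders.

import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.api.HCatCreateTableDesc;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;

public class CreateTableSketch {
    public static void main(String[] args) throws Exception {
        HCatClient client = HCatClient.create(new Configuration());
        try {
            ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
            cols.add(new HCatFieldSchema("id", Type.INT, "id column"));
            cols.add(new HCatFieldSchema("value", Type.STRING, "value column"));

            // A null database name targets the default database, mirroring the tests below.
            HCatCreateTableDesc tableDesc =
                HCatCreateTableDesc.create(null, "sketch_table", cols).fileFormat("rcfile").build();
            client.createTable(tableDesc);
        } finally {
            client.close();
        }
    }
}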

Example 38 with HCatException

use of org.apache.hive.hcatalog.common.HCatException in project hive by apache.

the class MetadataJSONSerializer method deserializePartitionSpec.

@Override
public HCatPartitionSpec deserializePartitionSpec(List<String> hcatPartitionSpecStrings) throws HCatException {
    try {
        List<PartitionSpec> partitionSpecList = new ArrayList<PartitionSpec>();
        TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
        for (String stringRep : hcatPartitionSpecStrings) {
            PartitionSpec partSpec = new PartitionSpec();
            deserializer.deserialize(partSpec, stringRep, "UTF-8");
            partitionSpecList.add(partSpec);
        }
        return new HCatPartitionSpec(null, PartitionSpecProxy.Factory.get(partitionSpecList));
    } catch (TException deserializationException) {
        throw new HCatException("Failed to deserialize!", deserializationException);
    }
}
Also used : TException(org.apache.thrift.TException) TJSONProtocol(org.apache.thrift.protocol.TJSONProtocol) TDeserializer(org.apache.thrift.TDeserializer) ArrayList(java.util.ArrayList) HCatException(org.apache.hive.hcatalog.common.HCatException) PartitionSpec(org.apache.hadoop.hive.metastore.api.PartitionSpec)
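
The complementary serialization side, which produces the JSON strings this method consumes, would look roughly like the following. This is a hedged sketch using Thrift's TSerializer with the same TJSONProtocol; it assumes the TSerializer.toString(TBase, String) overload available in the Thrift versions Hive used at the time.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.PartitionSpec;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TJSONProtocol;

public class SerializePartitionSpecSketch {
    // Turns each thrift PartitionSpec into the JSON string form that
    // deserializePartitionSpec() above reads back with TDeserializer.
    static List<String> serialize(List<PartitionSpec> partitionSpecs) throws Exception {
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        List<String> stringReps = new ArrayList<String>();
        for (PartitionSpec partSpec : partitionSpecs) {
            stringReps.add(serializer.toString(partSpec, "UTF-8"));
        }
        return stringReps;
    }
}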

Example 39 with HCatException

use of org.apache.hive.hcatalog.common.HCatException in project hive by apache.

the class TestHCatClient method testDropTableException.

@Test
public void testDropTableException() throws Exception {
    HCatClient client = HCatClient.create(new Configuration(hcatConf));
    String tableName = "tableToBeDropped";
    boolean isExceptionCaught = false;
    client.dropTable(null, tableName, true);
    try {
        client.dropTable(null, tableName, false);
    } catch (Exception exp) {
        isExceptionCaught = true;
        assertTrue(exp instanceof HCatException);
        LOG.info("Drop Table Exception: " + exp.getCause());
    } finally {
        client.close();
        assertTrue("The expected exception was never thrown.", isExceptionCaught);
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) HCatException(org.apache.hive.hcatalog.common.HCatException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) IOException(java.io.IOException) Test(org.junit.Test)
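
Outside a test harness, the pattern this test exercises comes down to the third (ifExists) argument of dropTable(). A minimal sketch using only the calls shown above; the table name is a placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.common.HCatException;

public class DropTableSketch {
    public static void main(String[] args) throws Exception {
        HCatClient client = HCatClient.create(new Configuration());
        try {
            // ifExists == true: silently succeeds even if the table is absent.
            client.dropTable(null, "tableToBeDropped", true);

            // ifExists == false: a missing table surfaces as an HCatException.
            try {
                client.dropTable(null, "tableToBeDropped", false);
            } catch (HCatException e) {
                // Expected when the table does not exist, as asserted in the test above.
            }
        } finally {
            client.close();
        }
    }
}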

Example 40 with HCatException

use of org.apache.hive.hcatalog.common.HCatException in project hive by apache.

the class TestHCatClient method testOtherFailure.

@Test
public void testOtherFailure() throws Exception {
    HCatClient client = HCatClient.create(new Configuration(hcatConf));
    String tableName = "Temptable";
    boolean isExceptionCaught = false;
    client.dropTable(null, tableName, true);
    ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    cols.add(new HCatFieldSchema("id", Type.INT, "id columns"));
    cols.add(new HCatFieldSchema("value", Type.STRING, "id columns"));
    try {
        HCatCreateTableDesc tableDesc = HCatCreateTableDesc.create(null, tableName, cols).fileFormat("rcfile").build();
        client.createTable(tableDesc);
        // The DB foo is non-existent.
        client.getTable("foo", tableName);
    } catch (Exception exp) {
        isExceptionCaught = true;
        assertTrue(exp instanceof HCatException);
        String newName = "goodTable";
        client.dropTable(null, newName, true);
        HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc.create(null, newName, cols).fileFormat("rcfile").build();
        client.createTable(tableDesc2);
        HCatTable newTable = client.getTable(null, newName);
        assertTrue(newTable != null);
        assertTrue(newTable.getTableName().equalsIgnoreCase(newName));
    } finally {
        client.close();
        assertTrue("The expected exception was never thrown.", isExceptionCaught);
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) ArrayList(java.util.ArrayList) HCatException(org.apache.hive.hcatalog.common.HCatException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) IOException(java.io.IOException) HCatFieldSchema(org.apache.hive.hcatalog.data.schema.HCatFieldSchema) Test(org.junit.Test)
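
The same probe-and-recover pattern, stripped of the test assertions: a lookup against a missing database or table surfaces as an HCatException, and the caller can fall back to creating what it needs. A minimal sketch built from the calls shown above; the table and column names are placeholders.

import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.api.HCatCreateTableDesc;
import org.apache.hive.hcatalog.api.HCatTable;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;

public class GetOrCreateTableSketch {
    public static void main(String[] args) throws Exception {
        HCatClient client = HCatClient.create(new Configuration());
        try {
            HCatTable table;
            try {
                // Throws HCatException if the database or table does not exist.
                table = client.getTable(null, "goodTable");
            } catch (HCatException notFound) {
                ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
                cols.add(new HCatFieldSchema("id", Type.INT, "id column"));
                client.createTable(HCatCreateTableDesc.create(null, "goodTable", cols).fileFormat("rcfile").build());
                table = client.getTable(null, "goodTable");
            }
            System.out.println("Using table: " + table.getTableName());
        } finally {
            client.close();
        }
    }
}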

Aggregations

HCatException (org.apache.hive.hcatalog.common.HCatException): 52
IOException (java.io.IOException): 23
ArrayList (java.util.ArrayList): 20
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 19
TException (org.apache.thrift.TException): 14
HCatFieldSchema (org.apache.hive.hcatalog.data.schema.HCatFieldSchema): 13
HashMap (java.util.HashMap): 11
Test (org.junit.Test): 11
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 10
Configuration (org.apache.hadoop.conf.Configuration): 9
Path (org.apache.hadoop.fs.Path): 9
Partition (org.apache.hadoop.hive.metastore.api.Partition): 8
Table (org.apache.hadoop.hive.metastore.api.Table): 8
HCatSchema (org.apache.hive.hcatalog.data.schema.HCatSchema): 7
Job (org.apache.hadoop.mapreduce.Job): 6
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 5
FileSystem (org.apache.hadoop.fs.FileSystem): 4
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 4
HCatRecord (org.apache.hive.hcatalog.data.HCatRecord): 4
Map (java.util.Map): 3