Example 41 with PartitionSpecProxy

Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.

From the class TestAddPartitionsFromPartSpec, method testAddPartitionSpecNullValue.

@Test
public void testAddPartitionSpecNullValue() throws Exception {
    createTable();
    Partition partition = buildPartition(DB_NAME, TABLE_NAME, null);
    PartitionSpecProxy partitionSpecProxy = buildPartitionSpec(DB_NAME, TABLE_NAME, null, Lists.newArrayList(partition));
    try {
        client.add_partitions_pspec(partitionSpecProxy);
    } catch (NullPointerException e) {
        // TODO: NPE should not be thrown
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) PartitionSpecProxy(org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
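
The buildPartition and buildPartitionSpec helpers used throughout these tests are defined elsewhere in TestAddPartitionsFromPartSpec and are not shown on this page. As a hedged sketch of the general pattern (the helper name and exact setters below are assumptions based on the metastore Thrift API, not the test's actual code), a PartitionSpecProxy can be built from a list of partitions roughly like this:

// Hedged sketch of a buildPartitionSpec-style helper; the real test helper is not reproduced here.
private PartitionSpecProxy buildPartitionSpecSketch(String dbName, String tableName,
        String rootPath, List<Partition> partitions) throws MetaException {
    // Wrap the partitions in a Thrift PartitionSpec ...
    PartitionListComposingSpec composingSpec = new PartitionListComposingSpec();
    composingSpec.setPartitions(partitions);
    PartitionSpec partitionSpec = new PartitionSpec();
    partitionSpec.setDbName(dbName);
    partitionSpec.setTableName(tableName);
    partitionSpec.setRootPath(rootPath);
    partitionSpec.setPartitionList(composingSpec);
    // ... and let the factory pick the proxy implementation that matches the spec type.
    return PartitionSpecProxy.Factory.get(partitionSpec);
}
Also used : PartitionSpec(org.apache.hadoop.hive.metastore.api.PartitionSpec) PartitionListComposingSpec(org.apache.hadoop.hive.metastore.api.PartitionListComposingSpec)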

Example 42 with PartitionSpecProxy

Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.

From the class HiveMetaStoreClient, method alter_table_with_environmentContext.

@Override
public void alter_table_with_environmentContext(String dbname, String tbl_name, Table new_tbl, EnvironmentContext envContext) throws InvalidOperationException, MetaException, TException {
    HiveMetaHook hook = getHook(new_tbl);
    if (hook != null) {
        hook.preAlterTable(new_tbl, envContext);
    }
    AlterTableRequest req = new AlterTableRequest(dbname, tbl_name, new_tbl);
    req.setCatName(MetaStoreUtils.getDefaultCatalog(conf));
    req.setEnvironmentContext(envContext);
    if (processorCapabilities != null) {
        req.setProcessorCapabilities(new ArrayList<String>(Arrays.asList(processorCapabilities)));
        req.setProcessorIdentifier(processorIdentifier);
    }
    boolean success = false;
    try {
        client.alter_table_req(req);
        if (hook != null) {
            PartitionSpecProxy partitionSpecProxy = listPartitionSpecs(dbname, tbl_name, Integer.MAX_VALUE);
            hook.commitAlterTable(new_tbl, envContext, partitionSpecProxy);
        }
        success = true;
    } finally {
        if (!success && (hook != null)) {
            hook.rollbackAlterTable(new_tbl, envContext);
        }
    }
}
Also used : PartitionSpecProxy(org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy)
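
Here listPartitionSpecs fetches the table's partitions as a PartitionSpecProxy so that the hook's commitAlterTable callback can inspect them. A hedged sketch of how such a consumer might walk the proxy (logPartitionValues is a made-up name for illustration, not part of the Hive API):

// Illustration only: iterate a PartitionSpecProxy, e.g. inside a HiveMetaHook callback.
static void logPartitionValues(PartitionSpecProxy proxy) {
    if (proxy == null || proxy.size() == 0) {
        return;
    }
    PartitionSpecProxy.PartitionIterator iterator = proxy.getPartitionIterator();
    while (iterator.hasNext()) {
        Partition partition = iterator.next();
        System.out.println(proxy.getDbName() + "." + proxy.getTableName()
                + " partition values: " + partition.getValues());
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) PartitionSpecProxy(org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy)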

Example 43 with PartitionSpecProxy

Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.

From the class TestAddPartitionsFromPartSpec, method testAddPartitionSpecChangeRootPathDiffInSd.

@Test(expected = MetaException.class)
public void testAddPartitionSpecChangeRootPathDiffInSd() throws Exception {
    Table table = createTable();
    String rootPath = table.getSd().getLocation() + "/addPartSpecRootPath/";
    String rootPath1 = table.getSd().getLocation() + "/addPartSdPath/";
    String rootPath2 = table.getSd().getLocation() + "/someotherpath/";
    Partition partition = buildPartition(DB_NAME, TABLE_NAME, "2007", rootPath1 + "part2007/");
    PartitionSpecProxy partitionSpecProxy = buildPartitionSpec(DB_NAME, TABLE_NAME, rootPath, Lists.newArrayList(partition));
    partitionSpecProxy.setRootLocation(rootPath2);
    client.add_partitions_pspec(partitionSpecProxy);
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) PartitionSpecProxy(org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
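
This test expects a MetaException because the partition location (under rootPath1) does not start with the spec's current root path (rootPath), so setRootLocation cannot rewrite it to rootPath2. A hedged counterpart sketch, reusing the same test helpers, where relocation should succeed because the partition lives under the original root (the method name below is made up for illustration):

@Test
public void testAddPartitionSpecChangeRootPathValidSketch() throws Exception {
    Table table = createTable();
    String rootPath = table.getSd().getLocation() + "/addPartSpecRootPath/";
    String newRootPath = table.getSd().getLocation() + "/newRootPath/";
    Partition partition = buildPartition(DB_NAME, TABLE_NAME, "2007", rootPath + "part2007/");
    PartitionSpecProxy partitionSpecProxy = buildPartitionSpec(DB_NAME, TABLE_NAME, rootPath, Lists.newArrayList(partition));
    // The partition location starts with rootPath, so the prefix can be rewritten in place.
    partitionSpecProxy.setRootLocation(newRootPath);
    client.add_partitions_pspec(partitionSpecProxy);
}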

Example 44 with PartitionSpecProxy

Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.

From the class TestAddPartitionsFromPartSpec, method testAddPartitionSpecDiffDBName.

@Test
public void testAddPartitionSpecDiffDBName() throws Exception {
    createDB("NewPartDB");
    createTable();
    createTable("NewPartDB", "NewPartTable", getYearPartCol(), null);
    List<Partition> partitions = new ArrayList<>();
    Partition partition1 = buildPartition(DB_NAME, TABLE_NAME, DEFAULT_YEAR_VALUE);
    Partition partition2 = buildPartition("NewPartDB", "NewPartTable", DEFAULT_YEAR_VALUE);
    partitions.add(partition1);
    partitions.add(partition2);
    PartitionSpecProxy partitionSpecProxy = buildPartitionSpec(DB_NAME, TABLE_NAME, null, partitions);
    try {
        client.add_partitions_pspec(partitionSpecProxy);
        Assert.fail("MetaException should have been thrown.");
    } catch (MetaException e) {
        // Expected exception
    } finally {
        client.dropDatabase("NewPartDB", true, true, true);
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) ArrayList(java.util.ArrayList) PartitionSpecProxy(org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 45 with PartitionSpecProxy

Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.

From the class TestAddPartitionsFromPartSpec, method testAddPartitionSpecNoDB.

@Test(expected = MetaException.class)
public void testAddPartitionSpecNoDB() throws Exception {
    createTable();
    Partition partition = buildPartition(DB_NAME, TABLE_NAME, DEFAULT_YEAR_VALUE);
    PartitionSpecProxy partitionSpecProxy = buildPartitionSpec(null, TABLE_NAME, null, Lists.newArrayList(partition));
    client.add_partitions_pspec(partitionSpecProxy);
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) PartitionSpecProxy(org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Aggregations

PartitionSpecProxy (org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy) 69
Test (org.junit.Test) 60
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest) 59
Partition (org.apache.hadoop.hive.metastore.api.Partition) 53
Table (org.apache.hadoop.hive.metastore.api.Table) 24
PartitionWithoutSD (org.apache.hadoop.hive.metastore.api.PartitionWithoutSD) 15
ArrayList (java.util.ArrayList) 13
Path (org.apache.hadoop.fs.Path) 11
List (java.util.List) 7
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 7
HashMap (java.util.HashMap) 4
PartitionSpec (org.apache.hadoop.hive.metastore.api.PartitionSpec) 4
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 3
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor) 3
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) 3
CompositePartitionSpecProxy (org.apache.hadoop.hive.metastore.partition.spec.CompositePartitionSpecProxy) 3
PartitionSpecWithSharedSD (org.apache.hadoop.hive.metastore.api.PartitionSpecWithSharedSD) 2
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) 2
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) 2
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) 2
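
CompositePartitionSpecProxy, listed above, presents several PartitionSpec objects behind the single PartitionSpecProxy interface. A hedged sketch, assuming the PartitionSpecProxy.Factory.get(List) overload (combineSpecs is a made-up helper name for illustration):

// Hedged sketch: combine multiple Thrift PartitionSpec objects into one proxy;
// Factory.get(List) is expected to return a CompositePartitionSpecProxy.
static PartitionSpecProxy combineSpecs(List<PartitionSpec> partitionSpecs) throws MetaException {
    return PartitionSpecProxy.Factory.get(partitionSpecs);
}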