Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
The class TestAddPartitionsFromPartSpec, method testAddPartitionSpecForViewNullPartLocation.
@Test
@ConditionalIgnoreOnSessionHiveMetastoreClient
public void testAddPartitionSpecForViewNullPartLocation() throws Exception {
  String tableName = "test_add_partition_view";
  createView(tableName);
  Partition partition = buildPartition(DB_NAME, tableName, DEFAULT_YEAR_VALUE);
  // The partition of a view carries no data, so its location is left unset.
  partition.getSd().setLocation(null);
  PartitionSpecProxy partitionSpecProxy =
      buildPartitionSpec(DB_NAME, tableName, null, Lists.newArrayList(partition));
  client.add_partitions_pspec(partitionSpecProxy);
  // The metastore must not assign a default location either: it stays null.
  Partition part = client.getPartition(DB_NAME, tableName, "year=2017");
  Assert.assertNull(part.getSd().getLocation());
}
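The buildPartition and buildPartitionSpec helpers used throughout this page are fixtures of TestAddPartitionsFromPartSpec and are not shown here. A minimal sketch of what they might look like, assuming a single string partition column named year, a hypothetical data column test_value, and the PartitionListComposingSpec flavour of the Thrift PartitionSpec; the real helpers may differ in detail.

// Hypothetical approximations of the test helpers.
private Partition buildPartition(String dbName, String tableName, String value) {
  Partition partition = new Partition();
  partition.setDbName(dbName);
  partition.setTableName(tableName);
  partition.setValues(Lists.newArrayList(value));
  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(Lists.newArrayList(new FieldSchema("test_value", "string", "test col")));
  sd.setLocation(metaStore.getWarehouseRoot() + "/" + tableName + "/year=" + value);
  partition.setSd(sd);
  return partition;
}

private PartitionSpecProxy buildPartitionSpec(String dbName, String tableName, String rootPath,
    List<Partition> partitions) throws MetaException {
  // Wrap a plain list of partitions in a Thrift PartitionSpec and hand it to the proxy factory.
  PartitionSpec partitionSpec = new PartitionSpec();
  partitionSpec.setDbName(dbName);
  partitionSpec.setTableName(tableName);
  partitionSpec.setRootPath(rootPath);
  partitionSpec.setPartitionList(new PartitionListComposingSpec(partitions));
  return PartitionSpecProxy.Factory.get(partitionSpec);
}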
Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
The class TestAddPartitionsFromPartSpec, method testAddPartitionSpecWithSharedSDUpperCaseDBAndTableName.
@Test
public void testAddPartitionSpecWithSharedSDUpperCaseDBAndTableName() throws Exception {
  Table table = createTable();
  PartitionWithoutSD partition = buildPartitionWithoutSD(Lists.newArrayList("2013"), 1);
  List<PartitionWithoutSD> partitions = Lists.newArrayList(partition);
  String location = table.getSd().getLocation() + "/sharedSDTest/";
  // The DB and table names are deliberately upper-cased; the metastore is expected to normalize them.
  PartitionSpec partitionSpec = new PartitionSpec();
  partitionSpec.setDbName(DB_NAME.toUpperCase());
  partitionSpec.setTableName(TABLE_NAME.toUpperCase());
  // All partitions share one storage descriptor; each partition only carries a relative path.
  PartitionSpecWithSharedSD partitionList = new PartitionSpecWithSharedSD();
  partitionList.setPartitions(partitions);
  partitionList.setSd(buildSD(location));
  partitionSpec.setSharedSDPartitionSpec(partitionList);
  PartitionSpecProxy partitionSpecProxy = PartitionSpecProxy.Factory.get(partitionSpec);
  client.add_partitions_pspec(partitionSpecProxy);
  Partition part = client.getPartition(DB_NAME, TABLE_NAME, "year=2013");
  Assert.assertNotNull(part);
  Assert.assertEquals(DB_NAME, part.getDbName());
  Assert.assertEquals(TABLE_NAME, part.getTableName());
  Assert.assertEquals(metaStore.getWarehouseRoot() + "/" + TABLE_NAME + "/sharedSDTest/partwithoutsd1",
      part.getSd().getLocation());
}
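buildPartitionWithoutSD and buildSD are further fixtures of the test class. Judging from the assertion above, each PartitionWithoutSD carries a relative path of the form partwithoutsd<index> that is resolved against the shared storage descriptor's location. A rough sketch under that assumption; the column name and the exact fields set are guesses.

// Hypothetical approximations of the shared-SD helpers.
private PartitionWithoutSD buildPartitionWithoutSD(List<String> values, int index) {
  PartitionWithoutSD partition = new PartitionWithoutSD();
  partition.setValues(values);
  // Resolved partition location = <shared SD location> + <relative path>.
  partition.setRelativePath("partwithoutsd" + index);
  return partition;
}

private StorageDescriptor buildSD(String location) {
  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(Lists.newArrayList(new FieldSchema("test_value", "string", "test col")));
  sd.setLocation(location);
  return sd;
}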
Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
The class TestAddPartitionsFromPartSpec, method testAddPartitionSpecChangeRootPathFromNull.
@Test(expected = MetaException.class)
public void testAddPartitionSpecChangeRootPathFromNull() throws Exception {
  Table table = createTable();
  String rootPath = table.getSd().getLocation() + "/addPartSpecRootPath/";
  String rootPath1 = table.getSd().getLocation() + "/someotherpath/";
  Partition partition = buildPartition(DB_NAME, TABLE_NAME, "2007", rootPath + "part2007/");
  // The spec is built without a root path, so changing the root location afterwards
  // has no old prefix to replace and is expected to fail with a MetaException.
  PartitionSpecProxy partitionSpecProxy =
      buildPartitionSpec(DB_NAME, TABLE_NAME, null, Lists.newArrayList(partition));
  partitionSpecProxy.setRootLocation(rootPath1);
  client.add_partitions_pspec(partitionSpecProxy);
}
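buildPartition(db, table, value, location) is presumably an overload of the helper sketched earlier that takes the partition location explicitly. For contrast with the failing case above, here is a sketch of the happy path in which the spec is built with a non-null root path and setRootLocation then relocates the partition under the new root. This is an illustration, not one of the page's snippets; the final-location check only assumes that the old root prefix is replaced by the new one.

@Test
public void addPartitionSpecChangeRootPathSketch() throws Exception {
  Table table = createTable();
  String rootPath = table.getSd().getLocation() + "/addPartSpecRootPath/";
  String newRootPath = table.getSd().getLocation() + "/someotherpath/";
  Partition partition = buildPartition(DB_NAME, TABLE_NAME, "2007", rootPath + "part2007/");
  // Build the spec with the original root path, then move everything under the new one.
  PartitionSpecProxy partitionSpecProxy =
      buildPartitionSpec(DB_NAME, TABLE_NAME, rootPath, Lists.newArrayList(partition));
  partitionSpecProxy.setRootLocation(newRootPath);
  client.add_partitions_pspec(partitionSpecProxy);
  Partition part = client.getPartition(DB_NAME, TABLE_NAME, "year=2007");
  Assert.assertTrue(part.getSd().getLocation().startsWith(newRootPath));
}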
Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
The class TestAddPartitionsFromPartSpec, method testAddPartitionSpecNonExistingDB.
@Test(expected = InvalidObjectException.class)
public void testAddPartitionSpecNonExistingDB() throws Exception {
  createTable();
  Partition partition = buildPartition(DB_NAME, TABLE_NAME, DEFAULT_YEAR_VALUE);
  // The spec references a database that does not exist, so the call must be rejected.
  PartitionSpecProxy partitionSpecProxy =
      buildPartitionSpec("nonexistingdb", TABLE_NAME, null, Lists.newArrayList(partition));
  client.add_partitions_pspec(partitionSpecProxy);
}
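createTable (and createView in the first snippet) are also fixtures of the test class. The "year=..." lookups above imply a table partitioned by a single string column named year. A rough sketch of such a fixture against the raw Thrift objects, not the fixture actually used by the test class; the column names, serde and input/output formats chosen here are assumptions.

// A minimal createTable-style fixture sketch.
private Table createTable() throws Exception {
  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(Lists.newArrayList(new FieldSchema("test_value", "string", "test col")));
  sd.setLocation(metaStore.getWarehouseRoot() + "/" + TABLE_NAME);
  sd.setInputFormat("org.apache.hadoop.mapred.TextInputFormat");
  sd.setOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
  sd.setSerdeInfo(new SerDeInfo("test_serde",
      "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", new HashMap<String, String>()));
  Table table = new Table();
  table.setDbName(DB_NAME);
  table.setTableName(TABLE_NAME);
  table.setSd(sd);
  table.setPartitionKeys(Lists.newArrayList(new FieldSchema("year", "string", "year part col")));
  client.createTable(table);
  // Read the table back so callers see the metastore's normalized copy.
  return client.getTable(DB_NAME, TABLE_NAME);
}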
Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
The class TestAddPartitionsFromPartSpec, method testAddPartitionSpecMoreValues.
@Test(expected = MetaException.class)
public void testAddPartitionSpecMoreValues() throws Exception {
  createTable();
  // The table has a single partition column, so a partition with two values (year and month)
  // cannot be added and the call is expected to fail.
  Partition partition = buildPartition(Lists.newArrayList("2017", "march"), getYearAndMonthPartCols(), 1);
  PartitionSpecProxy partitionSpecProxy =
      buildPartitionSpec(DB_NAME, TABLE_NAME, null, Lists.newArrayList(partition));
  client.add_partitions_pspec(partitionSpecProxy);
}
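getYearAndMonthPartCols and the list-based buildPartition overload used here are the remaining fixtures. They might look roughly as follows; the storage location layout is an assumption.

// Hypothetical approximations of the remaining helpers.
private List<FieldSchema> getYearAndMonthPartCols() {
  return Lists.newArrayList(
      new FieldSchema("year", "string", "year part col"),
      new FieldSchema("month", "string", "month part col"));
}

private Partition buildPartition(List<String> values, List<FieldSchema> partCols, int index) {
  Partition partition = new Partition();
  partition.setDbName(DB_NAME);
  partition.setTableName(TABLE_NAME);
  partition.setValues(values);
  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(partCols);
  sd.setLocation(metaStore.getWarehouseRoot() + "/" + TABLE_NAME + "/part" + index);
  partition.setSd(sd);
  return partition;
}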