Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
From the class TestAddPartitionsFromPartSpec, method testAddPartitionSpecPartDuplicateInSpecs.
@Test(expected = MetaException.class)
public void testAddPartitionSpecPartDuplicateInSpecs() throws Exception {
  createTable(DB_NAME, TABLE_NAME, getYearPartCol(), metaStore.getWarehouseRoot() + "/" + TABLE_NAME);
  Partition partition = buildPartition(Lists.newArrayList("2002"), getYearPartCol(), 1);
  PartitionWithoutSD partitionWithoutSD = buildPartitionWithoutSD(Lists.newArrayList("2002"), 0);
  PartitionSpecProxy partitionSpecProxy =
      buildPartitionSpec(Lists.newArrayList(partition), Lists.newArrayList(partitionWithoutSD));
  client.add_partitions_pspec(partitionSpecProxy);
}
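The helper buildPartitionSpec(partitions, partitionsWithoutSD) is defined elsewhere in TestAddPartitionsFromPartSpec and is not shown on this page. As orientation only, a composite proxy of that shape can be assembled from the thrift classes in org.apache.hadoop.hive.metastore.api roughly as below. This is a minimal sketch, not the test's actual helper: it assumes those thrift classes plus Guava's Lists are imported, and it attaches a bare StorageDescriptor (location only) to the shared-SD spec, which is a simplification.

private PartitionSpecProxy buildPartitionSpec(List<Partition> partitions,
    List<PartitionWithoutSD> partitionsWithoutSD) throws MetaException {
  // Spec 1: fully specified partitions, composed as a plain list.
  PartitionSpec listSpec = new PartitionSpec();
  listSpec.setDbName(DB_NAME);
  listSpec.setTableName(TABLE_NAME);
  listSpec.setPartitionList(new PartitionListComposingSpec(partitions));

  // Spec 2: partitions without their own SD, all sharing one StorageDescriptor.
  // A bare SD with only a location keeps the sketch short; the real helper builds a full one.
  StorageDescriptor sharedSd = new StorageDescriptor();
  sharedSd.setLocation(metaStore.getWarehouseRoot() + "/" + TABLE_NAME);
  PartitionSpec sharedSpec = new PartitionSpec();
  sharedSpec.setDbName(DB_NAME);
  sharedSpec.setTableName(TABLE_NAME);
  sharedSpec.setSharedSDPartitionSpec(new PartitionSpecWithSharedSD(partitionsWithoutSD, sharedSd));

  // The factory wraps a list of thrift PartitionSpecs into one composite proxy.
  return PartitionSpecProxy.Factory.get(Lists.newArrayList(listSpec, sharedSpec));
}

Because both specs carry the same year value "2002", the metastore rejects the request, which is the MetaException the test expects.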
Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
From the class TestAddPartitionsFromPartSpec, method testAddPartitionSpecNullValues.
@Test(expected = MetaException.class)
public void testAddPartitionSpecNullValues() throws Exception {
  createTable();
  Partition partition = buildPartition(DB_NAME, TABLE_NAME, null);
  partition.setValues(null);
  PartitionSpecProxy partitionSpecProxy =
      buildPartitionSpec(DB_NAME, TABLE_NAME, null, Lists.newArrayList(partition));
  client.add_partitions_pspec(partitionSpecProxy);
}
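The no-argument createTable() used here and in the following tests is another class-local helper that is not shown. A hedged sketch of how such a partitioned test table can be created through TableBuilder and the metastore client might look like this; the extra "test_id" data column and the exact builder calls are assumptions, with YEAR_COL_NAME and DEFAULT_COL_TYPE taken from the snippets on this page.

private void createTable() throws Exception {
  // Sketch only: a table partitioned by a single "year" column,
  // mirroring the YEAR_COL_NAME / DEFAULT_COL_TYPE constants used above.
  Table table = new TableBuilder()
      .setDbName(DB_NAME)
      .setTableName(TABLE_NAME)
      .addCol("test_id", "int")
      .addPartCol(YEAR_COL_NAME, DEFAULT_COL_TYPE)
      .setLocation(metaStore.getWarehouseRoot() + "/" + TABLE_NAME)
      .build(metaStore.getConf());
  client.createTable(table);
}

With the table in place, the test builds a Partition whose values list is explicitly null, so add_partitions_pspec has nothing to key the partition on and fails with the expected MetaException.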
Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
From the class TestAddPartitionsFromPartSpec, method testAddPartitionSpecWithSharedSDNullSd.
@Test(expected = MetaException.class)
public void testAddPartitionSpecWithSharedSDNullSd() throws Exception {
  createTable();
  PartitionWithoutSD partition = buildPartitionWithoutSD(Lists.newArrayList("2002"), 0);
  StorageDescriptor sd = null;
  PartitionSpecProxy partitionSpecProxy = buildPartitionSpecWithSharedSD(Lists.newArrayList(partition), sd);
  client.add_partitions_pspec(partitionSpecProxy);
}
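buildPartitionSpecWithSharedSD(partitions, sd) wraps the shared-SD form of the thrift spec, and passing a null StorageDescriptor is exactly what this test exercises. A minimal sketch of such a helper, under the same assumptions as the earlier sketch and again not the test's actual code, could be:

private PartitionSpecProxy buildPartitionSpecWithSharedSD(List<PartitionWithoutSD> partitions,
    StorageDescriptor sd) throws MetaException {
  PartitionSpec spec = new PartitionSpec();
  spec.setDbName(DB_NAME);
  spec.setTableName(TABLE_NAME);
  // The shared SD is attached once for all partitions; here it may be null on purpose.
  spec.setSharedSDPartitionSpec(new PartitionSpecWithSharedSD(partitions, sd));
  return PartitionSpecProxy.Factory.get(spec);
}

With sd left null the metastore has no location or serde information for the partitions, so add_partitions_pspec fails with the expected MetaException.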
Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
From the class TestAddPartitionsFromPartSpec, method testAddPartitionSpecNoValue.
@Test(expected = MetaException.class)
public void testAddPartitionSpecNoValue() throws Exception {
  createTable();
  Partition partition = new PartitionBuilder()
      .setDbName(DB_NAME)
      .setTableName(TABLE_NAME)
      .addCol(YEAR_COL_NAME, DEFAULT_COL_TYPE)
      .setLocation(metaStore.getWarehouseRoot() + "/addpartspectest")
      .build(metaStore.getConf());
  PartitionSpecProxy partitionSpecProxy =
      buildPartitionSpec(DB_NAME, TABLE_NAME, null, Lists.newArrayList(partition));
  client.add_partitions_pspec(partitionSpecProxy);
}
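The four-argument buildPartitionSpec(dbName, tableName, rootPath, partitions) overload used in this and the previous tests wraps a plain partition list in a single thrift PartitionSpec. A sketch under the same assumptions as above:

private PartitionSpecProxy buildPartitionSpec(String dbName, String tableName, String rootPath,
    List<Partition> partitions) throws MetaException {
  PartitionSpec spec = new PartitionSpec();
  spec.setDbName(dbName);
  spec.setTableName(tableName);
  spec.setRootPath(rootPath); // null in these tests
  spec.setPartitionList(new PartitionListComposingSpec(partitions));
  return PartitionSpecProxy.Factory.get(spec);
}

In this test the Partition built via PartitionBuilder has a column and a location but no partition values at all, so the spec is rejected with a MetaException.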
Use of org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy in project hive by apache.
From the class TestAddPartitionsFromPartSpec, method testAddPartitionSpecMoreThanThreadCountsOneFails.
@Test
public void testAddPartitionSpecMoreThanThreadCountsOneFails() throws Exception {
  createTable();
  String tableLocation = metaStore.getWarehouseRoot() + "/" + TABLE_NAME;
  List<Partition> partitions = new ArrayList<>();
  for (int i = 0; i < 50; i++) {
    String value = String.valueOf(2000 + i);
    String location = tableLocation + "/year=" + value;
    if (i == 30) {
      // Give one partition an unreachable location so that one of the
      // add-partition threads fails while the others succeed.
      location = "invalidhost:80000/wrongfolder";
    }
    Partition partition = buildPartition(DB_NAME, TABLE_NAME, value, location);
    partitions.add(partition);
  }
  PartitionSpecProxy partitionSpecProxy = buildPartitionSpec(DB_NAME, TABLE_NAME, null, partitions);
  try {
    client.add_partitions_pspec(partitionSpecProxy);
    Assert.fail("MetaException should have happened.");
  } catch (MetaException e) {
    // Expected exception
  }
  // The whole spec must be rolled back: no partitions in the metastore,
  // and no leftover directories for the partitions with valid locations.
  List<Partition> parts = client.listPartitions(DB_NAME, TABLE_NAME, MAX);
  Assert.assertNotNull(parts);
  Assert.assertTrue(parts.isEmpty());
  for (Partition partition : partitions) {
    if (!"invalidhost:80000/wrongfolder".equals(partition.getSd().getLocation())) {
      Assert.assertFalse(metaStore.isPathExists(new Path(partition.getSd().getLocation())));
    }
  }
}
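buildPartition(DB_NAME, TABLE_NAME, value, location) is again a class-local helper that is not shown. A sketch based on the PartitionBuilder calls visible in testAddPartitionSpecNoValue, with the caveat that addValue is assumed to exist on the builder:

private Partition buildPartition(String dbName, String tableName, String value,
    String location) throws MetaException {
  return new PartitionBuilder()
      .setDbName(dbName)
      .setTableName(tableName)
      .addValue(value) // the year partition value, e.g. "2002"
      .addCol(YEAR_COL_NAME, DEFAULT_COL_TYPE)
      .setLocation(location)
      .build(metaStore.getConf());
}

The assertions at the end of the test check both sides of the rollback: listPartitions returns an empty list, and none of the directories for the partitions with valid locations were left behind on the file system.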