Use of org.apache.hadoop.hive.ql.ddl.table.partition.add.AlterTableAddPartitionDesc in project hive by apache.
The example below is the method addPartitionDesc of the class FSTableEvent. It converts a metastore Partition read during a replication bootstrap into an AlterTableAddPartitionDesc: it builds the partition spec, resolves the data location (rewriting it under the dump path for managed tables), copies any column statistics together with their write id, and attaches the replication spec.
private AlterTableAddPartitionDesc addPartitionDesc(Path fromPath, ImportTableDesc tblDesc, Partition partition)
    throws SemanticException {
  try {
    // Build the partition spec (partition column name -> value) from the table's partition columns.
    Map<String, String> partitionSpec = EximUtil.makePartSpec(tblDesc.getPartCols(), partition.getValues());
    StorageDescriptor sd = partition.getSd();
    String location = sd.getLocation();
    if (!tblDesc.isExternal()) {
      /**
       * This is required for file listing of all files in a partition for a managed table, as described in
       * {@link org.apache.hadoop.hive.ql.exec.repl.bootstrap.events.filesystem.BootstrapEventsIterator}.
       */
      location = new Path(fromPath, Warehouse.makePartName(tblDesc.getPartCols(), partition.getValues())).toString();
    }
    // Carry over column statistics (and the associated write id) if the dumped partition has them,
    // retargeting the stats descriptor at the destination database and table.
    ColumnStatistics columnStatistics = null;
    long writeId = -1;
    if (partition.isSetColStats()) {
      ColumnStatistics colStats = partition.getColStats();
      ColumnStatisticsDesc colStatsDesc = new ColumnStatisticsDesc(colStats.getStatsDesc());
      colStatsDesc.setTableName(tblDesc.getTableName());
      colStatsDesc.setDbName(tblDesc.getDatabaseName());
      columnStatistics = new ColumnStatistics(colStatsDesc, colStats.getStatsObj());
      columnStatistics.setEngine(colStats.getEngine());
      writeId = partition.getWriteId();
    }
    // Describe the single partition, then wrap it in the ADD PARTITION descriptor.
    AlterTableAddPartitionDesc.PartitionDesc partitionDesc = new AlterTableAddPartitionDesc.PartitionDesc(
        partitionSpec, location, partition.getParameters(), sd.getInputFormat(), sd.getOutputFormat(),
        sd.getNumBuckets(), sd.getCols(), sd.getSerdeInfo().getSerializationLib(),
        sd.getSerdeInfo().getParameters(), sd.getBucketCols(), sd.getSortCols(), columnStatistics, writeId);
    AlterTableAddPartitionDesc addPartitionDesc = new AlterTableAddPartitionDesc(
        tblDesc.getDatabaseName(), tblDesc.getTableName(), true, ImmutableList.of(partitionDesc));
    addPartitionDesc.setReplicationSpec(replicationSpec());
    return addPartitionDesc;
  } catch (Exception e) {
    throw new SemanticException(e);
  }
}
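For context, here is a minimal sketch (not taken from the Hive sources) of the same two-step construction used above: build one AlterTableAddPartitionDesc.PartitionDesc, then wrap it in an AlterTableAddPartitionDesc. The class name, database/table names, spec value, and location are illustrative placeholders; the constructor argument order follows the call in the method above.

import java.util.Collections;
import java.util.Map;

import com.google.common.collect.ImmutableList;
import org.apache.hadoop.hive.ql.ddl.table.partition.add.AlterTableAddPartitionDesc;

public class AddPartitionDescSketch {
  // Builds an ADD PARTITION descriptor for a single text-format partition
  // with no bucketing, column statistics, or write id.
  public static AlterTableAddPartitionDesc forSinglePartition() {
    Map<String, String> spec = Collections.singletonMap("ds", "2023-01-01"); // hypothetical spec

    AlterTableAddPartitionDesc.PartitionDesc part = new AlterTableAddPartitionDesc.PartitionDesc(
        spec,
        "/warehouse/demo.db/events/ds=2023-01-01",                     // hypothetical location
        Collections.emptyMap(),                                       // partition parameters
        "org.apache.hadoop.mapred.TextInputFormat",                   // input format
        "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat", // output format
        -1,                                                           // no bucketing
        Collections.emptyList(),                                      // columns
        "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe",         // serialization lib
        Collections.emptyMap(),                                       // serde parameters
        Collections.emptyList(),                                      // bucket columns
        Collections.emptyList(),                                      // sort columns
        null,                                                         // no column statistics
        -1);                                                          // no write id

    return new AlterTableAddPartitionDesc("demo", "events", true, ImmutableList.of(part));
  }
}

Unlike the replication path above, this sketch sets no replication spec, since that is only needed when the descriptor is produced by a REPL load.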