Use of io.prestosql.plugin.hive.TypeTranslator in the openLooKeng project hetu-core: the CarbondataMetadata class, method updateSchemaInfoAddColumn.
/**
 * Builds the {@link SchemaEvolutionEntry} for an ALTER TABLE ADD COLUMN operation and
 * rewrites the fact table's column list so the new column lands in the correct position.
 *
 * <p>Resulting column order (mirrors CarbonData's expected layout):
 * plain dimension columns, long-string (VARCHAR) columns, complex-type columns,
 * measure columns, and finally partition columns.
 *
 * <p>Side effects: mutates {@code tableInfo} (column list, fact table, last-updated time).
 *
 * @param column the column to add, as described by the engine's metadata
 * @return a schema evolution entry recording the added column, stamped with {@code timeStamp}
 * @throws PrestoException if the new column is complex, if a visible column name would be
 *         duplicated, or if two columns share the same columnUniqueId
 */
private SchemaEvolutionEntry updateSchemaInfoAddColumn(ColumnMetadata column)
{
    HiveColumnHandle columnHandle = new HiveColumnHandle(
            column.getName(),
            HiveType.toHiveType(typeTranslator, column.getType()),
            column.getType().getTypeSignature(),
            tableInfo.getFactTable().getListOfColumns().size(),
            HiveColumnHandle.ColumnType.REGULAR,
            Optional.empty());
    TableSchema tableSchema = tableInfo.getFactTable();
    List<ColumnSchema> tableColumns = tableSchema.getListOfColumns();
    // The new column is appended after the highest existing schema ordinal.
    int currentSchemaOrdinal = tableColumns.stream()
            .max(Comparator.comparingInt(ColumnSchema::getSchemaOrdinal))
            .orElseThrow(NoSuchElementException::new)
            .getSchemaOrdinal() + 1;
    TableSchemaBuilder schemaBuilder = new TableSchemaBuilder();
    ColumnSchema newColumn = schemaBuilder.addColumn(
            new StructField(columnHandle.getName(), CarbondataHetuFilterUtil.spi2CarbondataTypeMapper(columnHandle)),
            null, false, false);
    newColumn.setSchemaOrdinal(currentSchemaOrdinal);
    // Fail fast before rebuilding the column layout: complex columns are not addable.
    if (newColumn.isComplexColumn()) {
        throw new PrestoException(GENERIC_INTERNAL_ERROR, "Complex column cannot be added");
    }
    List<ColumnSchema> columnSchemas = new ArrayList<>();
    columnSchemas.add(newColumn);

    // Plain dimension columns first (no complex types, no VARCHAR/long-string,
    // and no implicit columns, which carry schemaOrdinal == -1).
    List<ColumnSchema> allColumns = tableColumns.stream()
            .filter(cols -> cols.isDimensionColumn()
                    && !cols.getDataType().isComplexType()
                    && cols.getSchemaOrdinal() != -1
                    && cols.getDataType() != DataTypes.VARCHAR)
            .collect(toList());
    List<ColumnSchema> longStringColumns = new ArrayList<>();
    if (newColumn.getDataType() == DataTypes.VARCHAR) {
        longStringColumns.add(newColumn);
    }
    else if (newColumn.isDimensionColumn()) {
        // New non-long-string dimension goes with the plain dimension columns.
        allColumns.add(newColumn);
    }
    // Old long-string columns, then the new long-string column (if any) after them.
    allColumns.addAll(tableColumns.stream()
            .filter(cols -> cols.isDimensionColumn() && cols.getDataType() == DataTypes.VARCHAR)
            .collect(toList()));
    allColumns.addAll(longStringColumns);
    // Complex-type columns go at the end of the dimension columns.
    allColumns.addAll(tableColumns.stream()
            .filter(cols -> cols.isDimensionColumn() && (cols.isComplexColumn() || cols.getSchemaOrdinal() == -1))
            .collect(toList()));
    // Original measure columns, then a new measure column at the very end.
    allColumns.addAll(tableColumns.stream()
            .filter(cols -> !cols.isDimensionColumn())
            .collect(toList()));
    if (!newColumn.isDimensionColumn()) {
        allColumns.add(newColumn);
    }

    // A duplicated visible name groups into a list of size 2, so the threshold is > 1.
    // (The previous "> 2" check let a single duplicate pass undetected.)
    allColumns.stream()
            .filter(cols -> !cols.isInvisible())
            .collect(Collectors.groupingBy(ColumnSchema::getColumnName))
            .forEach((columnName, schemaList) -> {
                if (schemaList.size() > 1) {
                    throw new PrestoException(GENERIC_INTERNAL_ERROR, "Duplicate columns found");
                }
            });
    // columnUniqueId must be unique; single grouping pass instead of an O(n^2) nested scan.
    Collectors.groupingBy(ColumnSchema::getColumnUniqueId, Collectors.counting());
    boolean duplicateId = allColumns.stream()
            .collect(Collectors.groupingBy(ColumnSchema::getColumnUniqueId, Collectors.counting()))
            .values().stream()
            .anyMatch(count -> count > 1);
    if (duplicateId) {
        throw new PrestoException(GENERIC_INTERNAL_ERROR, "Two columns can not have same columnId");
    }

    if (tableInfo.getFactTable().getPartitionInfo() != null) {
        // Keep partition columns at the end of the schema.
        List<ColumnSchema> partitionColumns = tableInfo.getFactTable().getPartitionInfo().getColumnSchemaList();
        allColumns = allColumns.stream()
                .filter(cols -> !partitionColumns.contains(cols))
                .collect(toList());
        allColumns.addAll(partitionColumns);
    }
    tableSchema.setListOfColumns(allColumns);
    tableInfo.setLastUpdatedTime(timeStamp);
    tableInfo.setFactTable(tableSchema);

    SchemaEvolutionEntry schemaEvolutionEntry = new SchemaEvolutionEntry();
    schemaEvolutionEntry.setTimeStamp(timeStamp);
    schemaEvolutionEntry.setAdded(columnSchemas);
    return schemaEvolutionEntry;
}
Aggregations