Search in sources:

Example 1 with SqlUtil

Use of org.apache.calcite.sql.SqlUtil in the Apache Flink project.

The method genValues of the class HiveParserBaseSemanticAnalyzer.

/**
 * Builds a Calcite Values RelNode from rows of raw string data, converting each string to a
 * typed {@link RexLiteral} according to the corresponding column type of the temp table, and
 * registers the resulting columns with the given row resolver under {@code tabAlias}.
 *
 * <p>Conversion rules per primitive category: integral types and DECIMAL become exact
 * literals (a DECIMAL that does not fit the target type becomes NULL, matching Hive's
 * overflow semantics); FLOAT/DOUBLE become approximate literals; BOOLEAN is parsed with
 * {@code Boolean.parseBoolean}; everything else falls back to a char literal. A {@code null}
 * element always becomes a NULL literal of the target type.
 *
 * @param tabAlias alias under which generated columns are put into the row resolver
 * @param tmpTable temp table whose column types drive the literal conversion; all column
 *     types are assumed to be primitive (non-primitive types would fail the cast below)
 * @param rowResolver row resolver to populate with one ColumnInfo per generated column
 * @param cluster planner cluster supplying the RexBuilder and type factory
 * @param values rows of raw string values, one inner list per row
 * @return a Values RelNode whose row type is the least-restrictive struct type across rows
 */
public static RelNode genValues(String tabAlias, Table tmpTable, HiveParserRowResolver rowResolver, RelOptCluster cluster, List<List<String>> values) {
    List<TypeInfo> tmpTableTypes = tmpTable.getCols().stream().map(f -> TypeInfoUtils.getTypeInfoFromTypeString(f.getType())).collect(Collectors.toList());
    RexBuilder rexBuilder = cluster.getRexBuilder();
    // calcite types for each field; VALUES columns are expected to be primitive — TODO confirm
    List<RelDataType> calciteTargetTypes = tmpTableTypes.stream().map(ti -> HiveParserTypeConverter.convert((PrimitiveTypeInfo) ti, rexBuilder.getTypeFactory())).collect(Collectors.toList());
    // calcite field names, derived positionally (EXPR$0, EXPR$1, ...)
    List<String> calciteFieldNames = IntStream.range(0, calciteTargetTypes.size()).mapToObj(SqlUtil::deriveAliasFromOrdinal).collect(Collectors.toList());
    // calcite type for each row; collected so the final row type can be the least restrictive
    List<RelDataType> calciteRowTypes = new ArrayList<>();
    List<List<RexLiteral>> rows = new ArrayList<>();
    for (List<String> value : values) {
        // lazy message formatting: the template is only rendered when the check fails
        // (Flink's Guava-style Preconditions substitutes %s placeholders, hence %s not %d)
        Preconditions.checkArgument(value.size() == tmpTableTypes.size(), "Values table col length (%s) and data length (%s) mismatch", tmpTableTypes.size(), value.size());
        List<RexLiteral> row = new ArrayList<>();
        for (int i = 0; i < tmpTableTypes.size(); i++) {
            PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) tmpTableTypes.get(i);
            RelDataType calciteType = calciteTargetTypes.get(i);
            String col = value.get(i);
            if (col == null) {
                // a missing value becomes a typed NULL literal
                row.add(rexBuilder.makeNullLiteral(calciteType));
            } else {
                switch(primitiveTypeInfo.getPrimitiveCategory()) {
                    case BYTE:
                    case SHORT:
                    case INT:
                    case LONG:
                        row.add(rexBuilder.makeExactLiteral(new BigDecimal(col), calciteType));
                        break;
                    case DECIMAL:
                        // a decimal that overflows the declared precision/scale becomes NULL
                        BigDecimal bigDec = new BigDecimal(col);
                        row.add(SqlTypeUtil.isValidDecimalValue(bigDec, calciteType) ? rexBuilder.makeExactLiteral(bigDec, calciteType) : rexBuilder.makeNullLiteral(calciteType));
                        break;
                    case FLOAT:
                    case DOUBLE:
                        row.add(rexBuilder.makeApproxLiteral(new BigDecimal(col), calciteType));
                        break;
                    case BOOLEAN:
                        row.add(rexBuilder.makeLiteral(Boolean.parseBoolean(col)));
                        break;
                    default:
                        // strings and any remaining category are treated as char literals
                        row.add(rexBuilder.makeCharLiteral(HiveParserUtils.asUnicodeString(col)));
                }
            }
        }
        calciteRowTypes.add(rexBuilder.getTypeFactory().createStructType(row.stream().map(RexLiteral::getType).collect(Collectors.toList()), calciteFieldNames));
        rows.add(row);
    }
    // compute the final row type as the least restrictive type across all per-row types
    RelDataType calciteRowType = rexBuilder.getTypeFactory().leastRestrictive(calciteRowTypes);
    // expose the generated columns to downstream resolution under the given alias
    for (int i = 0; i < calciteFieldNames.size(); i++) {
        ColumnInfo colInfo = new ColumnInfo(calciteFieldNames.get(i), HiveParserTypeConverter.convert(calciteRowType.getFieldList().get(i).getType()), tabAlias, false);
        rowResolver.put(tabAlias, calciteFieldNames.get(i), colInfo);
    }
    return HiveParserUtils.genValuesRelNode(cluster, rexBuilder.getTypeFactory().createStructType(calciteRowType.getFieldList()), rows);
}
Also used : Tuple2(org.apache.flink.api.java.tuple.Tuple2) Order(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order) LoggerFactory(org.slf4j.LoggerFactory) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) FunctionRegistry(org.apache.hadoop.hive.ql.exec.FunctionRegistry) OrderSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderSpec) StringUtils(org.apache.commons.lang3.StringUtils) HiveParserUtils(org.apache.flink.table.planner.delegation.hive.HiveParserUtils) SqlCall(org.apache.calcite.sql.SqlCall) BigDecimal(java.math.BigDecimal) CorrelationId(org.apache.calcite.rel.core.CorrelationId) SqlNode(org.apache.calcite.sql.SqlNode) HiveParserErrorMsg(org.apache.flink.table.planner.delegation.hive.parse.HiveParserErrorMsg) SqlUtil(org.apache.calcite.sql.SqlUtil) Pair(org.apache.commons.lang3.tuple.Pair) HiveParserTypeCheckProcFactory(org.apache.flink.table.planner.delegation.hive.HiveParserTypeCheckProcFactory) RexNode(org.apache.calcite.rex.RexNode) Map(java.util.Map) HiveParserUtils.removeASTChild(org.apache.flink.table.planner.delegation.hive.HiveParserUtils.removeASTChild) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) RelOptCluster(org.apache.calcite.plan.RelOptCluster) RexWindowBound(org.apache.calcite.rex.RexWindowBound) ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) HiveParserDDLSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.parse.HiveParserDDLSemanticAnalyzer) SqlKind(org.apache.calcite.sql.SqlKind) TreeVisitor(org.antlr.runtime.tree.TreeVisitor) RexLiteral(org.apache.calcite.rex.RexLiteral) org.apache.hadoop.hive.serde.serdeConstants(org.apache.hadoop.hive.serde.serdeConstants) Set(java.util.Set) Preconditions(org.apache.flink.util.Preconditions) SessionState(org.apache.hadoop.hive.ql.session.SessionState) 
Collectors(java.util.stream.Collectors) PartitionSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitionSpec) Serializable(java.io.Serializable) VirtualColumn(org.apache.hadoop.hive.ql.metadata.VirtualColumn) List(java.util.List) GenericUDAFEvaluator(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator) ObjectInspectorConverters(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters) UnsupportedEncodingException(java.io.UnsupportedEncodingException) HiveASTParser(org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser) ErrorMsg(org.apache.hadoop.hive.ql.ErrorMsg) OrderExpression(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.OrderExpression) RexCall(org.apache.calcite.rex.RexCall) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) IntStream(java.util.stream.IntStream) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) PartitioningSpec(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitioningSpec) HashMap(java.util.HashMap) Deque(java.util.Deque) NullOrder(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.NullOrder) ArrayList(java.util.ArrayList) Utilities(org.apache.hadoop.hive.ql.exec.Utilities) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) RexFieldCollation(org.apache.calcite.rex.RexFieldCollation) SqlLiteral(org.apache.calcite.sql.SqlLiteral) PartitionExpression(org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitionExpression) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) SqlWindow(org.apache.calcite.sql.SqlWindow) TreeVisitorAction(org.antlr.runtime.tree.TreeVisitorAction) PlanUtils(org.apache.hadoop.hive.ql.plan.PlanUtils) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) 
Hive(org.apache.hadoop.hive.ql.metadata.Hive) WindowingSpec(org.apache.hadoop.hive.ql.parse.WindowingSpec) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) Logger(org.slf4j.Logger) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) TypeInfoFactory(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory) Iterator(java.util.Iterator) RexBuilder(org.apache.calcite.rex.RexBuilder) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) GroupByDesc(org.apache.hadoop.hive.ql.plan.GroupByDesc) HiveConf(org.apache.hadoop.hive.conf.HiveConf) Table(org.apache.hadoop.hive.ql.metadata.Table) SqlTypeUtil(org.apache.calcite.sql.type.SqlTypeUtil) RelNode(org.apache.calcite.rel.RelNode) FrameworkConfig(org.apache.calcite.tools.FrameworkConfig) Node(org.apache.hadoop.hive.ql.lib.Node) Partition(org.apache.hadoop.hive.ql.metadata.Partition) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) Tree(org.antlr.runtime.tree.Tree) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) HiveParserRexNodeConverter(org.apache.flink.table.planner.delegation.hive.HiveParserRexNodeConverter) ObjectPair(org.apache.hadoop.hive.common.ObjectPair) HiveParserConstants(org.apache.flink.table.planner.delegation.hive.HiveParserConstants) BitSet(java.util.BitSet) ArrayDeque(java.util.ArrayDeque) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException) Collections(java.util.Collections) RexLiteral(org.apache.calcite.rex.RexLiteral) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) BigDecimal(java.math.BigDecimal) RexBuilder(org.apache.calcite.rex.RexBuilder) 
List(java.util.List) ArrayList(java.util.ArrayList)

Aggregations

Serializable (java.io.Serializable)1 UnsupportedEncodingException (java.io.UnsupportedEncodingException)1 BigDecimal (java.math.BigDecimal)1 ArrayDeque (java.util.ArrayDeque)1 ArrayList (java.util.ArrayList)1 BitSet (java.util.BitSet)1 Collections (java.util.Collections)1 Deque (java.util.Deque)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 Iterator (java.util.Iterator)1 LinkedHashMap (java.util.LinkedHashMap)1 List (java.util.List)1 Map (java.util.Map)1 Set (java.util.Set)1 Collectors (java.util.stream.Collectors)1 IntStream (java.util.stream.IntStream)1 Tree (org.antlr.runtime.tree.Tree)1 TreeVisitor (org.antlr.runtime.tree.TreeVisitor)1 TreeVisitorAction (org.antlr.runtime.tree.TreeVisitorAction)1