Search in sources:

Example 51 with CharTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in the Apache Hive project.

Class: TestVectorFilterCompare — method: testStringFamily.

@Test
public void testStringFamily() throws Exception {
    // Fixed seed keeps the generated comparison data reproducible across runs.
    // NOTE: the same Random instance feeds every doTests call, so call order matters.
    final Random rand = new Random(7743);
    // Exercise each member of Hive's string family: string, char(10), varchar(10).
    doTests(rand, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
    doTests(rand, new CharTypeInfo(10), new CharTypeInfo(10));
    doTests(rand, new VarcharTypeInfo(10), new VarcharTypeInfo(10));
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) Random(java.util.Random) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) Test(org.junit.Test)

Example 52 with CharTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in the Apache Hive project.

Class: TestGenericUDFCeil — method: testChar.

@Test
public void testChar() throws HiveException {
    // ceil() over a char argument: the UDF parses the character text as a number.
    GenericUDFCeil ceil = new GenericUDFCeil();
    CharTypeInfo charType = TypeInfoFactory.getCharTypeInfo(12);
    HiveCharWritable arg = new HiveCharWritable(new HiveChar("-32300.004747", 12));
    ObjectInspector[] argOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(charType) };
    DeferredObject[] deferredArgs = { new DeferredJavaObject(arg) };
    // The result type of ceil over char input is bigint.
    PrimitiveObjectInspector resultOI = (PrimitiveObjectInspector) ceil.initialize(argOIs);
    Assert.assertEquals(TypeInfoFactory.longTypeInfo, resultOI.getTypeInfo());
    // ceil rounds toward positive infinity: ceil(-32300.004747) == -32300.
    LongWritable result = (LongWritable) ceil.evaluate(deferredArgs);
    Assert.assertEquals(-32300L, result.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) LongWritable(org.apache.hadoop.io.LongWritable) Test(org.junit.Test)

Example 53 with CharTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in the Taier project (DTStack).

Class: AlterAstNodeParser — method: getColumns.

/**
 * Extracts the column definitions (name, type, optional comment, position)
 * from a TOK_TABCOLLIST-style AST node.
 *
 * @param tableColsNode AST node whose children are the column definitions
 * @return the parsed columns, or {@code null} when the node has no children
 *         (callers appear to treat null as "no columns" — preserved as-is)
 * @throws Exception if a parameterized type node cannot be parsed
 */
private List<Column> getColumns(ASTNode tableColsNode) throws Exception {
    if (CollectionUtils.isEmpty(tableColsNode.getChildren())) {
        return null;
    }
    List<Column> columns = new ArrayList<>();
    int index = 0;
    for (Node node : tableColsNode.getChildren()) {
        ArrayList<Node> children = ((ASTNode) node).getChildren();
        Column column = new Column();
        // First child: column name; strip one pair of surrounding backticks/quotes.
        String name = ((ASTNode) children.get(0)).getToken().getText();
        if (name.matches("^[`'\"].*")) {
            name = name.substring(1, name.length() - 1);
        }
        column.setName(name);
        // Second child: column type. Parameterized types (decimal/varchar/char)
        // carry precision/length in sub-nodes, so resolve them via ParseUtils.
        String colType = ((ASTNode) children.get(1)).getToken().getText();
        switch (colType) {
            case "TOK_DECIMAL":
                colType = ParseUtils.getDecimalTypeTypeInfo((ASTNode) children.get(1)).getTypeName();
                break;
            case "TOK_VARCHAR":
                colType = ParseUtils.getVarcharTypeInfo((ASTNode) children.get(1)).getTypeName();
                break;
            case "TOK_CHAR":
                colType = ParseUtils.getCharTypeInfo((ASTNode) children.get(1)).getTypeName();
                break;
        }
        // Remaining raw tokens look like "TOK_INT" -> strip the "TOK_" prefix.
        column.setType(colType.startsWith("TOK_") ? colType.substring(4) : colType);
        // Optional third child: the column comment, with surrounding quotes stripped.
        if (children.size() == 3) {
            String comment = ((ASTNode) children.get(2)).getToken().getText();
            if (comment.matches("^['\"].*")) {
                comment = comment.substring(1, comment.length() - 1);
            }
            column.setComment(comment);
        }
        column.setIndex(index++);
        columns.add(column);
    }
    return columns;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Node(org.apache.hadoop.hive.ql.lib.Node) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) ArrayList(java.util.ArrayList) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 54 with CharTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in the openLooKeng (hetu-core) project.

Class: CarbondataHetuFilterUtil — method: convertDataByType.

// Normalizes a raw filter literal into the Java value expected for the given
// Hive column type. Types without special handling fall through and return
// rawData unchanged.
private static Object convertDataByType(Object rawData, HiveType type) {
    // SHORT is widened to Integer alongside INT.
    if (type.equals(HiveType.HIVE_INT) || type.equals(HiveType.HIVE_SHORT)) {
        return Integer.valueOf(rawData.toString());
    } else if (type.equals(HiveType.HIVE_LONG)) {
        return rawData;
    } else if (type.equals(HiveType.HIVE_STRING) || type.getTypeInfo() instanceof VarcharTypeInfo || type.getTypeInfo() instanceof CharTypeInfo) {
        // String-family literals arrive either as an airlift Slice or already decoded.
        if (rawData instanceof Slice) {
            String value = ((Slice) rawData).toStringUtf8();
            if (type.getTypeInfo() instanceof CharTypeInfo) {
                // char(n) comparisons are space-padded: right-pad the literal up to
                // the declared length so it matches stored char(n) values.
                StringBuilder padding = new StringBuilder();
                int paddedLength = ((CharTypeInfo) type.getTypeInfo()).getLength();
                int truncatedLength = value.length();
                for (int i = 0; i < paddedLength - truncatedLength; i++) {
                    padding.append(" ");
                }
                return value + padding;
            }
            return value;
        } else {
            return rawData;
        }
    } else if (type.equals(HiveType.HIVE_BOOLEAN)) {
        return rawData;
    } else if (type.equals(HiveType.HIVE_DATE)) {
        // rawData carries a day count added onto new Date(0) (the epoch), and the
        // result is scaled by 1000 — presumably epoch-ms -> epoch-µs.
        // NOTE(review): Calendar.getInstance() uses the JVM default timezone; if
        // dates are meant to be UTC, DST/offset shifts could skew the result —
        // confirm the intended timezone.
        Calendar c = Calendar.getInstance();
        c.setTime(new Date(0));
        c.add(Calendar.DAY_OF_YEAR, ((Long) rawData).intValue());
        Date date = c.getTime();
        return date.getTime() * 1000;
    } else if (type.getTypeInfo() instanceof DecimalTypeInfo) {
        if (rawData instanceof Double) {
            // NOTE(review): new BigDecimal(double) preserves the exact binary
            // value (e.g. 0.1 -> 0.1000000000000000055511151231257827...);
            // BigDecimal.valueOf(double) may be what was intended — confirm
            // against how the stored decimal values are produced.
            return new BigDecimal((Double) rawData);
        } else if (rawData instanceof Long) {
            // Long literals are an unscaled value; reattach the column's scale.
            return new BigDecimal(new BigInteger(String.valueOf(rawData)), ((DecimalTypeInfo) type.getTypeInfo()).getScale());
        } else if (rawData instanceof Slice) {
            // Long-precision decimals arrive Slice-encoded; decode then rescale.
            return new BigDecimal(Decimals.decodeUnscaledValue((Slice) rawData), ((DecimalTypeInfo) type.getTypeInfo()).getScale());
        }
    } else if (type.equals(HiveType.HIVE_TIMESTAMP)) {
        // Scaled by 1000 — presumably ms -> µs; verify against the reader side.
        return (Long) rawData * 1000;
    }
    return rawData;
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) Calendar(java.util.Calendar) Date(java.util.Date) BigDecimal(java.math.BigDecimal) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) Slice(io.airlift.slice.Slice) BigInteger(java.math.BigInteger)

Example 55 with CharTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in the flink-mirror project (flink-ci).

Class: HiveParserDDLSemanticAnalyzer — method: getTypeName.

/**
 * Resolves the Hive type name for a type AST node. Parameterized types
 * (char/varchar/decimal) are resolved to their fully qualified names
 * (e.g. "char(10)"); all other tokens are looked up in TokenToTypeName.
 *
 * @param node the type AST node
 * @return the resolved type name
 * @throws SemanticException if the parameterized type node cannot be parsed
 */
public static String getTypeName(HiveParserASTNode node) throws SemanticException {
    final int token = node.getType();
    switch (token) {
        case HiveASTParser.TOK_DATETIME:
            // The datetime type isn't currently supported.
            throw new ValidationException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
        case HiveASTParser.TOK_CHAR:
            return HiveASTParseUtils.getCharTypeInfo(node).getQualifiedName();
        case HiveASTParser.TOK_VARCHAR:
            return HiveASTParseUtils.getVarcharTypeInfo(node).getQualifiedName();
        case HiveASTParser.TOK_DECIMAL:
            return HiveASTParseUtils.getDecimalTypeTypeInfo(node).getQualifiedName();
        default:
            return TokenToTypeName.get(token);
    }
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) ValidationException(org.apache.flink.table.api.ValidationException) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint)

Aggregations

CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)40 VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo)33 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)25 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)24 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)22 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)19 HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable)16 Text (org.apache.hadoop.io.Text)15 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)14 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)12 LongWritable (org.apache.hadoop.io.LongWritable)12 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)11 PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)11 Timestamp (org.apache.hadoop.hive.common.type.Timestamp)10 HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable)10 BytesWritable (org.apache.hadoop.io.BytesWritable)10 IntWritable (org.apache.hadoop.io.IntWritable)10 Test (org.junit.Test)10 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)9 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)9