Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.
The class TestVectorFilterCompare, method testStringFamily.
@Test
public void testStringFamily() throws Exception {
  // A fixed seed keeps the randomized comparison tests reproducible across runs.
  Random random = new Random(7743);
  doTests(random, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
  doTests(random, new CharTypeInfo(10), new CharTypeInfo(10));
  doTests(random, new VarcharTypeInfo(10), new VarcharTypeInfo(10));
}
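As a side note on the construction styles above: new CharTypeInfo(10) and TypeInfoFactory.getCharTypeInfo(10) both describe a char(10) type, and the factory variant caches instances by type name. A minimal sketch of that API surface (the class name and printed values are illustrative assumptions, not part of the test):

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CharTypeInfoSketch {
  public static void main(String[] args) {
    // Direct construction, as in the test above.
    CharTypeInfo direct = new CharTypeInfo(10);
    // Factory lookup; TypeInfoFactory caches TypeInfo instances by type name.
    CharTypeInfo cached = TypeInfoFactory.getCharTypeInfo(10);
    System.out.println(direct.getTypeName()); // char(10)
    System.out.println(cached.getLength());   // 10
  }
}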
Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hive by apache.
The class TestGenericUDFCeil, method testChar.
@Test
public void testChar() throws HiveException {
  GenericUDFCeil udf = new GenericUDFCeil();
  HiveChar vc = new HiveChar("-32300.004747", 12);
  HiveCharWritable input = new HiveCharWritable(vc);
  CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // ceil() on a char-typed input resolves to a long result type.
  Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
  LongWritable res = (LongWritable) udf.evaluate(args);
  // Ceiling rounds toward positive infinity, so the negative fraction rounds up to -32300.
  Assert.assertEquals(-32300L, res.get());
}
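Note that HiveChar enforces the declared length: shorter values are space-padded and longer ones appear to be truncated at construction, which is how the 13-character literal above fits a char(12). A hedged sketch of that behavior (the expected outputs are assumptions based on HiveChar's padding semantics):

import org.apache.hadoop.hive.common.type.HiveChar;

public class HiveCharSketch {
  public static void main(String[] args) {
    // Shorter input is space-padded out to the declared length.
    HiveChar padded = new HiveChar("ab", 4);
    System.out.println("[" + padded.getValue() + "]");         // [ab  ]
    // Trailing pad spaces are dropped by the stripped accessor.
    System.out.println("[" + padded.getStrippedValue() + "]"); // [ab]
    // Longer input is truncated to the maximum length (assumed: -32300.00474).
    HiveChar truncated = new HiveChar("-32300.004747", 12);
    System.out.println(truncated.getValue());
  }
}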
Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project Taier by DTStack.
The class AlterAstNodeParser, method getColumns.
private List<Column> getColumns(ASTNode tableColsNode) throws Exception {
  if (CollectionUtils.isEmpty(tableColsNode.getChildren())) {
    return null;
  }
  List<Column> columns = new ArrayList<>();
  Column column;
  ArrayList<Node> nodes;
  String colType;
  String comment;
  String name;
  int index = 0;
  for (Node node : tableColsNode.getChildren()) {
    column = new Column();
    nodes = ((ASTNode) node).getChildren();
    // Strip surrounding backticks or quotes from the column name.
    name = ((ASTNode) nodes.get(0)).getToken().getText();
    if (name.matches("^[`'\"].*")) {
      name = name.substring(1, name.length() - 1);
    }
    column.setName(name);
    colType = ((ASTNode) nodes.get(1)).getToken().getText();
    // Parameterized types carry their precision/length on the AST node, so
    // resolve them to full type names such as decimal(10,2), varchar(32), or char(5).
    if ("TOK_DECIMAL".equals(colType)) {
      DecimalTypeInfo decimalTypeInfo = ParseUtils.getDecimalTypeTypeInfo((ASTNode) nodes.get(1));
      colType = decimalTypeInfo.getTypeName();
    } else if ("TOK_VARCHAR".equals(colType)) {
      VarcharTypeInfo varcharTypeInfo = ParseUtils.getVarcharTypeInfo((ASTNode) nodes.get(1));
      colType = varcharTypeInfo.getTypeName();
    } else if ("TOK_CHAR".equals(colType)) {
      CharTypeInfo charTypeInfo = ParseUtils.getCharTypeInfo((ASTNode) nodes.get(1));
      colType = charTypeInfo.getTypeName();
    }
    // Simple types keep their token form (e.g. TOK_INT); drop the TOK_ prefix.
    column.setType(colType.startsWith("TOK_") ? colType.substring(4) : colType);
    // An optional third child holds the column comment.
    if (nodes.size() == 3) {
      comment = ((ASTNode) nodes.get(2)).getToken().getText();
      if (comment.matches("^['\"].*")) {
        comment = comment.substring(1, comment.length() - 1);
      }
      column.setComment(comment);
    }
    column.setIndex(index++);
    columns.add(column);
  }
  return columns;
}
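The type-name handling above relies on getTypeName() for parameterized types and on stripping the TOK_ prefix for simple ones. A short sketch of what those branches produce, using factory-built type infos in place of parser output (the printed values assume current Hive naming):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ColumnTypeNameSketch {
  public static void main(String[] args) {
    // Parameterized types resolve to their full names via getTypeName().
    System.out.println(TypeInfoFactory.getCharTypeInfo(5).getTypeName());        // char(5)
    System.out.println(TypeInfoFactory.getVarcharTypeInfo(32).getTypeName());    // varchar(32)
    System.out.println(TypeInfoFactory.getDecimalTypeInfo(10, 2).getTypeName()); // decimal(10,2)
    // Simple types arrive as tokens such as TOK_INT; the TOK_ prefix is dropped.
    String colType = "TOK_INT";
    System.out.println(colType.startsWith("TOK_") ? colType.substring(4) : colType); // INT
  }
}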
Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project hetu-core by openlookeng.
The class CarbondataHetuFilterUtil, method convertDataByType.
private static Object convertDataByType(Object rawData, HiveType type) {
  if (type.equals(HiveType.HIVE_INT) || type.equals(HiveType.HIVE_SHORT)) {
    return Integer.valueOf(rawData.toString());
  } else if (type.equals(HiveType.HIVE_LONG)) {
    return rawData;
  } else if (type.equals(HiveType.HIVE_STRING) || type.getTypeInfo() instanceof VarcharTypeInfo || type.getTypeInfo() instanceof CharTypeInfo) {
    if (rawData instanceof Slice) {
      String value = ((Slice) rawData).toStringUtf8();
      if (type.getTypeInfo() instanceof CharTypeInfo) {
        // CHAR semantics: space-pad the value out to the declared length.
        StringBuilder padding = new StringBuilder();
        int paddedLength = ((CharTypeInfo) type.getTypeInfo()).getLength();
        int truncatedLength = value.length();
        for (int i = 0; i < paddedLength - truncatedLength; i++) {
          padding.append(" ");
        }
        return value + padding;
      }
      return value;
    } else {
      return rawData;
    }
  } else if (type.equals(HiveType.HIVE_BOOLEAN)) {
    return rawData;
  } else if (type.equals(HiveType.HIVE_DATE)) {
    // DATE arrives as a day offset from the epoch; the result is epoch millis * 1000.
    Calendar c = Calendar.getInstance();
    c.setTime(new Date(0));
    c.add(Calendar.DAY_OF_YEAR, ((Long) rawData).intValue());
    Date date = c.getTime();
    return date.getTime() * 1000;
  } else if (type.getTypeInfo() instanceof DecimalTypeInfo) {
    if (rawData instanceof Double) {
      return new BigDecimal((Double) rawData);
    } else if (rawData instanceof Long) {
      // Short decimals are stored as unscaled longs; reapply the type's scale.
      return new BigDecimal(new BigInteger(String.valueOf(rawData)), ((DecimalTypeInfo) type.getTypeInfo()).getScale());
    } else if (rawData instanceof Slice) {
      // Long decimals carry their unscaled value inside a Slice.
      return new BigDecimal(Decimals.decodeUnscaledValue((Slice) rawData), ((DecimalTypeInfo) type.getTypeInfo()).getScale());
    }
  } else if (type.equals(HiveType.HIVE_TIMESTAMP)) {
    // Scale the timestamp value by 1000, mirroring the DATE branch.
    return (Long) rawData * 1000;
  }
  return rawData;
}
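The padding loop above implements standard CHAR space-padding by hand; an equivalent, more compact formulation (a sketch, not the project's code) can lean on String.format, which left-justifies and pads to the requested width and, like the loop, leaves longer values untouched:

public class CharPadSketch {
  public static void main(String[] args) {
    String value = "abc";
    int declaredLength = 10; // e.g. the value of CharTypeInfo#getLength()
    // "%-Ns" left-justifies and space-pads to width N; longer values pass through.
    String padded = String.format("%-" + declaredLength + "s", value);
    System.out.println("[" + padded + "]"); // [abc       ]
  }
}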
Use of org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo in project flink-mirror by flink-ci.
The class HiveParserDDLSemanticAnalyzer, method getTypeName.
public static String getTypeName(HiveParserASTNode node) throws SemanticException {
  int token = node.getType();
  String typeName;
  // datetime type isn't currently supported
  if (token == HiveASTParser.TOK_DATETIME) {
    throw new ValidationException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
  }
  switch (token) {
    case HiveASTParser.TOK_CHAR:
      CharTypeInfo charTypeInfo = HiveASTParseUtils.getCharTypeInfo(node);
      typeName = charTypeInfo.getQualifiedName();
      break;
    case HiveASTParser.TOK_VARCHAR:
      VarcharTypeInfo varcharTypeInfo = HiveASTParseUtils.getVarcharTypeInfo(node);
      typeName = varcharTypeInfo.getQualifiedName();
      break;
    case HiveASTParser.TOK_DECIMAL:
      DecimalTypeInfo decTypeInfo = HiveASTParseUtils.getDecimalTypeTypeInfo(node);
      typeName = decTypeInfo.getQualifiedName();
      break;
    default:
      // Plain types map directly from parser token to type name.
      typeName = TokenToTypeName.get(token);
  }
  return typeName;
}
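For the parameterized branches, getQualifiedName() embeds the type parameters in the returned name; for char, varchar, and decimal this coincides with the getTypeName() calls in the Taier snippet above (an assumption worth verifying against the Hive version in use). A brief sketch with factory-built type infos standing in for parser nodes:

import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class QualifiedNameSketch {
  public static void main(String[] args) {
    CharTypeInfo charType = TypeInfoFactory.getCharTypeInfo(10);
    DecimalTypeInfo decType = TypeInfoFactory.getDecimalTypeInfo(10, 2);
    // The qualified name carries the type parameters.
    System.out.println(charType.getQualifiedName()); // char(10)
    System.out.println(decType.getQualifiedName());  // decimal(10,2)
  }
}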