Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.
Class ASTBuilder, method literal:
public static ASTNode literal(RexLiteral literal) {
  Object val = null;
  int type = 0;
  SqlTypeName sqlType = literal.getType().getSqlTypeName();
  switch (sqlType) {
  case BINARY:
  case DATE:
  case TIME:
  case TIMESTAMP:
  case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
  case INTERVAL_DAY:
  case INTERVAL_DAY_HOUR:
  case INTERVAL_DAY_MINUTE:
  case INTERVAL_DAY_SECOND:
  case INTERVAL_HOUR:
  case INTERVAL_HOUR_MINUTE:
  case INTERVAL_HOUR_SECOND:
  case INTERVAL_MINUTE:
  case INTERVAL_MINUTE_SECOND:
  case INTERVAL_MONTH:
  case INTERVAL_SECOND:
  case INTERVAL_YEAR:
  case INTERVAL_YEAR_MONTH:
  case MAP:
  case ARRAY:
  case ROW:
    if (literal.getValue() == null) {
      return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
    }
    break;
  case TINYINT:
  case SMALLINT:
  case INTEGER:
  case BIGINT:
  case DOUBLE:
  case DECIMAL:
  case FLOAT:
  case REAL:
  case VARCHAR:
  case CHAR:
  case BOOLEAN:
    if (literal.getValue3() == null) {
      return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
    }
  }
  switch (sqlType) {
  case TINYINT:
  case SMALLINT:
  case INTEGER:
  case BIGINT:
    val = literal.getValue3();
    // Hive distinguishes between the integral types; most importantly, they become IntegralLiteral tokens.
    if (val instanceof BigDecimal) {
      val = ((BigDecimal) val).longValue();
    }
    switch (sqlType) {
    case TINYINT:
      val += "Y";
      break;
    case SMALLINT:
      val += "S";
      break;
    case INTEGER:
      val += "";
      break;
    case BIGINT:
      val += "L";
      break;
    }
    type = HiveParser.IntegralLiteral;
    break;
  case DOUBLE:
    val = literal.getValue3() + "D";
    type = HiveParser.NumberLiteral;
    break;
  case DECIMAL:
    val = literal.getValue3() + "BD";
    type = HiveParser.NumberLiteral;
    break;
  case FLOAT:
  case REAL:
    val = literal.getValue3() + "F";
    type = HiveParser.Number;
    break;
  case VARCHAR:
  case CHAR:
    val = literal.getValue3();
    String escapedVal = BaseSemanticAnalyzer.escapeSQLString(String.valueOf(val));
    type = HiveParser.StringLiteral;
    val = "'" + escapedVal + "'";
    break;
  case BOOLEAN:
    val = literal.getValue3();
    type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE : HiveParser.KW_FALSE;
    break;
  case DATE:
    val = "'" + literal.getValueAs(DateString.class).toString() + "'";
    type = HiveParser.TOK_DATELITERAL;
    break;
  case TIME:
    val = "'" + literal.getValueAs(TimeString.class).toString() + "'";
    type = HiveParser.TOK_TIMESTAMPLITERAL;
    break;
  case TIMESTAMP:
    val = "'" + literal.getValueAs(TimestampString.class).toString() + "'";
    type = HiveParser.TOK_TIMESTAMPLITERAL;
    break;
  case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
    // Calcite stores timestamp with local time-zone in UTC internally, thus
    // when we bring it back, we need to add the UTC suffix.
    val = "'" + literal.getValueAs(TimestampString.class).toString() + " UTC'";
    type = HiveParser.TOK_TIMESTAMPLOCALTZLITERAL;
    break;
  case INTERVAL_YEAR:
  case INTERVAL_MONTH:
  case INTERVAL_YEAR_MONTH: {
    type = HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL;
    BigDecimal monthsBd = (BigDecimal) literal.getValue();
    HiveIntervalYearMonth intervalYearMonth = new HiveIntervalYearMonth(monthsBd.intValue());
    val = "'" + intervalYearMonth.toString() + "'";
    break;
  }
  case INTERVAL_DAY:
  case INTERVAL_DAY_HOUR:
  case INTERVAL_DAY_MINUTE:
  case INTERVAL_DAY_SECOND:
  case INTERVAL_HOUR:
  case INTERVAL_HOUR_MINUTE:
  case INTERVAL_HOUR_SECOND:
  case INTERVAL_MINUTE:
  case INTERVAL_MINUTE_SECOND:
  case INTERVAL_SECOND: {
    type = HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL;
    BigDecimal millisBd = (BigDecimal) literal.getValue();
    // Calcite literal is in millis, convert to seconds
    BigDecimal secsBd = millisBd.divide(BigDecimal.valueOf(1000));
    HiveIntervalDayTime intervalDayTime = new HiveIntervalDayTime(secsBd);
    val = "'" + intervalDayTime.toString() + "'";
    break;
  }
  case NULL:
    type = HiveParser.TOK_NULL;
    break;
  // BINARY and ROW types should not be seen here.
  case BINARY:
  case ROW:
  default:
    throw new RuntimeException("Unsupported Type: " + sqlType);
  }
  return (ASTNode) ParseDriver.adaptor.create(type, String.valueOf(val));
}
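For illustration, a minimal sketch of what the INTERVAL_YEAR_MONTH branch above produces, using a made-up month count of 26 (no such value appears in the source):

// Hypothetical illustration: Calcite stores the interval as total months in a BigDecimal;
// HiveIntervalYearMonth renders it in Hive's "years-months" literal form.
BigDecimal monthsBd = BigDecimal.valueOf(26);
HiveIntervalYearMonth intervalYearMonth = new HiveIntervalYearMonth(monthsBd.intValue());
String val = "'" + intervalYearMonth.toString() + "'"; // yields "'2-2'"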
Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.
Class GenericUDF, method getIntervalYearMonthValue:
protected HiveIntervalYearMonth getIntervalYearMonthValue(DeferredObject[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws HiveException {
  Object obj;
  if ((obj = arguments[i].get()) == null) {
    return null;
  }
  HiveIntervalYearMonth intervalYearMonth;
  switch (inputTypes[i]) {
  case STRING:
  case VARCHAR:
  case CHAR:
    String intervalYearMonthStr = converters[i].convert(obj).toString();
    intervalYearMonth = HiveIntervalYearMonth.valueOf(intervalYearMonthStr);
    break;
  case INTERVAL_YEAR_MONTH:
    Object writableValue = converters[i].convert(obj);
    intervalYearMonth = ((HiveIntervalYearMonthWritable) writableValue).getHiveIntervalYearMonth();
    break;
  default:
    throw new UDFArgumentTypeException(0, getFuncName()
        + " only takes INTERVAL_YEAR_MONTH and STRING_GROUP types, got " + inputTypes[i]);
  }
  return intervalYearMonth;
}
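A short usage sketch of the two conversion paths above; the interval string "1-6" and the writable wrapper are invented for illustration:

// Hypothetical values; both paths end at the same HiveIntervalYearMonth.
HiveIntervalYearMonth fromString = HiveIntervalYearMonth.valueOf("1-6"); // 1 year, 6 months
HiveIntervalYearMonthWritable writable = new HiveIntervalYearMonthWritable(fromString);
HiveIntervalYearMonth fromWritable = writable.getHiveIntervalYearMonth();
// fromString.getTotalMonths() == fromWritable.getTotalMonths() == 18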
Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.
Class RexNodeExprFactory, method createIntervalYearConstantExpr:
/**
 * {@inheritDoc}
 */
@Override
protected RexLiteral createIntervalYearConstantExpr(String value) {
  HiveIntervalYearMonth v = new HiveIntervalYearMonth(Integer.parseInt(value), 0);
  BigDecimal totalMonths = BigDecimal.valueOf(v.getTotalMonths());
  return rexBuilder.makeIntervalLiteral(totalMonths,
      new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
}
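Tracing the conversion for a hypothetical input of "5": five years become sixty total months, which is the BigDecimal handed to Calcite's interval literal:

// Hypothetical walk-through of the method above for value = "5".
HiveIntervalYearMonth v = new HiveIntervalYearMonth(Integer.parseInt("5"), 0); // 5 years, 0 months
BigDecimal totalMonths = BigDecimal.valueOf(v.getTotalMonths()); // 60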
Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.
Class DateTimeMath, method add:
public HiveIntervalYearMonth add(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
  HiveIntervalYearMonth result = null;
  if (left == null || right == null) {
    return null;
  }
  result = new HiveIntervalYearMonth(left.getTotalMonths() + right.getTotalMonths());
  return result;
}
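A brief usage sketch with invented operands; dtm is a hypothetical local instance:

// Hypothetical usage: "1-3" (15 months) plus "0-10" (10 months) gives "2-1" (25 months).
DateTimeMath dtm = new DateTimeMath();
HiveIntervalYearMonth sum = dtm.add(new HiveIntervalYearMonth(1, 3), new HiveIntervalYearMonth(0, 10));
// sum.getTotalMonths() == 25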
Use of org.apache.hadoop.hive.common.type.HiveIntervalYearMonth in project hive by apache.
Class VerifyFastRow, method serializeWrite:
public static void serializeWrite(SerializeWrite serializeWrite, TypeInfo typeInfo, Object object) throws IOException {
  if (object == null) {
    serializeWrite.writeNull();
    return;
  }
  switch (typeInfo.getCategory()) {
  case PRIMITIVE:
    {
      PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
      switch (primitiveTypeInfo.getPrimitiveCategory()) {
      case BOOLEAN:
        {
          boolean value = ((BooleanWritable) object).get();
          serializeWrite.writeBoolean(value);
        }
        break;
      case BYTE:
        {
          byte value = ((ByteWritable) object).get();
          serializeWrite.writeByte(value);
        }
        break;
      case SHORT:
        {
          short value = ((ShortWritable) object).get();
          serializeWrite.writeShort(value);
        }
        break;
      case INT:
        {
          int value = ((IntWritable) object).get();
          serializeWrite.writeInt(value);
        }
        break;
      case LONG:
        {
          long value = ((LongWritable) object).get();
          serializeWrite.writeLong(value);
        }
        break;
      case FLOAT:
        {
          float value = ((FloatWritable) object).get();
          serializeWrite.writeFloat(value);
        }
        break;
      case DOUBLE:
        {
          double value = ((DoubleWritable) object).get();
          serializeWrite.writeDouble(value);
        }
        break;
      case STRING:
        {
          Text value = (Text) object;
          byte[] stringBytes = value.getBytes();
          int stringLength = stringBytes.length;
          serializeWrite.writeString(stringBytes, 0, stringLength);
        }
        break;
      case CHAR:
        {
          HiveChar value = ((HiveCharWritable) object).getHiveChar();
          serializeWrite.writeHiveChar(value);
        }
        break;
      case VARCHAR:
        {
          HiveVarchar value = ((HiveVarcharWritable) object).getHiveVarchar();
          serializeWrite.writeHiveVarchar(value);
        }
        break;
      case DECIMAL:
        {
          HiveDecimal value = ((HiveDecimalWritable) object).getHiveDecimal();
          DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
          serializeWrite.writeHiveDecimal(value, decTypeInfo.scale());
        }
        break;
      case DATE:
        {
          Date value = ((DateWritableV2) object).get();
          serializeWrite.writeDate(value);
        }
        break;
      case TIMESTAMP:
        {
          Timestamp value = ((TimestampWritableV2) object).getTimestamp();
          serializeWrite.writeTimestamp(value);
        }
        break;
      case INTERVAL_YEAR_MONTH:
        {
          HiveIntervalYearMonth value = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth();
          serializeWrite.writeHiveIntervalYearMonth(value);
        }
        break;
      case INTERVAL_DAY_TIME:
        {
          HiveIntervalDayTime value = ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime();
          serializeWrite.writeHiveIntervalDayTime(value);
        }
        break;
      case BINARY:
        {
          BytesWritable byteWritable = (BytesWritable) object;
          byte[] binaryBytes = byteWritable.getBytes();
          int length = byteWritable.getLength();
          serializeWrite.writeBinary(binaryBytes, 0, length);
        }
        break;
      default:
        throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory().name());
      }
    }
    break;
  case LIST:
    {
      ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
      TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
      ArrayList<Object> elements = (ArrayList<Object>) object;
      serializeWrite.beginList(elements);
      boolean isFirst = true;
      for (Object elementObject : elements) {
        if (isFirst) {
          isFirst = false;
        } else {
          serializeWrite.separateList();
        }
        if (elementObject == null) {
          serializeWrite.writeNull();
        } else {
          serializeWrite(serializeWrite, elementTypeInfo, elementObject);
        }
      }
      serializeWrite.finishList();
    }
    break;
  case MAP:
    {
      MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
      TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
      TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
      Map<Object, Object> hashMap = (Map<Object, Object>) object;
      serializeWrite.beginMap(hashMap);
      boolean isFirst = true;
      for (Map.Entry<Object, Object> entry : hashMap.entrySet()) {
        if (isFirst) {
          isFirst = false;
        } else {
          serializeWrite.separateKeyValuePair();
        }
        if (entry.getKey() == null) {
          serializeWrite.writeNull();
        } else {
          serializeWrite(serializeWrite, keyTypeInfo, entry.getKey());
        }
        serializeWrite.separateKey();
        if (entry.getValue() == null) {
          serializeWrite.writeNull();
        } else {
          serializeWrite(serializeWrite, valueTypeInfo, entry.getValue());
        }
      }
      serializeWrite.finishMap();
    }
    break;
  case STRUCT:
    {
      StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
      List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
      List<Object> fieldValues = (List<Object>) object;
      final int size = fieldValues.size();
      serializeWrite.beginStruct(fieldValues);
      boolean isFirst = true;
      for (int i = 0; i < size; i++) {
        if (isFirst) {
          isFirst = false;
        } else {
          serializeWrite.separateStruct();
        }
        serializeWrite(serializeWrite, fieldTypeInfos.get(i), fieldValues.get(i));
      }
      serializeWrite.finishStruct();
    }
    break;
  case UNION:
    {
      UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
      List<TypeInfo> fieldTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
      final int size = fieldTypeInfos.size();
      StandardUnionObjectInspector.StandardUnion standardUnion = (StandardUnionObjectInspector.StandardUnion) object;
      byte tag = standardUnion.getTag();
      serializeWrite.beginUnion(tag);
      serializeWrite(serializeWrite, fieldTypeInfos.get(tag), standardUnion.getObject());
      serializeWrite.finishUnion();
    }
    break;
  default:
    throw new Error("Unknown category " + typeInfo.getCategory().name());
  }
}
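Finally, a minimal sketch of the INTERVAL_YEAR_MONTH branch in isolation; the 2-year, 2-month value is invented, and serializeWrite stands in for any SerializeWrite implementation that is already positioned on a field:

// Hypothetical illustration of the INTERVAL_YEAR_MONTH unwrap-and-write above.
Object object = new HiveIntervalYearMonthWritable(new HiveIntervalYearMonth(2, 2));
HiveIntervalYearMonth value = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth();
serializeWrite.writeHiveIntervalYearMonth(value); // writes the "2-2" interval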