use of org.apache.calcite.util.NlsString in project flink by apache.
the class HiveParserRexNodeConverter method convertConstant.
public static RexNode convertConstant(ExprNodeConstantDesc literal, RelOptCluster cluster) throws SemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType calciteDataType = HiveParserTypeConverter.convert(hiveType, dtFactory);
    PrimitiveObjectInspector.PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
    RexNode calciteLiteral;
    HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
    // If value is null, the type should also be VOID.
    if (value == null) {
        hiveTypeCategory = PrimitiveObjectInspector.PrimitiveCategory.VOID;
    }
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch (hiveTypeCategory) {
        case BOOLEAN:
            calciteLiteral = rexBuilder.makeLiteral((Boolean) value);
            break;
        case BYTE:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
            break;
        case SHORT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
            break;
        case INT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
            break;
        case LONG:
            calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
            break;
        // TODO: is Decimal an exact numeric or approximate numeric?
        case DECIMAL:
            if (value instanceof HiveDecimal) {
                value = ((HiveDecimal) value).bigDecimalValue();
            } else if (value instanceof Decimal128) {
                value = ((Decimal128) value).toBigDecimal();
            }
            if (value == null) {
                // For now, we will not run CBO in the presence of invalid decimal literals.
                throw new SemanticException("Expression " + literal.getExprString() + " is not a valid decimal");
                // TODO: return createNullLiteral(literal);
            }
            BigDecimal bd = (BigDecimal) value;
            BigInteger unscaled = bd.unscaledValue();
            if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
                calciteLiteral = rexBuilder.makeExactLiteral(bd);
            } else {
                // CBO doesn't support unlimited precision decimals. In practice, this
                // will work...
                // An alternative would be to throw CboSemanticException and fall back
                // to no CBO.
                RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, unscaled.toString().length(), bd.scale());
                calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
            }
            break;
        case FLOAT:
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Float.toString((Float) value)), calciteDataType);
            break;
        case DOUBLE:
            // TODO: The best solution is to support NaN in expression reduction.
            if (Double.isNaN((Double) value)) {
                throw new SemanticException("NaN");
            }
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Double.toString((Double) value)), calciteDataType);
            break;
        case CHAR:
            if (value instanceof HiveChar) {
                value = ((HiveChar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case VARCHAR:
            if (value instanceof HiveVarchar) {
                value = ((HiveVarchar) value).getValue();
            }
            calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
            break;
        case STRING:
            Object constantDescVal = literal.getValue();
            constantDescVal = constantDescVal instanceof NlsString ? constantDescVal : asUnicodeString((String) value);
            // Calcite treats string literals as CHAR type; we should treat them as STRING,
            // just like Hive does.
            RelDataType type = HiveParserTypeConverter.convert(hiveType, dtFactory);
            // If we get here, the value is not null.
            type = dtFactory.createTypeWithNullability(type, false);
            calciteLiteral = rexBuilder.makeLiteral(constantDescVal, type, true);
            break;
        case DATE:
            LocalDate localDate = HiveParserUtils.getSessionHiveShim().toFlinkDate(value);
            DateString dateString = new DateString(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth());
            calciteLiteral = rexBuilder.makeDateLiteral(dateString);
            break;
        case TIMESTAMP:
            TimestampString timestampString;
            if (value instanceof Calendar) {
                timestampString = TimestampString.fromCalendarFields((Calendar) value);
            } else {
                LocalDateTime localDateTime = HiveParserUtils.getSessionHiveShim().toFlinkTimestamp(value);
                timestampString = new TimestampString(localDateTime.getYear(), localDateTime.getMonthValue(), localDateTime.getDayOfMonth(), localDateTime.getHour(), localDateTime.getMinute(), localDateTime.getSecond());
                timestampString = timestampString.withNanos(localDateTime.getNano());
            }
            // Hive always treats timestamps as having precision 9.
            calciteLiteral = rexBuilder.makeTimestampLiteral(timestampString, 9);
            break;
        case VOID:
            calciteLiteral = cluster.getRexBuilder().makeLiteral(null, dtFactory.createSqlType(SqlTypeName.NULL), true);
            break;
        case BINARY:
        case UNKNOWN:
        default:
            if (hiveShim.isIntervalYearMonthType(hiveTypeCategory)) {
                // Calcite year-month literal value is months as BigDecimal
                BigDecimal totalMonths = BigDecimal.valueOf(((HiveParserIntervalYearMonth) value).getTotalMonths());
                calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths, new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            } else if (hiveShim.isIntervalDayTimeType(hiveTypeCategory)) {
                // Calcite day-time interval is millis value as BigDecimal
                // Seconds converted to millis
                BigDecimal secsValueBd = BigDecimal.valueOf(((HiveParserIntervalDayTime) value).getTotalSeconds() * 1000);
                // Nanos converted to millis
                BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveParserIntervalDayTime) value).getNanos(), 6);
                calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd), new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
            } else {
                throw new RuntimeException("UnSupported Literal type " + hiveTypeCategory);
            }
    }
    return calciteLiteral;
}
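In the CHAR, VARCHAR, and STRING branches above, the Java string is wrapped in an NlsString, which is the value representation Calcite expects for character literals; RexBuilder.makeCharLiteral then turns that NlsString into a CHAR-typed RexLiteral. As a rough sketch, a helper like asUnicodeString usually amounts to the following (the exact charset and collation used by HiveParserRexNodeConverter are assumptions here, not taken from the Flink source):

import org.apache.calcite.sql.SqlCollation;
import org.apache.calcite.util.ConversionUtil;
import org.apache.calcite.util.NlsString;

// Wrap a plain Java String in an NlsString so RexBuilder.makeCharLiteral can
// build a character literal from it. Charset/collation choices are assumptions.
static NlsString asUnicodeString(String text) {
    return new NlsString(text, ConversionUtil.NATIVE_UTF16_CHARSET_NAME, SqlCollation.IMPLICIT);
}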
use of org.apache.calcite.util.NlsString in project flink by apache.
the class RichSqlInsert method getStaticPartitionKVs.
/**
 * Gets the static partition key-value pairs as strings.
 *
 * <p>For character literals we return the unquoted and unescaped values. For other types we use
 * {@link SqlLiteral#toString()} to get the string format of the value literal. If the string
 * format is not what you need, use {@link #getStaticPartitions()}.
 *
 * @return the mapping of column names to values of the partition specification; returns an
 *     empty map if there is no partition specification.
 */
public LinkedHashMap<String, String> getStaticPartitionKVs() {
    LinkedHashMap<String, String> ret = new LinkedHashMap<>();
    if (this.staticPartitions.size() == 0) {
        return ret;
    }
    for (SqlNode node : this.staticPartitions.getList()) {
        SqlProperty sqlProperty = (SqlProperty) node;
        Comparable comparable = SqlLiteral.value(sqlProperty.getValue());
        String value = comparable instanceof NlsString ? ((NlsString) comparable).getValue() : comparable.toString();
        ret.put(sqlProperty.getKey().getSimple(), value);
    }
    return ret;
}
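The NlsString check matters because SqlLiteral.value() hands back the raw NlsString for character literals; getValue() strips the quoting and escaping, whereas toString() keeps the SQL literal form. A small standalone illustration (the constructor arguments here are hypothetical, not taken from the Flink code):

import org.apache.calcite.util.NlsString;

// NlsString(value, charsetName, collation); charset and collation may be null.
NlsString nls = new NlsString("2021-01-01", null, null);
String unquoted = nls.getValue();  // "2021-01-01" -- what getStaticPartitionKVs stores
String sqlForm = nls.toString();   // the SQL literal form, e.g. '2021-01-01'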
use of org.apache.calcite.util.NlsString in project hazelcast by hazelcast.
the class HazelcastDynamicTableFunction method extractMapStringValue.
private static String extractMapStringValue(String functionName, HazelcastTableFunctionParameter parameter, SqlNode node, HazelcastSqlValidator validator) {
    if (node.getKind() == SqlKind.DYNAMIC_PARAM) {
        Object value = validator.getArgumentAt(((SqlDynamicParam) node).getIndex());
        if (value instanceof String) {
            return (String) value;
        }
    }
    if (SqlUtil.isLiteral(node)) {
        SqlLiteral literal = (SqlLiteral) node;
        Object value = literal.getValue();
        if (value instanceof NlsString) {
            return ((NlsString) value).getValue();
        }
    }
    throw QueryException.error("All values in the MAP constructor of the call to function " + functionName + ", argument #" + parameter.ordinal() + " (" + parameter.name() + ") must be VARCHAR literals. " + "Actual argument is: " + (SqlUtil.isLiteral(node) ? ((SqlLiteral) node).getTypeName() : node.getKind()));
}
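The instanceof NlsString test is how the method recognizes a VARCHAR literal: in Calcite, a character-string SqlLiteral stores its payload as an NlsString rather than a plain String. A minimal sketch of that representation (the literal value is made up for illustration):

import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.util.NlsString;

SqlLiteral literal = SqlLiteral.createCharString("my-map", SqlParserPos.ZERO);
Object value = literal.getValue();                   // an NlsString, not a String
String unwrapped = ((NlsString) value).getValue();   // "my-map"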
use of org.apache.calcite.util.NlsString in project hive by apache.
the class RexNodeExprFactory method interpretConstantAsPrimitive.
/**
* {@inheritDoc}
*/
@Override
protected Object interpretConstantAsPrimitive(PrimitiveTypeInfo targetType, Object constantValue, PrimitiveTypeInfo sourceType, boolean isEqual) {
    // Extract string value if necessary
    Object constantToInterpret = constantValue;
    if (constantValue instanceof NlsString) {
        constantToInterpret = ((NlsString) constantValue).getValue();
    }
    if (constantToInterpret instanceof Number || constantToInterpret instanceof String) {
        try {
            PrimitiveTypeEntry primitiveTypeEntry = targetType.getPrimitiveTypeEntry();
            if (PrimitiveObjectInspectorUtils.intTypeEntry.equals(primitiveTypeEntry)) {
                return toBigDecimal(constantToInterpret.toString()).intValueExact();
            } else if (PrimitiveObjectInspectorUtils.longTypeEntry.equals(primitiveTypeEntry)) {
                return toBigDecimal(constantToInterpret.toString()).longValueExact();
            } else if (PrimitiveObjectInspectorUtils.doubleTypeEntry.equals(primitiveTypeEntry)) {
                return toBigDecimal(constantToInterpret.toString());
            } else if (PrimitiveObjectInspectorUtils.floatTypeEntry.equals(primitiveTypeEntry)) {
                return toBigDecimal(constantToInterpret.toString());
            } else if (PrimitiveObjectInspectorUtils.byteTypeEntry.equals(primitiveTypeEntry)) {
                return toBigDecimal(constantToInterpret.toString()).byteValueExact();
            } else if (PrimitiveObjectInspectorUtils.shortTypeEntry.equals(primitiveTypeEntry)) {
                return toBigDecimal(constantToInterpret.toString()).shortValueExact();
            } else if (PrimitiveObjectInspectorUtils.decimalTypeEntry.equals(primitiveTypeEntry)) {
                HiveDecimal decimal = HiveDecimal.create(constantToInterpret.toString());
                return decimal != null ? decimal.bigDecimalValue() : null;
            }
        } catch (NumberFormatException | ArithmeticException nfe) {
            if (!isEqual && (constantToInterpret instanceof Number || NumberUtils.isNumber(constantToInterpret.toString()))) {
                // The constant is numeric but does not fit the target type; for non-equality
                // predicates, keep the original constant and let later type conversion handle it.
                return constantToInterpret;
            }
            LOG.trace("Failed to narrow type of constant", nfe);
            return null;
        }
    }
    if (constantToInterpret instanceof BigDecimal) {
        return constantToInterpret;
    }
    String constTypeInfoName = sourceType.getTypeName();
    if (constTypeInfoName.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)) {
        // The constant is a string; convert it to the appropriate character type.
        if (targetType instanceof CharTypeInfo) {
            final String constValue = constantToInterpret.toString();
            final int length = TypeInfoUtils.getCharacterLengthForType(targetType);
            HiveChar newValue = new HiveChar(constValue, length);
            HiveChar maxCharConst = new HiveChar(constValue, HiveChar.MAX_CHAR_LENGTH);
            if (maxCharConst.equals(newValue)) {
                return makeHiveUnicodeString(newValue.getValue());
            } else {
                return null;
            }
        }
        if (targetType instanceof VarcharTypeInfo) {
            final String constValue = constantToInterpret.toString();
            final int length = TypeInfoUtils.getCharacterLengthForType(targetType);
            HiveVarchar newValue = new HiveVarchar(constValue, length);
            HiveVarchar maxCharConst = new HiveVarchar(constValue, HiveVarchar.MAX_VARCHAR_LENGTH);
            if (maxCharConst.equals(newValue)) {
                return makeHiveUnicodeString(newValue.getValue());
            } else {
                return null;
            }
        }
    }
    return constantValue;
}
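The numeric branches above funnel the constant through BigDecimal and narrow it with the *ValueExact methods, so a value that does not fit the target type raises ArithmeticException and lands in the catch block. A quick standalone illustration of that narrowing behavior (not part of the Hive code):

import java.math.BigDecimal;

BigDecimal fits = new BigDecimal("123");
int ok = fits.intValueExact();                    // 123
BigDecimal tooBig = new BigDecimal("3000000000");
// tooBig.intValueExact() would throw ArithmeticException because the value is
// outside the int range; interpretConstantAsPrimitive handles that in its catch block.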
use of org.apache.calcite.util.NlsString in project hive by apache.
the class HiveSubQRemoveRelBuilder method inferAlias.
/**
* Infers the alias of an expression.
*
* <p>If the expression was created by {@link #alias}, replaces the expression
* in the project list.
*/
private String inferAlias(List<RexNode> exprList, RexNode expr) {
    switch (expr.getKind()) {
        case INPUT_REF:
            final RexInputRef ref = (RexInputRef) expr;
            return peek(0).getRowType().getFieldNames().get(ref.getIndex());
        case CAST:
            return inferAlias(exprList, ((RexCall) expr).getOperands().get(0));
        case AS:
            final RexCall call = (RexCall) expr;
            for (;;) {
                final int i = exprList.indexOf(expr);
                if (i < 0) {
                    break;
                }
                exprList.set(i, call.getOperands().get(0));
            }
            return ((NlsString) ((RexLiteral) call.getOperands().get(1)).getValue()).getValue();
        default:
            return null;
    }
}
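In the AS branch, the alias is the call's second operand: a character RexLiteral whose value Calcite stores as an NlsString, which is why the return line needs the double cast. A minimal sketch of that representation (assumes a RexBuilder is in scope; the alias value is made up):

import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.util.NlsString;

// A character RexLiteral wraps its value in an NlsString.
RexLiteral aliasLiteral = rexBuilder.makeCharLiteral(new NlsString("empno", null, null));
String alias = ((NlsString) aliasLiteral.getValue()).getValue();  // "empno"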