Use of org.apache.calcite.sql.parser.SqlParserPos in project hive by apache.
The class RexNodeConverter, method convert:
protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
  RexBuilder rexBuilder = cluster.getRexBuilder();
  RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
  PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
  RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);
  PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
  ConstantObjectInspector coi = literal.getWritableObjectInspector();
  Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
  RexNode calciteLiteral = null;
  // If value is null, the type should also be VOID.
  if (value == null) {
    hiveTypeCategory = PrimitiveCategory.VOID;
  }
  // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
  switch (hiveTypeCategory) {
  case BOOLEAN:
    calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
    break;
  case BYTE:
    calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
    break;
  case SHORT:
    calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
    break;
  case INT:
    calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
    break;
  case LONG:
    calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
    break;
  // TODO: is Decimal an exact numeric or approximate numeric?
  case DECIMAL:
    if (value instanceof HiveDecimal) {
      value = ((HiveDecimal) value).bigDecimalValue();
    } else if (value instanceof Decimal128) {
      value = ((Decimal128) value).toBigDecimal();
    }
    if (value == null) {
      // We cannot run CBO in the presence of invalid decimal literals, so
      // bail out with an unsupported-feature error.
      throw new CalciteSemanticException("Expression " + literal.getExprString()
          + " is not a valid decimal", UnsupportedFeature.Invalid_decimal);
      // TODO: return createNullLiteral(literal);
    }
    BigDecimal bd = (BigDecimal) value;
    BigInteger unscaled = bd.unscaledValue();
    if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
      calciteLiteral = rexBuilder.makeExactLiteral(bd);
    } else {
      // CBO doesn't support unlimited precision decimals. In practice, this
      // will work...
      // An alternative would be to throw CboSemanticException and fall back
      // to no CBO.
      RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL,
          unscaled.toString().length(), bd.scale());
      calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
    }
    break;
  case FLOAT:
    calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Float.toString((Float) value)), calciteDataType);
    break;
  case DOUBLE:
    // TODO: The best solution is to support NaN in expression reduction.
    if (Double.isNaN((Double) value)) {
      throw new CalciteSemanticException("NaN", UnsupportedFeature.Invalid_decimal);
    }
    calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Double.toString((Double) value)), calciteDataType);
    break;
  case CHAR:
    if (value instanceof HiveChar) {
      value = ((HiveChar) value).getValue();
    }
    calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
    break;
  case VARCHAR:
    if (value instanceof HiveVarchar) {
      value = ((HiveVarchar) value).getValue();
    }
    calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
    break;
  case STRING:
    calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
    break;
  case DATE:
    Calendar cal = new GregorianCalendar();
    cal.setTime((Date) value);
    calciteLiteral = rexBuilder.makeDateLiteral(cal);
    break;
  case TIMESTAMP:
    Calendar c = null;
    if (value instanceof Calendar) {
      c = (Calendar) value;
    } else {
      c = Calendar.getInstance();
      c.setTimeInMillis(((Timestamp) value).getTime());
    }
    calciteLiteral = rexBuilder.makeTimestampLiteral(c, RelDataType.PRECISION_NOT_SPECIFIED);
    break;
  case INTERVAL_YEAR_MONTH:
    // Calcite year-month literal value is months as BigDecimal
    BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
    calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths,
        new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
    break;
  case INTERVAL_DAY_TIME:
    // Calcite day-time interval is millis value as BigDecimal
    // Seconds converted to millis
    BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
    // Nanos converted to millis
    BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
    calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd),
        new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
    break;
  case VOID:
    calciteLiteral = cluster.getRexBuilder().makeLiteral(null,
        cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true);
    break;
  case BINARY:
  case UNKNOWN:
  default:
    throw new RuntimeException("UnSupported Literal");
  }
  return calciteLiteral;
}
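The interval cases above pass a dummy new SqlParserPos(1, 1) because the qualifier is synthesized programmatically rather than parsed from SQL text. A minimal, self-contained sketch of that pattern follows; the class name is made up for illustration and the imports assume a recent Calcite/Avatica on the classpath.

import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;

public class IntervalQualifierSketch {
  public static void main(String[] args) {
    // No real parser position exists for a synthesized node, so a dummy
    // position (new SqlParserPos(1, 1) or SqlParserPos.ZERO) is supplied.
    SqlIntervalQualifier yearMonth =
        new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1));
    System.out.println(yearMonth.typeName()); // INTERVAL_YEAR_MONTH
  }
}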
Use of org.apache.calcite.sql.parser.SqlParserPos in project hive by apache.
The class TypeConverter, method convert:
public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
  RelDataType convertedType = null;
  switch (type.getPrimitiveCategory()) {
  case VOID:
    convertedType = dtFactory.createSqlType(SqlTypeName.NULL);
    break;
  case BOOLEAN:
    convertedType = dtFactory.createSqlType(SqlTypeName.BOOLEAN);
    break;
  case BYTE:
    convertedType = dtFactory.createSqlType(SqlTypeName.TINYINT);
    break;
  case SHORT:
    convertedType = dtFactory.createSqlType(SqlTypeName.SMALLINT);
    break;
  case INT:
    convertedType = dtFactory.createSqlType(SqlTypeName.INTEGER);
    break;
  case LONG:
    convertedType = dtFactory.createSqlType(SqlTypeName.BIGINT);
    break;
  case FLOAT:
    convertedType = dtFactory.createSqlType(SqlTypeName.FLOAT);
    break;
  case DOUBLE:
    convertedType = dtFactory.createSqlType(SqlTypeName.DOUBLE);
    break;
  case STRING:
    convertedType = dtFactory.createTypeWithCharsetAndCollation(
        dtFactory.createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE),
        Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
    break;
  case DATE:
    convertedType = dtFactory.createSqlType(SqlTypeName.DATE);
    break;
  case TIMESTAMP:
    convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP);
    break;
  case INTERVAL_YEAR_MONTH:
    convertedType = dtFactory.createSqlIntervalType(
        new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
    break;
  case INTERVAL_DAY_TIME:
    convertedType = dtFactory.createSqlIntervalType(
        new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
    break;
  case BINARY:
    convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
    break;
  case DECIMAL:
    DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
    convertedType = dtFactory.createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
    break;
  case VARCHAR:
    convertedType = dtFactory.createTypeWithCharsetAndCollation(
        dtFactory.createSqlType(SqlTypeName.VARCHAR, ((BaseCharTypeInfo) type).getLength()),
        Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
    break;
  case CHAR:
    convertedType = dtFactory.createTypeWithCharsetAndCollation(
        dtFactory.createSqlType(SqlTypeName.CHAR, ((BaseCharTypeInfo) type).getLength()),
        Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
    break;
  case UNKNOWN:
    convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
    break;
  }
  if (null == convertedType) {
    throw new RuntimeException("Unsupported Type : " + type.getTypeName());
  }
  return dtFactory.createTypeWithNullability(convertedType, true);
}
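As a rough usage sketch (not taken from Hive's own tests), the mapping above can be exercised with a standalone Calcite type factory; note that the final line of the method always makes the result nullable. The class name is illustrative, and the example assumes the TypeConverter class above plus the Calcite core and Hive serde jars are on the classpath.

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class TypeConverterSketch {
  public static void main(String[] args) {
    RelDataTypeFactory factory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    // Hive decimal(10,2) should map to a nullable Calcite DECIMAL(10,2).
    RelDataType decimalType =
        TypeConverter.convert(TypeInfoFactory.getDecimalTypeInfo(10, 2), factory);
    System.out.println(decimalType.getSqlTypeName() + ", nullable=" + decimalType.isNullable());
  }
}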
Use of org.apache.calcite.sql.parser.SqlParserPos in project drill by apache.
The class DrillAvgVarianceConvertlet, method expandVariance:
private SqlNode expandVariance(final SqlNode arg, boolean biased, boolean sqrt) {
  /* stddev_pop(x) ==>
   *   power(
   *     (sum(x * x) - sum(x) * sum(x) / count(x))
   *     / count(x),
   *     .5)
   * stddev_samp(x) ==>
   *   power(
   *     (sum(x * x) - sum(x) * sum(x) / count(x))
   *     / (count(x) - 1),
   *     .5)
   * var_pop(x) ==>
   *   (sum(x * x) - sum(x) * sum(x) / count(x))
   *   / count(x)
   * var_samp(x) ==>
   *   (sum(x * x) - sum(x) * sum(x) / count(x))
   *   / (count(x) - 1)
   */
  final SqlParserPos pos = SqlParserPos.ZERO;
  // cast the argument to double
  final SqlNode castHighArg = CastHighOp.createCall(pos, arg);
  final SqlNode argSquared = SqlStdOperatorTable.MULTIPLY.createCall(pos, castHighArg, castHighArg);
  final SqlNode sumArgSquared = SqlStdOperatorTable.SUM.createCall(pos, argSquared);
  final SqlNode sum = SqlStdOperatorTable.SUM.createCall(pos, castHighArg);
  final SqlNode sumSquared = SqlStdOperatorTable.MULTIPLY.createCall(pos, sum, sum);
  final SqlNode count = SqlStdOperatorTable.COUNT.createCall(pos, castHighArg);
  final SqlNode avgSumSquared = SqlStdOperatorTable.DIVIDE.createCall(pos, sumSquared, count);
  final SqlNode diff = SqlStdOperatorTable.MINUS.createCall(pos, sumArgSquared, avgSumSquared);
  final SqlNode denominator;
  if (biased) {
    denominator = count;
  } else {
    final SqlNumericLiteral one = SqlLiteral.createExactNumeric("1", pos);
    denominator = SqlStdOperatorTable.MINUS.createCall(pos, count, one);
  }
  final SqlNode diffAsDouble = CastHighOp.createCall(pos, diff);
  final SqlNode div = SqlStdOperatorTable.DIVIDE.createCall(pos, diffAsDouble, denominator);
  SqlNode result = div;
  if (sqrt) {
    final SqlNumericLiteral half = SqlLiteral.createExactNumeric("0.5", pos);
    result = SqlStdOperatorTable.POWER.createCall(pos, div, half);
  }
  return result;
}
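Every node synthesized above shares SqlParserPos.ZERO, since none of them corresponds to a position in the original SQL text. A small standalone sketch of the same node-building pattern is shown below; the identifier "x" and the class name are made up for illustration, and it builds only the unbiased denominator count(x) - 1 from the expansion above.

import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;

public class VarianceDenominatorSketch {
  public static void main(String[] args) {
    SqlParserPos pos = SqlParserPos.ZERO;
    SqlNode x = new SqlIdentifier("x", pos);
    // count(x) - 1, i.e. the sample (unbiased) denominator used above.
    SqlNode count = SqlStdOperatorTable.COUNT.createCall(pos, x);
    SqlNode one = SqlLiteral.createExactNumeric("1", pos);
    SqlNode denominator = SqlStdOperatorTable.MINUS.createCall(pos, count, one);
    System.out.println(denominator); // prints something like COUNT(`x`) - 1
  }
}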
Use of org.apache.calcite.sql.parser.SqlParserPos in project drill by apache.
The class DrillAvgVarianceConvertlet, method expandAvg:
private SqlNode expandAvg(final SqlNode arg) {
  final SqlParserPos pos = SqlParserPos.ZERO;
  final SqlNode sum = SqlStdOperatorTable.SUM.createCall(pos, arg);
  final SqlNode count = SqlStdOperatorTable.COUNT.createCall(pos, arg);
  final SqlNode sumAsDouble = CastHighOp.createCall(pos, sum);
  return SqlStdOperatorTable.DIVIDE.createCall(pos, sumAsDouble, count);
}
Use of org.apache.calcite.sql.parser.SqlParserPos in project drill by apache.
The class TestDrillSQLWorker, method testErrorFormating:
@Test
public void testErrorFormating() {
  String sql = "Select * from Foo\nwhere tadadidada;\n";
  validateFormattedIs(sql, new SqlParserPos(1, 2),
      "Select * from Foo\n" + " ^\n" + "where tadadidada;\n");
  validateFormattedIs(sql, new SqlParserPos(2, 2),
      "Select * from Foo\n" + "where tadadidada;\n" + " ^\n");
  validateFormattedIs(sql, new SqlParserPos(1, 10),
      "Select * from Foo\n" + "         ^\n" + "where tadadidada;\n");
  validateFormattedIs(sql, new SqlParserPos(-11, -10), sql);
  validateFormattedIs(sql, new SqlParserPos(0, 10), sql);
  validateFormattedIs(sql, new SqlParserPos(100, 10), sql);
}
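The assertions above rely on SqlParserPos being a 1-based (line, column) coordinate into the statement text, with out-of-range positions leaving the SQL unchanged. A trivial sketch of the coordinate accessors (not part of the Drill test; the class name is illustrative):

import org.apache.calcite.sql.parser.SqlParserPos;

public class ParserPosSketch {
  public static void main(String[] args) {
    // (1, 2) points at the second character of the first line.
    SqlParserPos pos = new SqlParserPos(1, 2);
    System.out.println(pos.getLineNum() + ":" + pos.getColumnNum()); // 1:2
  }
}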