Use of org.apache.calcite.sql.SqlDataTypeSpec in project calcite by apache.
In the class SqlCreateTable, method execute:
public void execute(CalcitePrepare.Context context) {
  final List<String> path = context.getDefaultSchemaPath();
  CalciteSchema schema = context.getRootSchema();
  for (String p : path) {
    schema = schema.getSubSchema(p, true);
  }
  // Build the table's row type from the declared column names and their SqlDataTypeSpecs.
  final JavaTypeFactory typeFactory = new JavaTypeFactoryImpl();
  final RelDataTypeFactory.Builder builder = typeFactory.builder();
  for (Pair<SqlIdentifier, SqlDataTypeSpec> pair : nameTypes()) {
    builder.add(pair.left.getSimple(), pair.right.deriveType(typeFactory, true));
  }
  final RelDataType rowType = builder.build();
  // Register a modifiable in-memory table under the resolved schema.
  schema.add(name.getSimple(),
      new MutableArrayTable(name.getSimple(), RelDataTypeImpl.proto(rowType)));
}
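For reference, a minimal standalone sketch of building a SqlDataTypeSpec by hand and deriving a RelDataType from it, rather than receiving the spec from the parser as the snippets on this page do. It assumes a recent Calcite release where SqlDataTypeSpec wraps a SqlTypeNameSpec; exact constructors vary between versions.
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.SqlBasicTypeNameSpec;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeName;

public class SqlDataTypeSpecSketch {
  public static void main(String[] args) {
    // VARCHAR(100) described as a type spec, the same kind of AST node the parser
    // produces for a column type or a CAST target.
    SqlDataTypeSpec spec = new SqlDataTypeSpec(
        new SqlBasicTypeNameSpec(SqlTypeName.VARCHAR, 100, SqlParserPos.ZERO),
        SqlParserPos.ZERO);
    // Derive the corresponding RelDataType without going through a validator.
    RelDataTypeFactory typeFactory = new JavaTypeFactoryImpl();
    RelDataType type = spec.deriveType(typeFactory);
    System.out.println(type); // e.g. VARCHAR(100)
  }
}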
Use of org.apache.calcite.sql.SqlDataTypeSpec in project calcite by apache.
In the class StandardConvertletTable, method convertCast:
protected RexNode convertCast(SqlRexContext cx, final SqlCall call) {
  RelDataTypeFactory typeFactory = cx.getTypeFactory();
  assert call.getKind() == SqlKind.CAST;
  final SqlNode left = call.operand(0);
  final SqlNode right = call.operand(1);
  if (right instanceof SqlIntervalQualifier) {
    final SqlIntervalQualifier intervalQualifier = (SqlIntervalQualifier) right;
    if (left instanceof SqlIntervalLiteral) {
      RexLiteral sourceInterval = (RexLiteral) cx.convertExpression(left);
      BigDecimal sourceValue = (BigDecimal) sourceInterval.getValue();
      RexLiteral castedInterval =
          cx.getRexBuilder().makeIntervalLiteral(sourceValue, intervalQualifier);
      return castToValidatedType(cx, call, castedInterval);
    } else if (left instanceof SqlNumericLiteral) {
      // Scale the numeric literal by the unit's multiplier to obtain the
      // interval's internal representation.
      RexLiteral sourceInterval = (RexLiteral) cx.convertExpression(left);
      BigDecimal sourceValue = (BigDecimal) sourceInterval.getValue();
      final BigDecimal multiplier = intervalQualifier.getUnit().multiplier;
      sourceValue = sourceValue.multiply(multiplier);
      RexLiteral castedInterval =
          cx.getRexBuilder().makeIntervalLiteral(sourceValue, intervalQualifier);
      return castToValidatedType(cx, call, castedInterval);
    }
    return castToValidatedType(cx, call, cx.convertExpression(left));
  }
  SqlDataTypeSpec dataType = (SqlDataTypeSpec) right;
  if (SqlUtil.isNullLiteral(left, false)) {
    return cx.convertExpression(left);
  }
  RexNode arg = cx.convertExpression(left);
  RelDataType type = dataType.deriveType(typeFactory);
  if (arg.getType().isNullable()) {
    type = typeFactory.createTypeWithNullability(type, true);
  }
  if (null != dataType.getCollectionsTypeName()) {
    // When casting a multiset of structs to a multiset of a scalar type,
    // rebuild the component type as a single-field struct.
    final RelDataType argComponentType = arg.getType().getComponentType();
    final RelDataType componentType = type.getComponentType();
    if (argComponentType.isStruct() && !componentType.isStruct()) {
      RelDataType tt = typeFactory.builder()
          .add(argComponentType.getFieldList().get(0).getName(), componentType)
          .build();
      tt = typeFactory.createTypeWithNullability(tt, componentType.isNullable());
      boolean isn = type.isNullable();
      type = typeFactory.createMultisetType(tt, -1);
      type = typeFactory.createTypeWithNullability(type, isn);
    }
  }
  return cx.getRexBuilder().makeCast(type, arg);
}
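To make the SqlNumericLiteral branch concrete, a small hedged sketch of the multiplier step: Calcite keeps day-time intervals as a millisecond count, so the unit's multiplier (from org.apache.calcite.avatica.util.TimeUnit) scales the literal into that internal representation. The values below are illustrative of what the code above computes for CAST(5 AS INTERVAL MINUTE).
import java.math.BigDecimal;
import org.apache.calcite.avatica.util.TimeUnit;

public class IntervalMultiplierSketch {
  public static void main(String[] args) {
    // The numeric operand of CAST(5 AS INTERVAL MINUTE).
    BigDecimal sourceValue = BigDecimal.valueOf(5);
    // TimeUnit.MINUTE.multiplier is 60000: day-time intervals are stored in milliseconds.
    BigDecimal multiplier = TimeUnit.MINUTE.multiplier;
    // Same scaling as sourceValue.multiply(multiplier) in convertCast above.
    System.out.println(sourceValue.multiply(multiplier)); // 300000
  }
}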
Use of org.apache.calcite.sql.SqlDataTypeSpec in project flink by apache.
In the class HiveDDLUtils, method convertDataTypes:
private static SqlDataTypeSpec convertDataTypes(SqlDataTypeSpec typeSpec) throws ParseException {
  SqlTypeNameSpec nameSpec = typeSpec.getTypeNameSpec();
  SqlTypeNameSpec convertedNameSpec = convertDataTypes(nameSpec);
  if (nameSpec != convertedNameSpec) {
    // getNullable() returns a Boolean; null (no NULL/NOT NULL clause) defaults to nullable.
    boolean nullable = typeSpec.getNullable() == null ? true : typeSpec.getNullable();
    typeSpec = new SqlDataTypeSpec(
        convertedNameSpec, typeSpec.getTimeZone(), nullable, typeSpec.getParserPosition());
  }
  return typeSpec;
}
Use of org.apache.calcite.sql.SqlDataTypeSpec in project flink by apache.
In the class FlinkConvertletTable, method convertTryCast:
// Slightly modified version of StandardConvertletTable::convertCast
private RexNode convertTryCast(SqlRexContext cx, final SqlCall call) {
  RelDataTypeFactory typeFactory = cx.getTypeFactory();
  final SqlNode leftNode = call.operand(0);
  final SqlNode rightNode = call.operand(1);
  final RexNode valueRex = cx.convertExpression(leftNode);
  RelDataType type;
  if (rightNode instanceof SqlIntervalQualifier) {
    type = typeFactory.createSqlIntervalType((SqlIntervalQualifier) rightNode);
  } else if (rightNode instanceof SqlDataTypeSpec) {
    SqlDataTypeSpec dataType = (SqlDataTypeSpec) rightNode;
    type = dataType.deriveType(cx.getValidator());
    if (type == null) {
      type = cx.getValidator().getValidatedNodeType(dataType.getTypeName());
    }
  } else {
    throw new IllegalStateException("Invalid right argument type for TRY_CAST: " + rightNode);
  }
  // TRY_CAST may produce NULL, so the result type is always nullable.
  type = typeFactory.createTypeWithNullability(type, true);
  if (SqlUtil.isNullLiteral(leftNode, false)) {
    final SqlValidatorImpl validator = (SqlValidatorImpl) cx.getValidator();
    validator.setValidatedNodeType(leftNode, type);
    return cx.convertExpression(leftNode);
  }
  return cx.getRexBuilder()
      .makeCall(type, FlinkSqlOperatorTable.TRY_CAST, Collections.singletonList(valueRex));
}
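A usage note on the createTypeWithNullability(type, true) step above: Flink's TRY_CAST returns NULL instead of raising an error when the conversion fails (for example, TRY_CAST('abc' AS INT) yields NULL), so the result type must be nullable even when both the input and the target type are declared NOT NULL.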
Use of org.apache.calcite.sql.SqlDataTypeSpec in project flink by apache.
In the class OperationConverterUtils, method toTableColumn:
private static TableColumn toTableColumn(SqlTableColumn tableColumn, SqlValidator sqlValidator) {
  if (!(tableColumn instanceof SqlRegularColumn)) {
    throw new TableException("Only regular columns are supported for this operation yet.");
  }
  SqlRegularColumn regularColumn = (SqlRegularColumn) tableColumn;
  String name = regularColumn.getName().getSimple();
  SqlDataTypeSpec typeSpec = regularColumn.getType();
  // Default to nullable when the column declaration has no NULL/NOT NULL clause.
  boolean nullable = typeSpec.getNullable() == null ? true : typeSpec.getNullable();
  LogicalType logicalType =
      FlinkTypeFactory.toLogicalType(typeSpec.deriveType(sqlValidator, nullable));
  DataType dataType = TypeConversions.fromLogicalToDataType(logicalType);
  return TableColumn.physical(name, dataType);
}