use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.ArraySqlType in project calcite by apache.
the class SqlUnnestOperator method inferReturnType.
// ~ Methods ----------------------------------------------------------------
@Override
public RelDataType inferReturnType(SqlOperatorBinding opBinding) {
  final RelDataTypeFactory.Builder builder = opBinding.getTypeFactory().builder();
  for (Integer operand : Util.range(opBinding.getOperandCount())) {
    RelDataType type = opBinding.getOperandType(operand);
    if (type.isStruct()) {
      // Unwrap a single-field struct to get at the collection type inside it.
      type = type.getFieldList().get(0).getType();
    }
    assert type instanceof ArraySqlType
        || type instanceof MultisetSqlType
        || type instanceof MapSqlType;
    if (type instanceof MapSqlType) {
      // A map unnests to two columns: key and value.
      builder.add(MAP_KEY_COLUMN_NAME, type.getKeyType());
      builder.add(MAP_VALUE_COLUMN_NAME, type.getValueType());
    } else {
      if (type.getComponentType().isStruct()) {
        // A collection of structs unnests to one column per struct field.
        builder.addAll(type.getComponentType().getFieldList());
      } else {
        // A collection of scalars unnests to a single column whose alias is
        // derived from the operand's ordinal.
        builder.add(SqlUtil.deriveAliasFromOrdinal(operand), type.getComponentType());
      }
    }
  }
  if (withOrdinality) {
    builder.add(ORDINALITY_COLUMN_NAME, SqlTypeName.INTEGER);
  }
  return builder.build();
}
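For orientation, here is a minimal standalone sketch, not taken from the project, of the single-column row type the scalar else-branch above produces for an ARRAY<INTEGER> operand. It assumes the plain org.apache.calcite imports and Calcite's stock type factory; the vendored package exposes the same classes.

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.SqlUtil;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
// An ARRAY<INTEGER> operand; -1 means no declared maximum cardinality.
RelDataType arrayType =
    typeFactory.createArrayType(typeFactory.createSqlType(SqlTypeName.INTEGER), -1);
// A non-struct component type becomes one column whose alias comes from the
// operand ordinal (EXPR$0 for operand 0), mirroring the else-branch above.
RelDataType rowType =
    typeFactory.builder()
        .add(SqlUtil.deriveAliasFromOrdinal(0), arrayType.getComponentType())
        .build();
// rowType -> RecordType(INTEGER EXPR$0)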
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.ArraySqlType in project samza by apache.
the class RelSchemaConverter method getRelDataType.
// TODO: SAMZA-2345 - Change RelSchemaConverter code to apply nullability based on Sql schema.
public RelDataType getRelDataType(SqlFieldSchema fieldSchema) {
  switch (fieldSchema.getFieldType()) {
    case ARRAY:
      RelDataType elementType = getRelDataType(fieldSchema.getElementSchema());
      return new ArraySqlType(elementType, true);
    case BOOLEAN:
      return createTypeWithNullability(createSqlType(SqlTypeName.BOOLEAN), true);
    case DOUBLE:
      return createTypeWithNullability(createSqlType(SqlTypeName.DOUBLE), true);
    case REAL:
      return createTypeWithNullability(createSqlType(SqlTypeName.REAL), true);
    case FLOAT:
      return createTypeWithNullability(createSqlType(SqlTypeName.FLOAT), true);
    case STRING:
      return createTypeWithNullability(createSqlType(SqlTypeName.VARCHAR), true);
    case BYTES:
      return createTypeWithNullability(createSqlType(SqlTypeName.VARBINARY), true);
    case INT16:
    case INT32:
      return createTypeWithNullability(createSqlType(SqlTypeName.INTEGER), true);
    case INT64:
      return createTypeWithNullability(createSqlType(SqlTypeName.BIGINT), true);
    case ROW:
    case ANY:
      // TODO Calcite execution engine doesn't support record type yet.
      return createTypeWithNullability(createSqlType(SqlTypeName.ANY), true);
    case MAP:
      RelDataType valueType = getRelDataType(fieldSchema.getValueSchema());
      return new MapSqlType(createSqlType(SqlTypeName.VARCHAR), valueType, true);
    default:
      String msg = String.format("Field Type %s is not supported", fieldSchema.getFieldType());
      LOG.error(msg);
      throw new SamzaException(msg);
  }
}
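The ARRAY and MAP branches above call the public ArraySqlType and MapSqlType constructors directly rather than going through a type factory. A hedged standalone sketch of the same shapes, assuming the plain org.apache.calcite classes:

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.ArraySqlType;
import org.apache.calcite.sql.type.MapSqlType;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

SqlTypeFactoryImpl factory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
// Nullable VARCHAR element type, as produced by createTypeWithNullability(..., true) above.
RelDataType element =
    factory.createTypeWithNullability(factory.createSqlType(SqlTypeName.VARCHAR), true);
// ArraySqlType(elementType, isNullable): a nullable ARRAY<VARCHAR>.
ArraySqlType array = new ArraySqlType(element, true);
// MapSqlType(keyType, valueType, isNullable): a nullable MAP<VARCHAR, VARCHAR>,
// matching the converter's hardcoded VARCHAR key type.
MapSqlType map = new MapSqlType(factory.createSqlType(SqlTypeName.VARCHAR), element, true);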
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.ArraySqlType in project druid by druid-io.
the class DruidRexExecutorTest method testArrayOfLongsReduction.
@Test
public void testArrayOfLongsReduction() {
  DruidRexExecutor rexy = new DruidRexExecutor(PLANNER_CONTEXT);
  List<RexNode> reduced = new ArrayList<>();
  BasicSqlType basicSqlType = new BasicSqlType(DruidTypeSystem.INSTANCE, SqlTypeName.INTEGER);
  ArraySqlType arraySqlType = new ArraySqlType(basicSqlType, false);
  List<BigDecimal> elements = ImmutableList.of(BigDecimal.valueOf(50), BigDecimal.valueOf(12));
  RexNode literal = rexBuilder.makeLiteral(elements, arraySqlType, true);
  rexy.reduce(rexBuilder, ImmutableList.of(literal), reduced);
  Assert.assertEquals(1, reduced.size());
  Assert.assertEquals(
      DruidExpression.ofExpression(
          ColumnType.LONG_ARRAY,
          DruidExpression.functionCall("array"),
          ImmutableList.of(
              DruidExpression.ofLiteral(ColumnType.LONG, "50"),
              DruidExpression.ofLiteral(ColumnType.LONG, "12"))),
      Expressions.toDruidExpression(PLANNER_CONTEXT, RowSignature.empty(), reduced.get(0)));
}
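The key step in this test is that RexBuilder.makeLiteral accepts a java.util.List value together with an ARRAY type. A stripped-down sketch of just that step, written as if inside the same test class (DruidTypeSystem comes from the project under test; the type factory and builder setup here is assumed, not part of the original test):

RelDataTypeFactory factory = new SqlTypeFactoryImpl(DruidTypeSystem.INSTANCE);
RexBuilder localBuilder = new RexBuilder(factory);
// ARRAY<INTEGER>, non-nullable, via the public ArraySqlType constructor.
ArraySqlType intArray = new ArraySqlType(factory.createSqlType(SqlTypeName.INTEGER), false);
// makeLiteral(value, type, allowCast): the list entries become the array elements.
RexNode arrayLiteral = localBuilder.makeLiteral(
    ImmutableList.of(BigDecimal.valueOf(50), BigDecimal.valueOf(12)), intArray, true);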
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.ArraySqlType in project druid by druid-io.
the class DruidRexExecutorTest method testArrayOfDoublesReduction.
@Test
public void testArrayOfDoublesReduction() {
  DruidRexExecutor rexy = new DruidRexExecutor(PLANNER_CONTEXT);
  List<RexNode> reduced = new ArrayList<>();
  BasicSqlType basicSqlType = new BasicSqlType(DruidTypeSystem.INSTANCE, SqlTypeName.DECIMAL);
  ArraySqlType arraySqlType = new ArraySqlType(basicSqlType, false);
  List<BigDecimal> elements = ImmutableList.of(BigDecimal.valueOf(50.12), BigDecimal.valueOf(12.1));
  RexNode literal = rexBuilder.makeLiteral(elements, arraySqlType, true);
  rexy.reduce(rexBuilder, ImmutableList.of(literal), reduced);
  Assert.assertEquals(1, reduced.size());
  Assert.assertEquals(
      DruidExpression.ofExpression(
          ColumnType.DOUBLE_ARRAY,
          DruidExpression.functionCall("array"),
          ImmutableList.of(
              DruidExpression.ofLiteral(ColumnType.DOUBLE, "50.12"),
              DruidExpression.ofLiteral(ColumnType.DOUBLE, "12.1"))),
      Expressions.toDruidExpression(PLANNER_CONTEXT, RowSignature.empty(), reduced.get(0)));
}
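As a hypothetical follow-up (not part of the original test), the reduced node could also be checked at the Calcite type level before converting it to a Druid expression. This assumes the reduced node is still array-typed, which the DOUBLE_ARRAY conversion asserted above already relies on:

// Hypothetical extra checks inside the same test method:
RexNode node = reduced.get(0);
Assert.assertEquals(SqlTypeName.ARRAY, node.getType().getSqlTypeName());
Assert.assertNotNull(node.getType().getComponentType());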
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.ArraySqlType in project beam by apache.
the class ZetaSqlUnnest method deriveUncollectRowType.
/**
* Returns the row type returned by applying the 'UNNEST' operation to a relational expression.
*
* <p>Each column in the relational expression must be a multiset of structs or an array. The
* return type is the type of that column, plus an ORDINALITY column if {@code withOrdinality}.
*/
public static RelDataType deriveUncollectRowType(RelNode rel, boolean withOrdinality) {
  RelDataType inputType = rel.getRowType();
  assert inputType.isStruct() : inputType + " is not a struct";
  final List<RelDataTypeField> fields = inputType.getFieldList();
  final RelDataTypeFactory typeFactory = rel.getCluster().getTypeFactory();
  final RelDataTypeFactory.Builder builder = typeFactory.builder();
  if (fields.size() == 1 && fields.get(0).getType().getSqlTypeName() == SqlTypeName.ANY) {
    // The component type is unknown here, so build a row type with the input
    // column name and ANY type.
    return builder.add(fields.get(0).getName(), SqlTypeName.ANY).nullable(true).build();
  }
  for (RelDataTypeField field : fields) {
    if (field.getType() instanceof MapSqlType) {
      builder.add(
          SqlUnnestOperator.MAP_KEY_COLUMN_NAME,
          Preconditions.checkArgumentNotNull(
              field.getType().getKeyType(),
              "Encountered MAP type with null key type in field %s",
              field));
      builder.add(
          SqlUnnestOperator.MAP_VALUE_COLUMN_NAME,
          Preconditions.checkArgumentNotNull(
              field.getType().getValueType(),
              "Encountered MAP type with null value type in field %s",
              field));
    } else {
      assert field.getType() instanceof ArraySqlType;
      RelDataType ret =
          Preconditions.checkArgumentNotNull(
              field.getType().getComponentType(),
              "Encountered ARRAY type with null component type in field %s",
              field);
      // Only difference from Uncollect.java: treats record types and scalar types equally.
      builder.add(SqlUtil.deriveAliasFromOrdinal(field.getIndex()), ret);
    }
  }
  if (withOrdinality) {
    builder.add(SqlUnnestOperator.ORDINALITY_COLUMN_NAME, SqlTypeName.INTEGER);
  }
  return builder.build();
}
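A minimal sketch, not from the project, of the row type this method derives for a single ARRAY<VARCHAR> input column with withOrdinality set. It assumes the plain org.apache.calcite imports and stock type factory; the constant names are the same ones referenced above:

RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
RelDataType arrayField =
    typeFactory.createArrayType(typeFactory.createSqlType(SqlTypeName.VARCHAR), -1);
RelDataType rowType =
    typeFactory.builder()
        // Record and scalar components alike become one aliased column here.
        .add(SqlUtil.deriveAliasFromOrdinal(0), arrayField.getComponentType())
        // The extra column appended when withOrdinality is true.
        .add(SqlUnnestOperator.ORDINALITY_COLUMN_NAME, SqlTypeName.INTEGER)
        .build();
// rowType -> RecordType(VARCHAR EXPR$0, INTEGER ORDINALITY)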