use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataTypeFactory in project flink by apache.
the class FlinkRelDecorrelator method projectJoinOutputWithNullability.
/**
* Pulls project above the join from its RHS input. Enforces nullability
* for join output.
*
* @param join Join
* @param project Original project as the right-hand input of the join
* @param nullIndicatorPos Position of null indicator
* @return the subtree with the new LogicalProject at the root
*/
private RelNode projectJoinOutputWithNullability(LogicalJoin join, LogicalProject project, int nullIndicatorPos) {
  final RelDataTypeFactory typeFactory = join.getCluster().getTypeFactory();
  final RelNode left = join.getLeft();
  final JoinRelType joinType = join.getJoinType();
  RexInputRef nullIndicator = new RexInputRef(
      nullIndicatorPos,
      typeFactory.createTypeWithNullability(
          join.getRowType().getFieldList().get(nullIndicatorPos).getType(), true));
  // now create the new project
  List<Pair<RexNode, String>> newProjExprs = Lists.newArrayList();
  // project everything from the LHS and then those from the original
  // projRel
  List<RelDataTypeField> leftInputFields = left.getRowType().getFieldList();
  for (int i = 0; i < leftInputFields.size(); i++) {
    newProjExprs.add(RexInputRef.of2(i, leftInputFields));
  }
  // Mark where the projected expr is coming from so that the types will
  // become nullable for the original projections, which are now coming out
  // of the nullable side of the OJ.
  boolean projectPulledAboveLeftCorrelator = joinType.generatesNullsOnRight();
  for (Pair<RexNode, String> pair : project.getNamedProjects()) {
    RexNode newProjExpr = removeCorrelationExpr(pair.left, projectPulledAboveLeftCorrelator, nullIndicator);
    newProjExprs.add(Pair.of(newProjExpr, pair.right));
  }
  return RelOptUtil.createProject(join, newProjExprs, false);
}
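The only RelDataTypeFactory call here is createTypeWithNullability, which widens the null indicator's field type before it is wrapped in a RexInputRef. Below is a minimal standalone sketch of that call, not part of FlinkRelDecorrelator; the class name and the field position 3 are made up for illustration.

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class NullIndicatorSketch {
  public static void main(String[] args) {
    RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    // A field type that is NOT NULL on its own...
    RelDataType intType = typeFactory.createSqlType(SqlTypeName.INTEGER);
    // ...becomes nullable once it sits on the null-generating side of an outer join.
    RelDataType nullableInt = typeFactory.createTypeWithNullability(intType, true);
    // The null indicator references that field with the nullable type (position 3 is arbitrary).
    RexInputRef nullIndicator = new RexInputRef(3, nullableInt);
    System.out.println(nullIndicator.getType().isNullable()); // prints: true
  }
}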
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataTypeFactory in project druid by druid-io.
the class RowSignature method getRelDataType.
/**
* Returns a Calcite RelDataType corresponding to this row signature.
*
* @param typeFactory factory for type construction
*
* @return Calcite row type
*/
public RelDataType getRelDataType(final RelDataTypeFactory typeFactory) {
  final RelDataTypeFactory.FieldInfoBuilder builder = typeFactory.builder();
  for (final String columnName : columnNames) {
    final ValueType columnType = getColumnType(columnName);
    final RelDataType type;
    if (Column.TIME_COLUMN_NAME.equals(columnName)) {
      type = typeFactory.createSqlType(SqlTypeName.TIMESTAMP);
    } else {
      switch (columnType) {
        case STRING:
          // Note that there is no attempt here to handle multi-value in any special way. Maybe one day...
          type = typeFactory.createTypeWithCharsetAndCollation(
              typeFactory.createSqlType(SqlTypeName.VARCHAR),
              Calcites.defaultCharset(),
              SqlCollation.IMPLICIT);
          break;
        case LONG:
          type = typeFactory.createSqlType(SqlTypeName.BIGINT);
          break;
        case FLOAT:
          type = typeFactory.createSqlType(SqlTypeName.FLOAT);
          break;
        case COMPLEX:
          // Loses information about exactly what kind of complex column this is.
          type = typeFactory.createSqlType(SqlTypeName.OTHER);
          break;
        default:
          throw new ISE("WTF?! valueType[%s] not translatable?", columnType);
      }
    }
    builder.add(columnName, type);
  }
  return builder.build();
}
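The pattern above — one builder.add per signature column, then build() — can be reproduced outside Druid with a plain SqlTypeFactoryImpl. Here is a minimal sketch; the three column names (__time, dim, cnt) are hypothetical and stand in for whatever the row signature contains.

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class RowTypeSketch {
  public static void main(String[] args) {
    RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    RelDataTypeFactory.Builder builder = typeFactory.builder();
    // One add() per column, mirroring the loop over columnNames above.
    builder.add("__time", typeFactory.createSqlType(SqlTypeName.TIMESTAMP));
    builder.add("dim", typeFactory.createSqlType(SqlTypeName.VARCHAR));
    builder.add("cnt", typeFactory.createSqlType(SqlTypeName.BIGINT));
    RelDataType rowType = builder.build();
    System.out.println(rowType.getFullTypeString());
  }
}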
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataTypeFactory in project druid by druid-io.
the class DruidPlanner method planExplanation.
private PlannerResult planExplanation(final RelNode rel, final SqlExplain explain) {
  final String explanation = RelOptUtil.dumpPlan("", rel, explain.getFormat(), explain.getDetailLevel());
  final Supplier<Sequence<Object[]>> resultsSupplier = Suppliers.ofInstance(
      Sequences.simple(ImmutableList.of(new Object[]{ explanation })));
  final RelDataTypeFactory typeFactory = rel.getCluster().getTypeFactory();
  return new PlannerResult(
      resultsSupplier,
      typeFactory.createStructType(
          ImmutableList.of(typeFactory.createSqlType(SqlTypeName.VARCHAR)),
          ImmutableList.of("PLAN")));
}
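The single-column struct type handed to PlannerResult can be built the same way in isolation. A minimal sketch of the createStructType call follows, assuming Guava's ImmutableList is on the classpath; the class name is made up.

import com.google.common.collect.ImmutableList;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class ExplainRowTypeSketch {
  public static void main(String[] args) {
    RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    // A struct (row) type with a single VARCHAR column named PLAN,
    // matching the shape of the EXPLAIN output above.
    RelDataType explainRowType = typeFactory.createStructType(
        ImmutableList.of(typeFactory.createSqlType(SqlTypeName.VARCHAR)),
        ImmutableList.of("PLAN"));
    System.out.println(explainRowType.getFullTypeString());
  }
}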
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataTypeFactory in project hive by apache.
the class HiveRelDecorrelator method projectJoinOutputWithNullability.
/**
* Pulls project above the join from its RHS input. Enforces nullability
* for join output.
*
* @param join Join
* @param project Original project as the right-hand input of the join
* @param nullIndicatorPos Position of null indicator
* @return the subtree with the new LogicalProject at the root
*/
private RelNode projectJoinOutputWithNullability(LogicalJoin join, LogicalProject project, int nullIndicatorPos) {
  final RelDataTypeFactory typeFactory = join.getCluster().getTypeFactory();
  final RelNode left = join.getLeft();
  final JoinRelType joinType = join.getJoinType();
  RexInputRef nullIndicator = new RexInputRef(
      nullIndicatorPos,
      typeFactory.createTypeWithNullability(
          join.getRowType().getFieldList().get(nullIndicatorPos).getType(), true));
  // now create the new project
  List<Pair<RexNode, String>> newProjExprs = Lists.newArrayList();
  // project everything from the LHS and then those from the original
  // projRel
  List<RelDataTypeField> leftInputFields = left.getRowType().getFieldList();
  for (int i = 0; i < leftInputFields.size(); i++) {
    newProjExprs.add(RexInputRef.of2(i, leftInputFields));
  }
  // Mark where the projected expr is coming from so that the types will
  // become nullable for the original projections, which are now coming out
  // of the nullable side of the OJ.
  boolean projectPulledAboveLeftCorrelator = joinType.generatesNullsOnRight();
  for (Pair<RexNode, String> pair : project.getNamedProjects()) {
    RexNode newProjExpr = removeCorrelationExpr(pair.left, projectPulledAboveLeftCorrelator, nullIndicator);
    newProjExprs.add(Pair.of(newProjExpr, pair.right));
  }
  return RelOptUtil.createProject(join, newProjExprs, false);
}
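This Hive copy performs essentially the same decorrelation step as the Flink variant above; the detail worth isolating is projectPulledAboveLeftCorrelator, which is driven purely by the join type. The short sketch below (not tied to HiveRelDecorrelator) prints which join types force the pulled-up RHS projections to become nullable.

import org.apache.calcite.rel.core.JoinRelType;

public class JoinNullabilitySketch {
  public static void main(String[] args) {
    // generatesNullsOnRight() is true for LEFT and FULL outer joins,
    // i.e. exactly the cases where the RHS projections must turn nullable.
    for (JoinRelType joinType : JoinRelType.values()) {
      System.out.println(joinType + " -> " + joinType.generatesNullsOnRight());
    }
  }
}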
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataTypeFactory in project calcite by apache.
the class CassandraTable method query.
/**
 * Executes a CQL query on the underlying table.
 *
 * @param session Cassandra session
 * @param fields List of fields to project
 * @param selectFields Fields to project together with their output aliases
 * @param predicates A list of predicates which should be used in the query
 * @param order Expressions to place in the ORDER BY clause
 * @param offset Number of leading rows to skip from the results
 * @param fetch Maximum number of rows to fetch, or a negative value for no limit
 * @return Enumerator of results
 */
public Enumerable<Object> query(final Session session, List<Map.Entry<String, Class>> fields,
    final List<Map.Entry<String, String>> selectFields, List<String> predicates,
    List<String> order, final Integer offset, final Integer fetch) {
  // Build the type of the resulting row based on the provided fields
  final RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
  final RelDataTypeFactory.Builder fieldInfo = typeFactory.builder();
  final RelDataType rowType = getRowType(typeFactory);
  Function1<String, Void> addField = new Function1<String, Void>() {
    public Void apply(String fieldName) {
      SqlTypeName typeName = rowType.getField(fieldName, true, false).getType().getSqlTypeName();
      fieldInfo.add(fieldName, typeFactory.createSqlType(typeName)).nullable(true);
      return null;
    }
  };
  if (selectFields.isEmpty()) {
    for (Map.Entry<String, Class> field : fields) {
      addField.apply(field.getKey());
    }
  } else {
    for (Map.Entry<String, String> field : selectFields) {
      addField.apply(field.getKey());
    }
  }
  final RelProtoDataType resultRowType = RelDataTypeImpl.proto(fieldInfo.build());
  // Construct the list of fields to project
  final String selectString;
  if (selectFields.isEmpty()) {
    selectString = "*";
  } else {
    selectString = Util.toString(new Iterable<String>() {
      public Iterator<String> iterator() {
        final Iterator<Map.Entry<String, String>> selectIterator = selectFields.iterator();
        return new Iterator<String>() {
          @Override
          public boolean hasNext() {
            return selectIterator.hasNext();
          }

          @Override
          public String next() {
            Map.Entry<String, String> entry = selectIterator.next();
            return entry.getKey() + " AS " + entry.getValue();
          }

          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
        };
      }
    }, "", ", ", "");
  }
  // Combine all predicates conjunctively
  String whereClause = "";
  if (!predicates.isEmpty()) {
    whereClause = " WHERE ";
    whereClause += Util.toString(predicates, "", " AND ", "");
  }
  // Build and issue the query and return an Enumerator over the results
  StringBuilder queryBuilder = new StringBuilder("SELECT ");
  queryBuilder.append(selectString);
  queryBuilder.append(" FROM \"" + columnFamily + "\"");
  queryBuilder.append(whereClause);
  if (!order.isEmpty()) {
    queryBuilder.append(Util.toString(order, " ORDER BY ", ", ", ""));
  }
  int limit = offset;
  if (fetch >= 0) {
    limit += fetch;
  }
  if (limit > 0) {
    queryBuilder.append(" LIMIT " + limit);
  }
  queryBuilder.append(" ALLOW FILTERING");
  final String query = queryBuilder.toString();
  return new AbstractEnumerable<Object>() {
    public Enumerator<Object> enumerator() {
      final ResultSet results = session.execute(query);
      // Skip results until we get to the right offset
      int skip = 0;
      Enumerator<Object> enumerator = new CassandraEnumerator(results, resultRowType);
      while (skip < offset && enumerator.moveNext()) {
        skip++;
      }
      return enumerator;
    }
  };
}
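Only the row-type construction at the top of query involves RelDataTypeFactory: each projected column is added as nullable, and the finished struct is wrapped in a RelProtoDataType so it can be re-materialized from whatever type factory is available when the enumerator runs. A minimal sketch of that fragment follows; the column names user_id and score are hypothetical.

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeImpl;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rel.type.RelProtoDataType;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class ResultRowTypeSketch {
  public static void main(String[] args) {
    RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    RelDataTypeFactory.Builder fieldInfo = typeFactory.builder();
    // Every projected column is added as nullable, as in the addField closure above.
    fieldInfo.add("user_id", typeFactory.createSqlType(SqlTypeName.VARCHAR)).nullable(true);
    fieldInfo.add("score", typeFactory.createSqlType(SqlTypeName.DOUBLE)).nullable(true);
    // The proto form defers materialization until a type factory is supplied.
    RelProtoDataType resultRowType = RelDataTypeImpl.proto(fieldInfo.build());
    RelDataType materialized = resultRowType.apply(typeFactory);
    System.out.println(materialized.getFullTypeString());
  }
}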