use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataType in project drill by apache.
the class AvroDrillTable method getNullableRelDataTypeFromAvroType.
private RelDataType getNullableRelDataTypeFromAvroType(RelDataTypeFactory typeFactory, Schema fieldSchema) {
  RelDataType relDataType = null;
  switch (fieldSchema.getType()) {
    case ARRAY:
      RelDataType eleType = getNullableRelDataTypeFromAvroType(typeFactory, fieldSchema.getElementType());
      relDataType = typeFactory.createArrayType(eleType, -1);
      break;
    case BOOLEAN:
      relDataType = typeFactory.createSqlType(SqlTypeName.BOOLEAN);
      break;
    case BYTES:
      relDataType = typeFactory.createSqlType(SqlTypeName.BINARY);
      break;
    case DOUBLE:
      relDataType = typeFactory.createSqlType(SqlTypeName.DOUBLE);
      break;
    case FIXED:
      logger.error("{} type not supported", fieldSchema.getType());
      throw UserException.unsupportedError().message("FIXED type not supported yet").build(logger);
    case FLOAT:
      relDataType = typeFactory.createSqlType(SqlTypeName.FLOAT);
      break;
    case INT:
      relDataType = typeFactory.createSqlType(SqlTypeName.INTEGER);
      break;
    case LONG:
      relDataType = typeFactory.createSqlType(SqlTypeName.BIGINT);
      break;
    case MAP:
      RelDataType valueType = getNullableRelDataTypeFromAvroType(typeFactory, fieldSchema.getValueType());
      RelDataType keyType = typeFactory.createSqlType(SqlTypeName.VARCHAR);
      relDataType = typeFactory.createMapType(keyType, valueType);
      break;
    case NULL:
      relDataType = typeFactory.createSqlType(SqlTypeName.NULL);
      break;
    case RECORD:
      // List<String> fieldNameList = Lists.newArrayList();
      // List<RelDataType> fieldRelDataTypeList = Lists.newArrayList();
      // for (Field field : fieldSchema.getFields()) {
      //   fieldNameList.add(field.name());
      //   fieldRelDataTypeList.add(getNullableRelDataTypeFromAvroType(typeFactory, field.schema()));
      // }
      // relDataType = typeFactory.createStructType(fieldRelDataTypeList, fieldNameList);
      // TODO: This should be mapped to a struct type, but because of a Calcite issue
      // it is mapped to a map type for now.
      keyType = typeFactory.createSqlType(SqlTypeName.VARCHAR);
      valueType = typeFactory.createSqlType(SqlTypeName.ANY);
      relDataType = typeFactory.createMapType(keyType, valueType);
      break;
    case ENUM:
    case STRING:
      relDataType = typeFactory.createSqlType(SqlTypeName.VARCHAR);
      break;
    case UNION:
      // Avro optional fields are unions of [NULL, <type>]; take the non-null member
      // and mark the resulting type as nullable.
      RelDataType optionalType = getNullableRelDataTypeFromAvroType(typeFactory, fieldSchema.getTypes().get(1));
      relDataType = typeFactory.createTypeWithNullability(optionalType, true);
      break;
  }
  return relDataType;
}
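For context, here is a minimal standalone sketch of the same Avro-to-Calcite mapping pattern. It assumes the plain org.apache.calcite and org.apache.avro packages rather than the Beam-vendored relocation named in the heading, and the class name AvroTypeMappingSketch is illustrative; it reproduces only the UNION branch above, where an Avro union of [null, string] becomes a nullable VARCHAR.

import java.util.Arrays;

import org.apache.avro.Schema;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class AvroTypeMappingSketch {
  public static void main(String[] args) {
    // A plain Calcite type factory; Drill wires its own factory through the planner.
    RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);

    // Avro's idiomatic "nullable string": union(null, string).
    Schema optionalString = Schema.createUnion(Arrays.asList(
        Schema.create(Schema.Type.NULL),
        Schema.create(Schema.Type.STRING)));

    // Mirror the UNION branch above: take the non-null member (index 1)
    // and wrap it with nullability.
    RelDataType inner = typeFactory.createSqlType(SqlTypeName.VARCHAR);
    RelDataType nullable = typeFactory.createTypeWithNullability(inner, true);

    System.out.println(optionalString + " -> " + nullable.getFullTypeString());
  }
}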
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataType in project drill by apache.
the class InfoSchemaRecordGenerator method visitTables.
/**
 * Visits the tables in the given schema and, for each table that is visited, its columns.
 * @param schemaPath the path to the given schema
 * @param schema the given schema
 */
public void visitTables(String schemaPath, SchemaPlus schema) {
  final AbstractSchema drillSchema = schema.unwrap(AbstractSchema.class);
  final List<String> tableNames = Lists.newArrayList(schema.getTableNames());
  for (Pair<String, ? extends Table> tableNameToTable : drillSchema.getTablesByNames(tableNames)) {
    final String tableName = tableNameToTable.getKey();
    final Table table = tableNameToTable.getValue();
    final TableType tableType = table.getJdbcTableType();
    // Visit the table, and if requested ...
    if (shouldVisitTable(schemaPath, tableName, tableType) && visitTable(schemaPath, tableName, table)) {
      // ... do for each of the table's fields.
      final RelDataType tableRow = table.getRowType(new JavaTypeFactoryImpl());
      for (RelDataTypeField field : tableRow.getFieldList()) {
        if (shouldVisitColumn(schemaPath, tableName, field.getName())) {
          visitField(schemaPath, tableName, field);
        }
      }
    }
  }
}
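The loop over tableRow.getFieldList() is the part that actually touches RelDataType. Below is a minimal sketch of that traversal, using a hand-built row type in place of table.getRowType(new JavaTypeFactoryImpl()) and plain Calcite classes; the class name RowTypeWalkSketch and the column names are illustrative.

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class RowTypeWalkSketch {
  public static void main(String[] args) {
    RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);

    // Stand-in for table.getRowType(...): a two-column row type.
    RelDataType rowType = typeFactory.builder()
        .add("id", SqlTypeName.INTEGER)
        .add("name", SqlTypeName.VARCHAR, 100)
        .build();

    // The same per-field loop visitTables runs for each visited table.
    for (RelDataTypeField field : rowType.getFieldList()) {
      System.out.println(field.getIndex() + ": " + field.getName()
          + " -> " + field.getType().getSqlTypeName());
    }
  }
}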
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataType in project drill by apache.
the class InsertLocalExchangeVisitor method visitExchange.
@Override
public Prel visitExchange(ExchangePrel prel, Void value) throws RuntimeException {
  Prel child = ((Prel) prel.getInput()).accept(this, null);
  // If DeMuxExchange is enabled, insert a UnorderedDeMuxExchangePrel after HashToRandomExchangePrel.
  if (!(prel instanceof HashToRandomExchangePrel)) {
    return (Prel) prel.copy(prel.getTraitSet(), Collections.singletonList((RelNode) child));
  }
  Prel newPrel = child;
  final HashToRandomExchangePrel hashPrel = (HashToRandomExchangePrel) prel;
  final List<String> childFields = child.getRowType().getFieldNames();
  List<RexNode> removeUpdatedExpr = null;
  if (isMuxEnabled) {
    // Insert Project operator with a new column that will be a hash of the HashToRandomExchange fields.
    final List<DistributionField> distFields = hashPrel.getFields();
    final List<String> outputFieldNames = Lists.newArrayList(childFields);
    final RexBuilder rexBuilder = prel.getCluster().getRexBuilder();
    final List<RelDataTypeField> childRowTypeFields = child.getRowType().getFieldList();
    final HashExpressionCreatorHelper<RexNode> hashHelper = new RexNodeBasedHashExpressionCreatorHelper(rexBuilder);
    final List<RexNode> distFieldRefs = Lists.newArrayListWithExpectedSize(distFields.size());
    for (int i = 0; i < distFields.size(); i++) {
      final int fieldId = distFields.get(i).getFieldId();
      distFieldRefs.add(rexBuilder.makeInputRef(childRowTypeFields.get(fieldId).getType(), fieldId));
    }
    final List<RexNode> updatedExpr = Lists.newArrayListWithExpectedSize(childRowTypeFields.size());
    removeUpdatedExpr = Lists.newArrayListWithExpectedSize(childRowTypeFields.size());
    for (RelDataTypeField field : childRowTypeFields) {
      RexNode rex = rexBuilder.makeInputRef(field.getType(), field.getIndex());
      updatedExpr.add(rex);
      removeUpdatedExpr.add(rex);
    }
    outputFieldNames.add(HashPrelUtil.HASH_EXPR_NAME);
    // Distribution seed.
    final RexNode distSeed = rexBuilder.makeBigintLiteral(BigDecimal.valueOf(HashPrelUtil.DIST_SEED));
    updatedExpr.add(HashPrelUtil.createHashBasedPartitionExpression(distFieldRefs, distSeed, hashHelper));
    RelDataType rowType = RexUtil.createStructType(prel.getCluster().getTypeFactory(), updatedExpr, outputFieldNames);
    ProjectPrel addColumnprojectPrel = new ProjectPrel(child.getCluster(), child.getTraitSet(), child, updatedExpr, rowType);
    newPrel = new UnorderedMuxExchangePrel(addColumnprojectPrel.getCluster(), addColumnprojectPrel.getTraitSet(), addColumnprojectPrel);
  }
  newPrel = new HashToRandomExchangePrel(prel.getCluster(), prel.getTraitSet(), newPrel, ((HashToRandomExchangePrel) prel).getFields());
  if (isDeMuxEnabled) {
    HashToRandomExchangePrel hashExchangePrel = (HashToRandomExchangePrel) newPrel;
    // Insert a DeMuxExchange to narrow down the number of receivers.
    newPrel = new UnorderedDeMuxExchangePrel(prel.getCluster(), prel.getTraitSet(), hashExchangePrel, hashExchangePrel.getFields());
  }
  if (isMuxEnabled) {
    // Remove the earlier inserted Project operator, since it creates issues down the road in HashJoin.
    RelDataType removeRowType = RexUtil.createStructType(newPrel.getCluster().getTypeFactory(), removeUpdatedExpr, childFields);
    ProjectPrel removeColumnProjectPrel = new ProjectPrel(newPrel.getCluster(), newPrel.getTraitSet(), newPrel, removeUpdatedExpr, removeRowType);
    return removeColumnProjectPrel;
  }
  return newPrel;
}
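The RelDataType-related work in this method is building input references for the distribution fields, a BIGINT seed literal, and a new row type for the projection. Below is a reduced sketch of just the RexBuilder portion, under assumptions: it uses plain Calcite classes, a made-up seed value in place of HashPrelUtil.DIST_SEED, and leaves out Drill's HashPrelUtil and ProjectPrel plumbing (the class name HashProjectionInputsSketch and column names are illustrative).

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class HashProjectionInputsSketch {
  public static void main(String[] args) {
    RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    RexBuilder rexBuilder = new RexBuilder(typeFactory);

    // Stand-in for the exchange child's row type.
    RelDataType childRowType = typeFactory.builder()
        .add("region_id", SqlTypeName.INTEGER)
        .add("sales", SqlTypeName.DOUBLE)
        .build();

    // Input references for the distribution fields (here: column 0),
    // the same way visitExchange builds distFieldRefs.
    List<RexNode> distFieldRefs = new ArrayList<>();
    RelDataTypeField distField = childRowType.getFieldList().get(0);
    distFieldRefs.add(rexBuilder.makeInputRef(distField.getType(), distField.getIndex()));

    // A BIGINT literal playing the role of the distribution seed
    // (placeholder value, not Drill's actual DIST_SEED).
    RexNode distSeed = rexBuilder.makeBigintLiteral(BigDecimal.valueOf(1301011L));

    System.out.println("dist refs: " + distFieldRefs + ", seed: " + distSeed);
  }
}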
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataType in project hive by apache.
the class HiveSubQRemoveRelBuilder method call.
/**
* Creates a call to a scalar operator.
*/
public RexNode call(SqlOperator operator, RexNode... operands) {
  final RexBuilder builder = cluster.getRexBuilder();
  final List<RexNode> operandList = ImmutableList.copyOf(operands);
  final RelDataType type = builder.deriveReturnType(operator, operandList);
  if (type == null) {
    throw new IllegalArgumentException("cannot derive type: " + operator
        + "; operands: " + Lists.transform(operandList, FN_TYPE));
  }
  return builder.makeCall(type, operator, operandList);
}
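A self-contained sketch of the derive-then-call pattern used here, assuming plain Calcite classes and a bare RexBuilder built from SqlTypeFactoryImpl (the class name DeriveReturnTypeSketch is illustrative): it derives the return type of PLUS over two numeric literals and then builds the call, mirroring the method body above.

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;

public class DeriveReturnTypeSketch {
  public static void main(String[] args) {
    RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    RexBuilder builder = new RexBuilder(typeFactory);

    // Two exact numeric literals as operands.
    List<RexNode> operands = Arrays.<RexNode>asList(
        builder.makeExactLiteral(BigDecimal.ONE),
        builder.makeExactLiteral(BigDecimal.TEN));

    // Derive the return type of "+" over the operands, then build the call.
    RelDataType type = builder.deriveReturnType(SqlStdOperatorTable.PLUS, operands);
    RexNode plus = builder.makeCall(type, SqlStdOperatorTable.PLUS, operands);

    System.out.println(plus + " : " + type.getFullTypeString());
  }
}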
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataType in project hive by apache.
the class HiveSubQRemoveRelBuilder method values.
/**
* Creates a {@link Values}.
*
* <p>The {@code values} array must have the same number of entries as
* {@code fieldNames}, or an integer multiple if you wish to create multiple
* rows.
*
* <p>If there are zero rows, or if all values of any column are
* null, this method cannot deduce the type of columns. For these cases,
* call {@link #values(Iterable, RelDataType)}.
*
* @param fieldNames Field names
* @param values Values
*/
public HiveSubQRemoveRelBuilder values(String[] fieldNames, Object... values) {
  if (fieldNames == null || fieldNames.length == 0
      || values.length % fieldNames.length != 0
      || values.length < fieldNames.length) {
    throw new IllegalArgumentException("Value count must be a positive multiple of field count");
  }
  final int rowCount = values.length / fieldNames.length;
  for (Ord<String> fieldName : Ord.zip(fieldNames)) {
    if (allNull(values, fieldName.i, fieldNames.length)) {
      throw new IllegalArgumentException("All values of field '" + fieldName.e + "' are null; cannot deduce type");
    }
  }
  final ImmutableList<ImmutableList<RexLiteral>> tupleList = tupleList(fieldNames.length, values);
  final RelDataTypeFactory.FieldInfoBuilder rowTypeBuilder = cluster.getTypeFactory().builder();
  for (final Ord<String> fieldName : Ord.zip(fieldNames)) {
    final String name = fieldName.e != null ? fieldName.e : "expr$" + fieldName.i;
    final RelDataType type = cluster.getTypeFactory().leastRestrictive(
        new AbstractList<RelDataType>() {
          public RelDataType get(int index) {
            return tupleList.get(index).get(fieldName.i).getType();
          }

          public int size() {
            return rowCount;
          }
        });
    rowTypeBuilder.add(name, type);
  }
  final RelDataType rowType = rowTypeBuilder.build();
  return values(tupleList, rowType);
}