Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.
From the class ArrayConcatSqlAggregator, method toDruidAggregation:
@Nullable
@Override
public Aggregation toDruidAggregation(
    PlannerContext plannerContext, RowSignature rowSignature, VirtualColumnRegistry virtualColumnRegistry,
    RexBuilder rexBuilder, String name, AggregateCall aggregateCall, Project project,
    List<Aggregation> existingAggregations, boolean finalizeAggregations
)
{
  final List<RexNode> arguments = aggregateCall.getArgList()
      .stream()
      .map(i -> Expressions.fromFieldAccess(rowSignature, project, i))
      .collect(Collectors.toList());

  Integer maxSizeBytes = null;
  if (arguments.size() > 1) {
    RexNode maxBytes = arguments.get(1);
    if (!maxBytes.isA(SqlKind.LITERAL)) {
      // maxBytes must be a literal
      return null;
    }
    maxSizeBytes = ((Number) RexLiteral.value(maxBytes)).intValue();
  }

  final DruidExpression arg = Expressions.toDruidExpression(plannerContext, rowSignature, arguments.get(0));
  if (arg == null) {
    // can't translate the aggregated expression
    return null;
  }

  final ExprMacroTable macroTable = plannerContext.getExprMacroTable();
  final String fieldName;
  final ColumnType druidType = Calcites.getValueTypeForRelDataTypeFull(aggregateCall.getType());
  if (druidType == null || !druidType.isArray()) {
    // must be an array
    return null;
  }
  final String initialvalue = ExpressionType.fromColumnTypeStrict(druidType).asTypeString() + "[]";

  if (arg.isDirectColumnAccess()) {
    fieldName = arg.getDirectColumn();
  } else {
    VirtualColumn vc = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, arg, druidType);
    fieldName = vc.getOutputName();
  }

  if (aggregateCall.isDistinct()) {
    // DISTINCT: fold and combine with array_set_add_all so duplicate elements are dropped.
    return Aggregation.create(
        new ExpressionLambdaAggregatorFactory(
            name, ImmutableSet.of(fieldName), null, initialvalue, null, true, false, false,
            StringUtils.format("array_set_add_all(\"__acc\", \"%s\")", fieldName),
            StringUtils.format("array_set_add_all(\"__acc\", \"%s\")", name),
            null, null, maxSizeBytes != null ? new HumanReadableBytes(maxSizeBytes) : null, macroTable
        )
    );
  } else {
    // Non-distinct: fold and combine with array_concat, appending every element of the input array.
    return Aggregation.create(
        new ExpressionLambdaAggregatorFactory(
            name, ImmutableSet.of(fieldName), null, initialvalue, null, true, false, false,
            StringUtils.format("array_concat(\"__acc\", \"%s\")", fieldName),
            StringUtils.format("array_concat(\"__acc\", \"%s\")", name),
            null, null, maxSizeBytes != null ? new HumanReadableBytes(maxSizeBytes) : null, macroTable
        )
    );
  }
}
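For context, here is a minimal, self-contained sketch of how the array check and the initial accumulator value above are derived from a ColumnType. It assumes the Druid processing module is on the classpath; the class name InitialValueSketch is illustrative, not part of Druid.

import org.apache.druid.math.expr.ExpressionType;
import org.apache.druid.segment.column.ColumnType;

public class InitialValueSketch
{
  public static void main(String[] args)
  {
    final ColumnType druidType = ColumnType.LONG_ARRAY;

    // Mirrors the guard in toDruidAggregation: only array-typed results are supported.
    if (!druidType.isArray()) {
      throw new IllegalStateException("must be an array type");
    }

    // Same construction as the aggregator's initial value: the native expression
    // type string followed by "[]" denotes an empty array literal.
    final String initialValue = ExpressionType.fromColumnTypeStrict(druidType).asTypeString() + "[]";
    System.out.println(initialValue);
  }
}

Running it should print the native array type string with a trailing "[]" (something like ARRAY<LONG>[]), which is what the aggregator uses as its empty starting accumulator.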
Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.
From the class Expressions, method literalToDruidExpression:
@Nullable
private static DruidExpression literalToDruidExpression(final PlannerContext plannerContext, final RexNode rexNode)
{
  final SqlTypeName sqlTypeName = rexNode.getType().getSqlTypeName();

  // Translate literal.
  final ColumnType columnType = Calcites.getColumnTypeForRelDataType(rexNode.getType());
  if (RexLiteral.isNullLiteral(rexNode)) {
    return DruidExpression.ofLiteral(columnType, DruidExpression.nullLiteral());
  } else if (SqlTypeName.NUMERIC_TYPES.contains(sqlTypeName)) {
    return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral((Number) RexLiteral.value(rexNode)));
  } else if (SqlTypeFamily.INTERVAL_DAY_TIME == sqlTypeName.getFamily()) {
    // Calcite represents DAY-TIME intervals in milliseconds.
    final long milliseconds = ((Number) RexLiteral.value(rexNode)).longValue();
    return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(milliseconds));
  } else if (SqlTypeFamily.INTERVAL_YEAR_MONTH == sqlTypeName.getFamily()) {
    // Calcite represents YEAR-MONTH intervals in months.
    final long months = ((Number) RexLiteral.value(rexNode)).longValue();
    return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(months));
  } else if (SqlTypeName.STRING_TYPES.contains(sqlTypeName)) {
    return DruidExpression.ofStringLiteral(RexLiteral.stringValue(rexNode));
  } else if (SqlTypeName.TIMESTAMP == sqlTypeName || SqlTypeName.DATE == sqlTypeName) {
    if (RexLiteral.isNullLiteral(rexNode)) {
      return DruidExpression.ofLiteral(columnType, DruidExpression.nullLiteral());
    } else {
      return DruidExpression.ofLiteral(
          columnType,
          DruidExpression.numberLiteral(
              Calcites.calciteDateTimeLiteralToJoda(rexNode, plannerContext.getTimeZone()).getMillis()
          )
      );
    }
  } else if (SqlTypeName.BOOLEAN == sqlTypeName) {
    return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(RexLiteral.booleanValue(rexNode) ? 1 : 0));
  } else {
    // Can't translate other literals.
    return null;
  }
}
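As a rough illustration of what this method does, the hedged sketch below classifies plain Java values the way the branches above classify Calcite literals. LiteralMappingSketch and javaLiteralToDruid are hypothetical names, string quoting is simplified, and the real code goes through DruidExpression, Calcites, and the planner's time zone rather than raw strings.

import org.joda.time.DateTime;

public class LiteralMappingSketch
{
  // Hypothetical helper mirroring the branch structure of literalToDruidExpression:
  // returns the native expression literal the corresponding branch would emit.
  static String javaLiteralToDruid(Object value)
  {
    if (value == null) {
      return "null";                                         // null literal, ColumnType from the declared type
    } else if (value instanceof Number) {
      return String.valueOf(value);                          // numeric types (and intervals as millis/months)
    } else if (value instanceof String) {
      return "'" + value + "'";                              // string types (real code escapes properly)
    } else if (value instanceof DateTime) {
      return String.valueOf(((DateTime) value).getMillis()); // TIMESTAMP/DATE become millisecond instants
    } else if (value instanceof Boolean) {
      return ((Boolean) value) ? "1" : "0";                  // BOOLEAN becomes a numeric literal
    }
    return null;                                             // can't translate other literals
  }

  public static void main(String[] args)
  {
    System.out.println(javaLiteralToDruid(42L));
    System.out.println(javaLiteralToDruid("fr"));
    System.out.println(javaLiteralToDruid(Boolean.TRUE));
  }
}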
Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.
From the class TimeArithmeticOperatorConversion, method toDruidExpression:
@Override
public DruidExpression toDruidExpression(final PlannerContext plannerContext, final RowSignature rowSignature, final RexNode rexNode)
{
  final RexCall call = (RexCall) rexNode;
  final List<RexNode> operands = call.getOperands();
  if (operands.size() != 2) {
    throw new IAE("Expected 2 args, got %s", operands.size());
  }

  final RexNode leftRexNode = operands.get(0);
  final RexNode rightRexNode = operands.get(1);
  final DruidExpression leftExpr = Expressions.toDruidExpression(plannerContext, rowSignature, leftRexNode);
  final DruidExpression rightExpr = Expressions.toDruidExpression(plannerContext, rowSignature, rightRexNode);
  if (leftExpr == null || rightExpr == null) {
    return null;
  }

  final ColumnType outputType = Calcites.getColumnTypeForRelDataType(rexNode.getType());

  if (rightRexNode.getType().getFamily() == SqlTypeFamily.INTERVAL_YEAR_MONTH) {
    // Period is a value in months.
    return DruidExpression.ofExpression(
        outputType,
        DruidExpression.functionCall("timestamp_shift"),
        ImmutableList.of(
            leftExpr,
            rightExpr.map(
                simpleExtraction -> null,
                expression -> rightRexNode.isA(SqlKind.LITERAL)
                              ? StringUtils.format("'P%sM'", RexLiteral.value(rightRexNode))
                              : StringUtils.format("concat('P', %s, 'M')", expression)
            ),
            DruidExpression.ofLiteral(ColumnType.LONG, DruidExpression.numberLiteral(direction > 0 ? 1 : -1)),
            DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID())
        )
    );
  } else if (rightRexNode.getType().getFamily() == SqlTypeFamily.INTERVAL_DAY_TIME) {
    // Period is a value in milliseconds. Ignore time zone.
    return DruidExpression.ofExpression(
        outputType,
        (args) -> StringUtils.format("(%s %s %s)", args.get(0).getExpression(), direction > 0 ? "+" : "-", args.get(1).getExpression()),
        ImmutableList.of(leftExpr, rightExpr)
    );
  } else if ((leftRexNode.getType().getFamily() == SqlTypeFamily.TIMESTAMP || leftRexNode.getType().getFamily() == SqlTypeFamily.DATE)
             && (rightRexNode.getType().getFamily() == SqlTypeFamily.TIMESTAMP || rightRexNode.getType().getFamily() == SqlTypeFamily.DATE)) {
    // Calcite represents both TIMESTAMP - INTERVAL and TIMESTAMPDIFF (TIMESTAMP - TIMESTAMP)
    // with a MINUS_DATE operator, so we must tell which case we're in by checking the type of
    // the second argument.
    Preconditions.checkState(direction < 0, "Time arithmetic requires direction < 0");
    if (call.getType().getFamily() == SqlTypeFamily.INTERVAL_YEAR_MONTH) {
      return DruidExpression.ofExpression(
          outputType,
          DruidExpression.functionCall("subtract_months"),
          ImmutableList.of(leftExpr, rightExpr, DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID()))
      );
    } else {
      return DruidExpression.ofExpression(
          outputType,
          (args) -> StringUtils.format("(%s %s %s)", args.get(0).getExpression(), "-", args.get(1).getExpression()),
          ImmutableList.of(leftExpr, rightExpr)
      );
    }
  } else {
    // Shouldn't happen if subclasses are behaving.
    throw new ISE("Got unexpected period type family[%s]", rightRexNode.getType().getFamily());
  }
}
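For intuition, here is a hedged sketch of the shape of native expressions the two interval branches build: a YEAR-MONTH interval becomes a timestamp_shift call with an ISO period, while a DAY-TIME interval becomes plain millisecond arithmetic. The column name, period, and millisecond values below are made up; only the function and operator shapes come from the code above, and TimeArithmeticSketch is an illustrative class name.

import org.apache.druid.java.util.common.StringUtils;

public class TimeArithmeticSketch
{
  public static void main(String[] args)
  {
    final int direction = 1; // +1 for the plus operator, -1 for the minus operator

    // YEAR-MONTH interval: rewritten to timestamp_shift with an ISO period,
    // e.g. a 3-month interval literal becomes 'P3M'.
    final String yearMonth = StringUtils.format(
        "timestamp_shift(\"__time\", '%s', %s, '%s')",
        "P3M",
        direction > 0 ? 1 : -1,
        "UTC"
    );

    // DAY-TIME interval: plain millisecond arithmetic, time zone ignored.
    final String dayTime = StringUtils.format(
        "(\"__time\" %s %s)",
        direction > 0 ? "+" : "-",
        86400000L
    );

    System.out.println(yearMonth);
    System.out.println(dayTime);
  }
}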
Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.
From the class InputRowSerde, method fromBytes:
public static InputRow fromBytes(final Map<String, IndexSerdeTypeHelper> typeHelperMap, byte[] data, AggregatorFactory[] aggs)
{
  try {
    ByteArrayDataInput in = ByteStreams.newDataInput(data);

    // Read timestamp
    long timestamp = in.readLong();

    Map<String, Object> event = new HashMap<>();

    // Read dimensions
    List<String> dimensions = new ArrayList<>();
    int dimNum = WritableUtils.readVInt(in);
    for (int i = 0; i < dimNum; i++) {
      String dimension = readString(in);
      dimensions.add(dimension);

      IndexSerdeTypeHelper typeHelper = typeHelperMap.get(dimension);
      if (typeHelper == null) {
        typeHelper = STRING_HELPER;
      }
      Object dimValues = typeHelper.deserialize(in);
      if (dimValues == null) {
        continue;
      }

      if (typeHelper.getType() == ValueType.STRING) {
        List<String> dimensionValues = (List<String>) dimValues;
        if (dimensionValues.size() == 1) {
          event.put(dimension, dimensionValues.get(0));
        } else {
          event.put(dimension, dimensionValues);
        }
      } else {
        event.put(dimension, dimValues);
      }
    }

    // Read metrics
    int metricSize = WritableUtils.readVInt(in);
    for (int i = 0; i < metricSize; i++) {
      final String metric = readString(in);
      final AggregatorFactory agg = getAggregator(metric, aggs, i);
      final ColumnType type = agg.getIntermediateType();
      final byte metricNullability = in.readByte();
      if (metricNullability == NullHandling.IS_NULL_BYTE) {
        // metric value is null.
        continue;
      }
      if (type.is(ValueType.FLOAT)) {
        event.put(metric, in.readFloat());
      } else if (type.is(ValueType.LONG)) {
        event.put(metric, WritableUtils.readVLong(in));
      } else if (type.is(ValueType.DOUBLE)) {
        event.put(metric, in.readDouble());
      } else {
        ComplexMetricSerde serde = getComplexMetricSerde(agg.getIntermediateType().getComplexTypeName());
        byte[] value = readBytes(in);
        event.put(metric, serde.fromBytes(value, 0, value.length));
      }
    }

    return new MapBasedInputRow(timestamp, dimensions, event);
  }
  catch (IOException ex) {
    throw new RuntimeException(ex);
  }
}
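The branch chosen for each metric depends on the aggregator's intermediate ColumnType. Below is a minimal sketch (assuming the Druid processing module is on the classpath; IntermediateTypeSketch is an illustrative name) of what those type checks look like for two built-in aggregator factories.

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.ValueType;

public class IntermediateTypeSketch
{
  public static void main(String[] args)
  {
    final AggregatorFactory[] aggs = new AggregatorFactory[]{
        new LongSumAggregatorFactory("clicks", "clicks"),
        new DoubleSumAggregatorFactory("revenue", "revenue")
    };

    for (AggregatorFactory agg : aggs) {
      final ColumnType type = agg.getIntermediateType();
      // fromBytes switches on checks like these to decide how to read the serialized value.
      System.out.println(agg.getName()
                         + " -> long=" + type.is(ValueType.LONG)
                         + ", double=" + type.is(ValueType.DOUBLE)
                         + ", complex=" + (type.getComplexTypeName() != null));
    }
  }
}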
Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.
From the class InlineDataSource, method fromJson:
/**
 * Factory method for Jackson. Used for inline datasources that were originally encoded as JSON. Private because
 * non-Jackson callers should use {@link #fromIterable}.
 */
@JsonCreator
private static InlineDataSource fromJson(
    @JsonProperty("columnNames") List<String> columnNames,
    @JsonProperty("columnTypes") List<ColumnType> columnTypes,
    @JsonProperty("rows") List<Object[]> rows
)
{
  Preconditions.checkNotNull(columnNames, "'columnNames' must be nonnull");

  if (columnTypes != null && columnNames.size() != columnTypes.size()) {
    throw new IAE("columnNames and columnTypes must be the same length");
  }

  final RowSignature.Builder builder = RowSignature.builder();
  for (int i = 0; i < columnNames.size(); i++) {
    final String name = columnNames.get(i);
    final ColumnType type = columnTypes != null ? columnTypes.get(i) : null;
    builder.add(name, type);
  }

  return new InlineDataSource(rows, builder.build());
}
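Since the Javadoc points non-Jackson callers at fromIterable, here is a hedged usage sketch that builds the same kind of RowSignature from ColumnType constants and creates the datasource directly. The column names and row values are made up, and InlineDataSourceSketch is an illustrative class name.

import com.google.common.collect.ImmutableList;
import org.apache.druid.query.InlineDataSource;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;

public class InlineDataSourceSketch
{
  public static void main(String[] args)
  {
    // Equivalent to what fromJson assembles from the "columnNames"/"columnTypes" properties.
    final RowSignature signature = RowSignature.builder()
        .add("country", ColumnType.STRING)
        .add("clicks", ColumnType.LONG)
        .build();

    final InlineDataSource dataSource = InlineDataSource.fromIterable(
        ImmutableList.of(
            new Object[]{"FR", 10L},
            new Object[]{"US", 25L}
        ),
        signature
    );

    System.out.println(dataSource.getRowSignature());
  }
}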