Use of org.apache.calcite.rel.type.RelDataTypeField in project druid by druid-io.
The class SqlResource, method doPost:
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response doPost(final SqlQuery sqlQuery) throws SQLException, IOException {
  // This is not integrated with the experimental authorization framework.
  // (Non-trivial since we don't know the dataSources up-front)
  final PlannerResult plannerResult;
  final DateTimeZone timeZone;
  try (final DruidPlanner planner = plannerFactory.createPlanner(sqlQuery.getContext())) {
    plannerResult = planner.plan(sqlQuery.getQuery());
    timeZone = planner.getPlannerContext().getTimeZone();

    // Remember which columns are time-typed, so we can emit ISO8601 instead of millis values.
    final List<RelDataTypeField> fieldList = plannerResult.rowType().getFieldList();
    final boolean[] timeColumns = new boolean[fieldList.size()];
    final boolean[] dateColumns = new boolean[fieldList.size()];
    for (int i = 0; i < fieldList.size(); i++) {
      final SqlTypeName sqlTypeName = fieldList.get(i).getType().getSqlTypeName();
      timeColumns[i] = sqlTypeName == SqlTypeName.TIMESTAMP;
      dateColumns[i] = sqlTypeName == SqlTypeName.DATE;
    }

    final Yielder<Object[]> yielder0 = Yielders.each(plannerResult.run());
    try {
      return Response.ok(
          new StreamingOutput() {
            @Override
            public void write(final OutputStream outputStream) throws IOException, WebApplicationException {
              Yielder<Object[]> yielder = yielder0;
              try (final JsonGenerator jsonGenerator = jsonMapper.getFactory().createGenerator(outputStream)) {
                jsonGenerator.writeStartArray();
                while (!yielder.isDone()) {
                  final Object[] row = yielder.get();
                  jsonGenerator.writeStartObject();
                  for (int i = 0; i < fieldList.size(); i++) {
                    final Object value;
                    if (timeColumns[i]) {
                      value = ISODateTimeFormat.dateTime().print(Calcites.calciteTimestampToJoda((long) row[i], timeZone));
                    } else if (dateColumns[i]) {
                      value = ISODateTimeFormat.dateTime().print(Calcites.calciteDateToJoda((int) row[i], timeZone));
                    } else {
                      value = row[i];
                    }
                    jsonGenerator.writeObjectField(fieldList.get(i).getName(), value);
                  }
                  jsonGenerator.writeEndObject();
                  yielder = yielder.next(null);
                }
                jsonGenerator.writeEndArray();
                jsonGenerator.flush();
                // End with CRLF
                outputStream.write('\r');
                outputStream.write('\n');
              } finally {
                yielder.close();
              }
            }
          }
      ).build();
    } catch (Throwable e) {
      // make sure to close yielder if anything happened before starting to serialize the response.
      yielder0.close();
      throw Throwables.propagate(e);
    }
  } catch (Exception e) {
    log.warn(e, "Failed to handle query: %s", sqlQuery);
    final Exception exceptionToReport;
    if (e instanceof RelOptPlanner.CannotPlanException) {
      exceptionToReport = new ISE("Cannot build plan for query: %s", sqlQuery.getQuery());
    } else {
      exceptionToReport = e;
    }
    return Response.serverError()
                   .type(MediaType.APPLICATION_JSON_TYPE)
                   .entity(jsonMapper.writeValueAsBytes(QueryInterruptedException.wrapIfNeeded(exceptionToReport)))
                   .build();
  }
}
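The RelDataTypeField usage above is the pre-scan of the planner's row type: each field's SqlTypeName decides whether the raw row value is written as-is or converted to an ISO8601 string. Below is a minimal standalone sketch of that scan, not Druid code; it assumes the rowType comes from an already-planned query, is meant to live in some helper class, and uses only the standard Calcite types (org.apache.calcite.rel.type.*, org.apache.calcite.sql.type.SqlTypeName).

// Sketch only: flags which output columns are SQL TIMESTAMP so a serializer can
// format them as ISO8601 instead of raw millis. DATE columns could be flagged the
// same way by comparing against SqlTypeName.DATE.
static boolean[] findTimestampColumns(final RelDataType rowType) {
  final List<RelDataTypeField> fields = rowType.getFieldList();
  final boolean[] isTimestamp = new boolean[fields.size()];
  for (final RelDataTypeField field : fields) {
    // getIndex() is the column's position in the row, getType() its Calcite type.
    isTimestamp[field.getIndex()] = field.getType().getSqlTypeName() == SqlTypeName.TIMESTAMP;
  }
  return isTimestamp;
}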
Use of org.apache.calcite.rel.type.RelDataTypeField in project druid by druid-io.
The class QueryMaker, method executeTimeseries:
private Sequence<Object[]> executeTimeseries(final DruidQueryBuilder queryBuilder, final TimeseriesQuery query) {
  final List<RelDataTypeField> fieldList = queryBuilder.getRowType().getFieldList();
  final List<DimensionSpec> dimensions = queryBuilder.getGrouping().getDimensions();
  final String timeOutputName = dimensions.isEmpty() ? null : Iterables.getOnlyElement(dimensions).getOutputName();
  Hook.QUERY_PLAN.run(query);
  return Sequences.map(
      query.run(walker, Maps.<String, Object>newHashMap()),
      new Function<Result<TimeseriesResultValue>, Object[]>() {
        @Override
        public Object[] apply(final Result<TimeseriesResultValue> result) {
          final Map<String, Object> row = result.getValue().getBaseObject();
          final Object[] retVal = new Object[fieldList.size()];
          for (final RelDataTypeField field : fieldList) {
            final String outputName = queryBuilder.getRowOrder().get(field.getIndex());
            if (outputName.equals(timeOutputName)) {
              retVal[field.getIndex()] = coerce(result.getTimestamp(), field.getType().getSqlTypeName());
            } else {
              retVal[field.getIndex()] = coerce(row.get(outputName), field.getType().getSqlTypeName());
            }
          }
          return retVal;
        }
      }
  );
}
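The same RelDataTypeField.getIndex() trick appears here: the name-keyed Timeseries result row is rearranged into a positional Object[] that lines up with the Calcite row type. A hedged, simplified sketch of that rearrangement follows; in the snippet the positional name comes from queryBuilder.getRowOrder() and every value goes through coerce(), while this standalone version keys on the field's own name and skips coercion.

// Simplified sketch: place values from a name-keyed row into a positional array
// whose slots match the Calcite row type's field indexes.
static Object[] toPositionalRow(final RelDataType rowType, final Map<String, Object> namedRow) {
  final Object[] row = new Object[rowType.getFieldCount()];
  for (final RelDataTypeField field : rowType.getFieldList()) {
    row[field.getIndex()] = namedRow.get(field.getName());
  }
  return row;
}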
Use of org.apache.calcite.rel.type.RelDataTypeField in project druid by druid-io.
The class DruidStatement, method createColumnMetaData:
public static List<ColumnMetaData> createColumnMetaData(final RelDataType rowType) {
  final List<ColumnMetaData> columns = new ArrayList<>();
  List<RelDataTypeField> fieldList = rowType.getFieldList();
  for (int i = 0; i < fieldList.size(); i++) {
    RelDataTypeField field = fieldList.get(i);
    final ColumnMetaData.Rep rep = QueryMaker.rep(field.getType().getSqlTypeName());
    final ColumnMetaData.ScalarType columnType = ColumnMetaData.scalar(
        field.getType().getSqlTypeName().getJdbcOrdinal(),
        field.getType().getSqlTypeName().getName(),
        rep
    );
    columns.add(
        new ColumnMetaData(
            i, // ordinal
            false, // auto increment
            true, // case sensitive
            false, // searchable
            false, // currency
            field.getType().isNullable()
                ? DatabaseMetaData.columnNullable
                : DatabaseMetaData.columnNoNulls, // nullable
            true, // signed
            field.getType().getPrecision(), // display size
            field.getName(), // label
            null, // column name
            null, // schema name
            field.getType().getPrecision(), // precision
            field.getType().getScale(), // scale
            null, // table name
            null, // catalog name
            columnType, // avatica type
            true, // read only
            false, // writable
            false, // definitely writable
            columnType.columnClassName() // column class name
        )
    );
  }
  return columns;
}
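For orientation, a possible caller of createColumnMetaData might look like the sketch below. This is not Druid code: the column names "__time" and "cnt" are invented, and the row type is built with the stock Calcite SqlTypeFactoryImpl and RelDataTypeSystem.DEFAULT.

// Hypothetical caller, inside some test or setup method:
final RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
final RelDataType rowType = typeFactory.builder()
    .add("__time", SqlTypeName.TIMESTAMP)
    .add("cnt", SqlTypeName.BIGINT)
    .build();
final List<ColumnMetaData> metaData = DruidStatement.createColumnMetaData(rowType);
// Note: per the snippet above, the field name lands in the label slot, so
// metaData.get(1).label is "cnt" while metaData.get(1).columnName is null.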
Use of org.apache.calcite.rel.type.RelDataTypeField in project flink by apache.
The class FlinkRelDecorrelator, method decorrelateRel:
/**
 * Rewrite LogicalProject.
 *
 * @param rel the project rel to rewrite
 */
public Frame decorrelateRel(LogicalProject rel) {
  //
  // Rewrite logic:
  //
  // 1. Pass along any correlated variables coming from the input.
  //
  final RelNode oldInput = rel.getInput();
  Frame frame = getInvoke(oldInput, rel);
  if (frame == null) {
    // If input has not been rewritten, do not rewrite this rel.
    return null;
  }
  final List<RexNode> oldProjects = rel.getProjects();
  final List<RelDataTypeField> relOutput = rel.getRowType().getFieldList();

  // LogicalProject projects the original expressions,
  // plus any correlated variables the input wants to pass along.
  final List<Pair<RexNode, String>> projects = Lists.newArrayList();

  // If this LogicalProject has correlated references, create a value generator
  // and produce the correlated variables in the new output.
  if (cm.mapRefRelToCorVar.containsKey(rel)) {
    decorrelateInputWithValueGenerator(rel);
    // The old input should be mapped to the LogicalJoin created by
    // rewriteInputWithValueGenerator().
    frame = map.get(oldInput);
  }

  // LogicalProject projects the original expressions
  final Map<Integer, Integer> mapOldToNewOutputPos = Maps.newHashMap();
  int newPos;
  for (newPos = 0; newPos < oldProjects.size(); newPos++) {
    projects.add(newPos, Pair.of(decorrelateExpr(oldProjects.get(newPos)), relOutput.get(newPos).getName()));
    mapOldToNewOutputPos.put(newPos, newPos);
  }

  // Project any correlated variables the input wants to pass along.
  final SortedMap<Correlation, Integer> mapCorVarToOutputPos = new TreeMap<>();
  for (Map.Entry<Correlation, Integer> entry : frame.corVarOutputPos.entrySet()) {
    projects.add(RexInputRef.of2(entry.getValue(), frame.r.getRowType().getFieldList()));
    mapCorVarToOutputPos.put(entry.getKey(), newPos);
    newPos++;
  }

  RelNode newProject = RelOptUtil.createProject(frame.r, projects, false);
  return register(rel, newProject, mapOldToNewOutputPos, mapCorVarToOutputPos);
}
Use of org.apache.calcite.rel.type.RelDataTypeField in project flink by apache.
The class FlinkRelDecorrelator, method decorrelateRel:
/**
 * Rewrite Correlator into a left outer join.
 *
 * @param rel Correlator
 */
public Frame decorrelateRel(LogicalCorrelate rel) {
  //
  // Rewrite logic:
  //
  // The original left input will be joined with the new right input that
  // has generated correlated variables propagated up. For any generated
  // cor vars that are not used in the join key, pass them along to be
  // joined later with the CorrelatorRels that produce them.
  //

  // the right input to Correlator should produce correlated variables
  final RelNode oldLeft = rel.getInput(0);
  final RelNode oldRight = rel.getInput(1);
  final Frame leftFrame = getInvoke(oldLeft, rel);
  final Frame rightFrame = getInvoke(oldRight, rel);
  if (leftFrame == null || rightFrame == null) {
    // If any input has not been rewritten, do not rewrite this rel.
    return null;
  }
  if (rightFrame.corVarOutputPos.isEmpty()) {
    return null;
  }
  assert rel.getRequiredColumns().cardinality() <= rightFrame.corVarOutputPos.keySet().size();

  // Change correlator rel into a join.
  // Join all the correlated variables produced by this correlator rel
  // with the values generated and propagated from the right input
  final SortedMap<Correlation, Integer> corVarOutputPos = new TreeMap<>(rightFrame.corVarOutputPos);
  final List<RexNode> conditions = new ArrayList<>();
  final List<RelDataTypeField> newLeftOutput = leftFrame.r.getRowType().getFieldList();
  int newLeftFieldCount = newLeftOutput.size();
  final List<RelDataTypeField> newRightOutput = rightFrame.r.getRowType().getFieldList();

  for (Map.Entry<Correlation, Integer> rightOutputPos : Lists.newArrayList(corVarOutputPos.entrySet())) {
    final Correlation corVar = rightOutputPos.getKey();
    if (!corVar.corr.equals(rel.getCorrelationId())) {
      continue;
    }
    final int newLeftPos = leftFrame.oldToNewOutputPos.get(corVar.field);
    final int newRightPos = rightOutputPos.getValue();
    conditions.add(
        rexBuilder.makeCall(
            SqlStdOperatorTable.EQUALS,
            RexInputRef.of(newLeftPos, newLeftOutput),
            new RexInputRef(newLeftFieldCount + newRightPos, newRightOutput.get(newRightPos).getType())
        )
    );
    // remove this cor var from output position mapping
    corVarOutputPos.remove(corVar);
  }

  // Update the output positions of the cor vars: only pass on the cor
  // vars that are not used in the join key.
  for (Correlation corVar : corVarOutputPos.keySet()) {
    int newPos = corVarOutputPos.get(corVar) + newLeftFieldCount;
    corVarOutputPos.put(corVar, newPos);
  }

  // then add any cor var from the left input. Do not need to change
  // output positions.
  corVarOutputPos.putAll(leftFrame.corVarOutputPos);

  // Create the mapping between the output of the old correlation rel
  // and the new join rel
  final Map<Integer, Integer> mapOldToNewOutputPos = Maps.newHashMap();
  int oldLeftFieldCount = oldLeft.getRowType().getFieldCount();
  int oldRightFieldCount = oldRight.getRowType().getFieldCount();
  assert rel.getRowType().getFieldCount() == oldLeftFieldCount + oldRightFieldCount;

  // Left input positions are not changed.
  mapOldToNewOutputPos.putAll(leftFrame.oldToNewOutputPos);

  // Right input positions are shifted by newLeftFieldCount.
  for (int i = 0; i < oldRightFieldCount; i++) {
    mapOldToNewOutputPos.put(i + oldLeftFieldCount, rightFrame.oldToNewOutputPos.get(i) + newLeftFieldCount);
  }

  final RexNode condition = RexUtil.composeConjunction(rexBuilder, conditions, false);
  RelNode newJoin = LogicalJoin.create(leftFrame.r, rightFrame.r, condition, ImmutableSet.<CorrelationId>of(), rel.getJoinType().toJoinType());
  return register(rel, newJoin, mapOldToNewOutputPos, corVarOutputPos);
}
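One detail worth noting at the tail of this method: RexUtil.composeConjunction ANDs the accumulated EQUALS predicates into the single condition handed to LogicalJoin.create, and with the nullOnEmpty flag set to false an empty list yields a TRUE literal rather than null. A minimal hedged sketch of just that composition step, as a helper method:

// Folds the per-cor-var EQUALS predicates into one AND condition, mirroring the
// call above just before LogicalJoin.create; an empty list produces a TRUE literal.
static RexNode composeJoinCondition(final RexBuilder rexBuilder, final List<RexNode> equalsConditions) {
  return RexUtil.composeConjunction(rexBuilder, equalsConditions, false);
}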