
Example 1 with Row

Use of org.apache.calcite.interpreter.Row in project calcite by apache.

From class DruidConnectionImpl, method parse:

/**
 * Parses the output of a query, sending the results to a
 * {@link Sink}.
 */
private void parse(QueryType queryType, InputStream in, Sink sink, List<String> fieldNames, List<ColumnMetaData.Rep> fieldTypes, Page page) {
    final JsonFactory factory = new JsonFactory();
    final Row.RowBuilder rowBuilder = Row.newBuilder(fieldNames.size());
    if (CalcitePrepareImpl.DEBUG) {
        try {
            final byte[] bytes = AvaticaUtils.readFullyToBytes(in);
            System.out.println("Response: " + new String(bytes, StandardCharsets.UTF_8));
            in = new ByteArrayInputStream(bytes);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
    int posTimestampField = -1;
    for (int i = 0; i < fieldTypes.size(); i++) {
        /* TODO: This needs to be revisited. The logic seems to imply that only
         * one column of type timestamp is present; this is not necessarily true,
         * see https://issues.apache.org/jira/browse/CALCITE-2175.
         */
        if (fieldTypes.get(i) == ColumnMetaData.Rep.JAVA_SQL_TIMESTAMP) {
            posTimestampField = i;
            break;
        }
    }
    try (final JsonParser parser = factory.createParser(in)) {
        switch(queryType) {
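            // A timeseries response is an array of {"timestamp", "result"} objects;
            // each "result" object yields one row.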
            case TIMESERIES:
                if (parser.nextToken() == JsonToken.START_ARRAY) {
                    while (parser.nextToken() == JsonToken.START_OBJECT) {
                        // loop until the token equals "}"
                        final Long timeValue = extractTimestampField(parser);
                        if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("result") && parser.nextToken() == JsonToken.START_OBJECT) {
                            if (posTimestampField != -1) {
                                rowBuilder.set(posTimestampField, timeValue);
                            }
                            parseFields(fieldNames, fieldTypes, rowBuilder, parser);
                            sink.send(rowBuilder.build());
                            rowBuilder.reset();
                        }
                        expect(parser, JsonToken.END_OBJECT);
                    }
                }
                break;
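            // A topN response is a single {"timestamp", "result"} object whose "result"
            // array holds one object per row; every row shares that timestamp.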
            case TOP_N:
                if (parser.nextToken() == JsonToken.START_ARRAY && parser.nextToken() == JsonToken.START_OBJECT) {
                    final Long timeValue = extractTimestampField(parser);
                    if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("result") && parser.nextToken() == JsonToken.START_ARRAY) {
                        while (parser.nextToken() == JsonToken.START_OBJECT) {
                            // loop until the token equals "}"
                            if (posTimestampField != -1) {
                                rowBuilder.set(posTimestampField, timeValue);
                            }
                            parseFields(fieldNames, fieldTypes, rowBuilder, parser);
                            sink.send(rowBuilder.build());
                            rowBuilder.reset();
                        }
                    }
                }
                break;
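            // A select response nests rows under result.events[*].event and carries
            // paging state in result.pagingIdentifiers; the "dimensions" and "metrics"
            // lists are skipped.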
            case SELECT:
                if (parser.nextToken() == JsonToken.START_ARRAY && parser.nextToken() == JsonToken.START_OBJECT) {
                    page.pagingIdentifier = null;
                    page.offset = -1;
                    page.totalRowCount = 0;
                    expectScalarField(parser, DEFAULT_RESPONSE_TIMESTAMP_COLUMN);
                    if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("result") && parser.nextToken() == JsonToken.START_OBJECT) {
                        while (parser.nextToken() == JsonToken.FIELD_NAME) {
                            if (parser.getCurrentName().equals("pagingIdentifiers") && parser.nextToken() == JsonToken.START_OBJECT) {
                                JsonToken token = parser.nextToken();
                                while (parser.getCurrentToken() == JsonToken.FIELD_NAME) {
                                    page.pagingIdentifier = parser.getCurrentName();
                                    if (parser.nextToken() == JsonToken.VALUE_NUMBER_INT) {
                                        page.offset = parser.getIntValue();
                                    }
                                    token = parser.nextToken();
                                }
                                expect(token, JsonToken.END_OBJECT);
                            } else if (parser.getCurrentName().equals("events") && parser.nextToken() == JsonToken.START_ARRAY) {
                                while (parser.nextToken() == JsonToken.START_OBJECT) {
                                    expectScalarField(parser, "segmentId");
                                    expectScalarField(parser, "offset");
                                    if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("event") && parser.nextToken() == JsonToken.START_OBJECT) {
                                        parseFields(fieldNames, fieldTypes, posTimestampField, rowBuilder, parser);
                                        sink.send(rowBuilder.build());
                                        rowBuilder.reset();
                                        page.totalRowCount += 1;
                                    }
                                    expect(parser, JsonToken.END_OBJECT);
                                }
                                parser.nextToken();
                            } else if (parser.getCurrentName().equals("dimensions") || parser.getCurrentName().equals("metrics")) {
                                expect(parser, JsonToken.START_ARRAY);
                                while (parser.nextToken() != JsonToken.END_ARRAY) {
                                // empty
                                }
                            }
                        }
                    }
                }
                break;
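            // A groupBy response is an array of {"version", "timestamp", "event"}
            // objects; each "event" object yields one row.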
            case GROUP_BY:
                if (parser.nextToken() == JsonToken.START_ARRAY) {
                    while (parser.nextToken() == JsonToken.START_OBJECT) {
                        expectScalarField(parser, "version");
                        final Long timeValue = extractTimestampField(parser);
                        if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("event") && parser.nextToken() == JsonToken.START_OBJECT) {
                            if (posTimestampField != -1) {
                                rowBuilder.set(posTimestampField, timeValue);
                            }
                            parseFields(fieldNames, fieldTypes, posTimestampField, rowBuilder, parser);
                            sink.send(rowBuilder.build());
                            rowBuilder.reset();
                        }
                        expect(parser, JsonToken.END_OBJECT);
                    }
                }
                break;
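            // A scan response groups rows by segment: each {"segmentId", "columns",
            // "events"} object carries its rows as arrays of values inside "events".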
            case SCAN:
                if (parser.nextToken() == JsonToken.START_ARRAY) {
                    while (parser.nextToken() == JsonToken.START_OBJECT) {
                        expectScalarField(parser, "segmentId");
                        expect(parser, JsonToken.FIELD_NAME);
                        if (parser.getCurrentName().equals("columns")) {
                            expect(parser, JsonToken.START_ARRAY);
                            while (parser.nextToken() != JsonToken.END_ARRAY) {
                            // Skip the columns list
                            }
                        }
                        if (parser.nextToken() == JsonToken.FIELD_NAME && parser.getCurrentName().equals("events") && parser.nextToken() == JsonToken.START_ARRAY) {
                            // "events" is an array of arrays, where each inner array is one row
                            while (parser.nextToken() == JsonToken.START_ARRAY) {
                                for (String field : fieldNames) {
                                    parseFieldForName(fieldNames, fieldTypes, posTimestampField, rowBuilder, parser, field);
                                }
                                expect(parser, JsonToken.END_ARRAY);
                                Row row = rowBuilder.build();
                                sink.send(row);
                                rowBuilder.reset();
                                page.totalRowCount += 1;
                            }
                        }
                        expect(parser, JsonToken.END_OBJECT);
                    }
                }
        }
    } catch (IOException | InterruptedException e) {
        throw new RuntimeException(e);
    }
}
Also used : JsonFactory(com.fasterxml.jackson.core.JsonFactory) IOException(java.io.IOException) ByteArrayInputStream(java.io.ByteArrayInputStream) JsonToken(com.fasterxml.jackson.core.JsonToken) Row(org.apache.calcite.interpreter.Row) JsonParser(com.fasterxml.jackson.core.JsonParser)
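
Every branch above follows the same Jackson pull-parsing pattern: advance the token stream, copy the fields of one JSON object into a Row.RowBuilder, emit the built Row, and reset the builder for the next row. The following is a minimal, self-contained sketch of that pattern for a timeseries-style response, not the Calcite implementation: the class name, the java.util.function.Consumer<Row> standing in for Calcite's Sink, and the string-only field values (the real method converts values according to fieldTypes) are illustrative assumptions.

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

import org.apache.calcite.interpreter.Row;

public class TimeseriesParseSketch {

    /**
     * Parses [{"timestamp": ..., "result": {...}}, ...] and emits one Row per
     * "result" object, with values placed in fieldNames order.
     */
    static void parseTimeseries(String json, List<String> fieldNames, Consumer<Row> sink) throws IOException {
        final Row.RowBuilder rowBuilder = Row.newBuilder(fieldNames.size());
        try (JsonParser parser = new JsonFactory().createParser(json)) {
            if (parser.nextToken() != JsonToken.START_ARRAY) {
                return;
            }
            while (parser.nextToken() == JsonToken.START_OBJECT) {
                // Walk the fields of one {"timestamp", "result"} entry.
                while (parser.nextToken() == JsonToken.FIELD_NAME) {
                    final String name = parser.getCurrentName();
                    // Advance to the field's value.
                    parser.nextToken();
                    if ("result".equals(name)) {
                        // Copy each result field into the builder slot given by fieldNames.
                        while (parser.nextToken() == JsonToken.FIELD_NAME) {
                            final int i = fieldNames.indexOf(parser.getCurrentName());
                            parser.nextToken();
                            if (i >= 0) {
                                rowBuilder.set(i, parser.getText());
                            }
                        }
                        sink.accept(rowBuilder.build());
                        rowBuilder.reset();
                    } else {
                        // Ignore "timestamp" and anything else in this sketch.
                        parser.skipChildren();
                    }
                }
            }
        }
    }

    public static void main(String[] args) throws IOException {
        final String json = "[{\"timestamp\":\"2015-09-12T00:00:00.000Z\","
            + "\"result\":{\"cnt\":42,\"country\":\"US\"}}]";
        parseTimeseries(json, Arrays.asList("country", "cnt"), row -> System.out.println(row));
    }
}

In the real method the same loop shape repeats per query type; only the JSON nesting around the row object changes.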

Example 2 with Row

Use of org.apache.calcite.interpreter.Row in project calcite by apache.

From class DruidConnectionImpl, method enumerable:

/**
 * Executes a request and returns the resulting rows as an
 * {@link Enumerable}, running the parser in a thread provided by
 * {@code service}.
 */
public Enumerable<Row> enumerable(final QueryType queryType, final String request, final List<String> fieldNames, final ExecutorService service) throws IOException {
    return new AbstractEnumerable<Row>() {

        public Enumerator<Row> enumerator() {
            final BlockingQueueEnumerator<Row> enumerator = new BlockingQueueEnumerator<>();
            final RunnableQueueSink sink = new RunnableQueueSink() {

                public void send(Row row) throws InterruptedException {
                    enumerator.queue.put(row);
                }

                public void end() {
                    enumerator.done.set(true);
                }

                @SuppressWarnings("deprecation")
                public void setSourceEnumerable(Enumerable<Row> enumerable) throws InterruptedException {
                    for (Row row : enumerable) {
                        send(row);
                    }
                    end();
                }

                public void run() {
                    try {
                        final Page page = new Page();
                        final List<ColumnMetaData.Rep> fieldTypes = Collections.nCopies(fieldNames.size(), null);
                        request(queryType, request, this, fieldNames, fieldTypes, page);
                        enumerator.done.set(true);
                    } catch (Throwable e) {
                        enumerator.throwableHolder.set(e);
                        enumerator.done.set(true);
                    }
                }
            };
            service.execute(sink);
            return enumerator;
        }
    };
}
Also used : AbstractEnumerable(org.apache.calcite.linq4j.AbstractEnumerable) Enumerable(org.apache.calcite.linq4j.Enumerable) AbstractEnumerable(org.apache.calcite.linq4j.AbstractEnumerable) Row(org.apache.calcite.interpreter.Row)
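
The interesting part of enumerable() is the hand-off: the parser runs on the supplied ExecutorService, pushes each Row onto a blocking queue, and signals completion (or stashes a Throwable), while the Enumerator handed back to the caller drains that queue. Below is a simplified, self-contained sketch of that producer/consumer shape; the class name, the String payload used instead of Row, the queue capacity, and the poll timeout are assumptions for illustration, and this is not Calcite's BlockingQueueEnumerator.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.calcite.linq4j.AbstractEnumerable;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Enumerator;

public class QueueEnumerableSketch {

    /** Returns an Enumerable whose rows are produced on {@code service}. */
    static Enumerable<String> enumerable(final ExecutorService service) {
        return new AbstractEnumerable<String>() {
            public Enumerator<String> enumerator() {
                final BlockingQueue<String> queue = new ArrayBlockingQueue<>(1024);
                final AtomicBoolean done = new AtomicBoolean(false);
                final AtomicReference<Throwable> failure = new AtomicReference<>();

                // Producer: in the real code this is the JSON parser sending Rows to the sink.
                service.execute(() -> {
                    try {
                        for (int i = 0; i < 3; i++) {
                            queue.put("row-" + i);
                        }
                    } catch (Throwable e) {
                        failure.set(e);
                    } finally {
                        done.set(true);
                    }
                });

                // Consumer: drains the queue until the producer reports completion.
                return new Enumerator<String>() {
                    private String current;

                    public String current() {
                        return current;
                    }

                    public boolean moveNext() {
                        try {
                            for (;;) {
                                final String next = queue.poll(10, TimeUnit.MILLISECONDS);
                                if (next != null) {
                                    current = next;
                                    return true;
                                }
                                if (done.get() && queue.isEmpty()) {
                                    if (failure.get() != null) {
                                        throw new RuntimeException(failure.get());
                                    }
                                    return false;
                                }
                            }
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                            return false;
                        }
                    }

                    public void reset() {
                        throw new UnsupportedOperationException();
                    }

                    public void close() {
                        // Nothing to release in this sketch.
                    }
                };
            }
        };
    }

    public static void main(String[] args) {
        final ExecutorService service = Executors.newSingleThreadExecutor();
        try {
            for (String row : enumerable(service)) {
                System.out.println(row);
            }
        } finally {
            service.shutdown();
        }
    }
}

The point of this shape, as in the Druid adapter, is that rows can flow to the caller while the response is still being parsed on another thread.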

Aggregations

Row (org.apache.calcite.interpreter.Row)2 JsonFactory (com.fasterxml.jackson.core.JsonFactory)1 JsonParser (com.fasterxml.jackson.core.JsonParser)1 JsonToken (com.fasterxml.jackson.core.JsonToken)1 ByteArrayInputStream (java.io.ByteArrayInputStream)1 IOException (java.io.IOException)1 AbstractEnumerable (org.apache.calcite.linq4j.AbstractEnumerable)1 Enumerable (org.apache.calcite.linq4j.Enumerable)1