Search in sources:

Example 1 with QueryResponse

Use of com.linkedin.pinot.tools.scan.query.QueryResponse in project pinot by linkedin.

The class HybridClusterScanComparisonIntegrationTest, method runQuery.

protected void runQuery(String pqlQuery, ScanBasedQueryProcessor scanBasedQueryProcessor, boolean displayStatus, String scanResult) throws Exception {
    JSONObject scanJson;
    if (scanResult == null) {
        QueryResponse scanResponse = scanBasedQueryProcessor.processQuery(pqlQuery);
        String scanRspStr = new ObjectMapper().writeValueAsString(scanResponse);
        if (_scanRspFileWriter != null) {
            if (scanRspStr.contains("\n")) {
                throw new RuntimeException("We don't handle new lines in json responses yet. The reader will parse newline as separator between query responses");
            }
            _scanRspFileWriter.write(scanRspStr + "\n");
        }
        scanJson = new JSONObject(scanRspStr);
    } else {
        scanJson = new JSONObject(scanResult);
    }
    JSONObject pinotJson = postQuery(pqlQuery);
    QueryComparison.setCompareNumDocs(false);
    try {
        QueryComparison.ComparisonStatus comparisonStatus = QueryComparison.compareWithEmpty(pinotJson, scanJson);
        if (comparisonStatus.equals(QueryComparison.ComparisonStatus.FAILED)) {
            _compareStatusFileWriter.write("\nQuery comparison failed for query " + _nQueriesRead + ":" + pqlQuery + "\n" + "Scan json: " + scanJson + "\n" + "Pinot json: " + pinotJson + "\n");
            _failedQueries.getAndIncrement();
        } else {
            _successfulQueries.getAndIncrement();
            if (comparisonStatus.equals(QueryComparison.ComparisonStatus.EMPTY)) {
                _emptyResults.getAndIncrement();
            } else if (_logMatchingResults) {
                _compareStatusFileWriter.write("\nMatched for query:" + pqlQuery + "\n" + scanJson + "\n");
            }
        }
        _compareStatusFileWriter.flush();
    } catch (Exception e) {
        _compareStatusFileWriter.write("Caught exception while running query comparison, failed for query " + pqlQuery + "\n" + "Scan json: " + scanJson + "\n" + "Pinot json: " + pinotJson + "\n");
        _failedQueries.getAndIncrement();
        _compareStatusFileWriter.flush();
    }
    int totalQueries = _successfulQueries.get() + _failedQueries.get();
    if (displayStatus || totalQueries % 5000 == 0) {
        doDisplayStatus(totalQueries);
    }
}
Also used : JSONObject(org.json.JSONObject) QueryResponse(com.linkedin.pinot.tools.scan.query.QueryResponse) QueryComparison(com.linkedin.pinot.tools.query.comparison.QueryComparison) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) IOException(java.io.IOException)
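As a minimal, self-contained sketch of the pattern in Example 1: the scan-based QueryResponse is serialized to JSON with Jackson and compared against the broker's JSON via QueryComparison. The segments directory path and the brokerJsonStr placeholder are assumptions (the test itself obtains the broker JSON through its postQuery helper); only calls visible in the example above are used.

import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONObject;
import com.linkedin.pinot.tools.query.comparison.QueryComparison;
import com.linkedin.pinot.tools.scan.query.QueryResponse;
import com.linkedin.pinot.tools.scan.query.ScanBasedQueryProcessor;

public class ScanComparisonSketch {
    public static void main(String[] args) throws Exception {
        // Assumed local directory containing the unpacked segments to scan.
        ScanBasedQueryProcessor processor = new ScanBasedQueryProcessor("/tmp/unpacked-segments");
        QueryResponse scanResponse = processor.processQuery("select count(*) from mytable");

        // Serialize the scan response so it can be parsed into a JSONObject, as in runQuery() above.
        String scanRspStr = new ObjectMapper().writeValueAsString(scanResponse);
        JSONObject scanJson = new JSONObject(scanRspStr);

        // Placeholder for the JSON string returned by the Pinot broker for the same query.
        String brokerJsonStr = "{}";
        JSONObject pinotJson = new JSONObject(brokerJsonStr);

        // Compare the two responses, ignoring numDocsScanned, exactly as the test does.
        QueryComparison.setCompareNumDocs(false);
        QueryComparison.ComparisonStatus status = QueryComparison.compareWithEmpty(pinotJson, scanJson);
        System.out.println("Comparison status: " + status);
    }
}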

Example 2 with QueryResponse

Use of com.linkedin.pinot.tools.scan.query.QueryResponse in project pinot by linkedin.

The class QueryComparison, method runFunctionMode.

private void runFunctionMode() throws Exception {
    BufferedReader resultReader = null;
    ScanBasedQueryProcessor scanBasedQueryProcessor = null;
    try (BufferedReader queryReader = new BufferedReader(new InputStreamReader(new FileInputStream(_queryFile), "UTF8"))) {
        if (_resultFile == null) {
            scanBasedQueryProcessor = new ScanBasedQueryProcessor(_segmentsDir.getAbsolutePath());
        } else {
            resultReader = new BufferedReader(new InputStreamReader(new FileInputStream(_resultFile), "UTF8"));
        }
        int passed = 0;
        int total = 0;
        String query;
        while ((query = queryReader.readLine()) != null) {
            if (query.isEmpty() || query.startsWith("#")) {
                continue;
            }
            JSONObject expectedJson = null;
            try {
                if (resultReader != null) {
                    expectedJson = new JSONObject(resultReader.readLine());
                } else {
                    QueryResponse expectedResponse = scanBasedQueryProcessor.processQuery(query);
                    expectedJson = new JSONObject(new ObjectMapper().writeValueAsString(expectedResponse));
                }
            } catch (Exception e) {
                LOGGER.error("Comparison FAILED: Id: {} Exception caught while getting expected response for query: '{}'", total, query, e);
            }
            JSONObject actualJson = null;
            if (expectedJson != null) {
                try {
                    actualJson = new JSONObject(_clusterStarter.query(query));
                } catch (Exception e) {
                    LOGGER.error("Comparison FAILED: Id: {} Exception caught while running query: '{}'", total, query, e);
                }
            }
            if (expectedJson != null && actualJson != null) {
                try {
                    if (compare(actualJson, expectedJson)) {
                        passed++;
                        LOGGER.info("Comparison PASSED: Id: {} actual Time: {} ms expected Time: {} ms Docs Scanned: {}", total, actualJson.get(TIME_USED_MS), expectedJson.get(TIME_USED_MS), actualJson.get(NUM_DOCS_SCANNED));
                        LOGGER.debug("actual Response: {}", actualJson);
                        LOGGER.debug("expected Response: {}", expectedJson);
                    } else {
                        LOGGER.error("Comparison FAILED: Id: {} query: {}", query);
                        LOGGER.info("actual Response: {}", actualJson);
                        LOGGER.info("expected Response: {}", expectedJson);
                    }
                } catch (Exception e) {
                    LOGGER.error("Comparison FAILED: Id: {} Exception caught while comparing query: '{}' actual response: {}, expected response: {}", total, query, actualJson, expectedJson, e);
                }
            }
            total++;
        }
        LOGGER.info("Total {} out of {} queries passed.", passed, total);
    } finally {
        if (resultReader != null) {
            resultReader.close();
        }
    }
}
Also used : ScanBasedQueryProcessor(com.linkedin.pinot.tools.scan.query.ScanBasedQueryProcessor) InputStreamReader(java.io.InputStreamReader) JSONObject(org.json.JSONObject) QueryResponse(com.linkedin.pinot.tools.scan.query.QueryResponse) BufferedReader(java.io.BufferedReader) FileInputStream(java.io.FileInputStream) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) JSONException(org.json.JSONException)
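The core of runFunctionMode() can be sketched in isolation: read a query file line by line, skip blank and commented lines, and build the expected JSON for each query from the scan-based processor. The file paths below are assumptions; the actual-vs-expected comparison is left out, since the compare() call in the example is an internal method of QueryComparison.

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONObject;
import com.linkedin.pinot.tools.scan.query.QueryResponse;
import com.linkedin.pinot.tools.scan.query.ScanBasedQueryProcessor;

public class ExpectedResponseSketch {
    public static void main(String[] args) throws Exception {
        // Assumed paths: a file with one PQL query per line, and a local segments directory.
        String queryFile = "/tmp/queries.txt";
        ScanBasedQueryProcessor processor = new ScanBasedQueryProcessor("/tmp/unpacked-segments");

        try (BufferedReader queryReader =
                new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF8"))) {
            String query;
            while ((query = queryReader.readLine()) != null) {
                // Same filtering as runFunctionMode(): skip empty lines and comments.
                if (query.isEmpty() || query.startsWith("#")) {
                    continue;
                }
                // Generate the expected response and convert it to a JSONObject for comparison.
                QueryResponse expectedResponse = processor.processQuery(query);
                JSONObject expectedJson =
                        new JSONObject(new ObjectMapper().writeValueAsString(expectedResponse));
                System.out.println("Expected response for '" + query + "': " + expectedJson);
            }
        }
    }
}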

Example 3 with QueryResponse

Use of com.linkedin.pinot.tools.scan.query.QueryResponse in project pinot by linkedin.

The class HybridClusterScanComparisonIntegrationTest, method runTestLoop.

protected void runTestLoop(Callable<Object> testMethod, boolean useMultipleThreads) throws Exception {
    // Clean up the Kafka topic
    // TODO jfim: Re-enable this once PINOT-2598 is fixed
    // purgeKafkaTopicAndResetRealtimeTable();
    List<Pair<File, File>> enabledRealtimeSegments = new ArrayList<>();
    // Sort the realtime segments based on their segment name so they get added from earliest to latest
    TreeMap<File, File> sortedRealtimeSegments = new TreeMap<File, File>(new Comparator<File>() {

        @Override
        public int compare(File o1, File o2) {
            return _realtimeAvroToSegmentMap.get(o1).getName().compareTo(_realtimeAvroToSegmentMap.get(o2).getName());
        }
    });
    sortedRealtimeSegments.putAll(_realtimeAvroToSegmentMap);
    for (File avroFile : sortedRealtimeSegments.keySet()) {
        enabledRealtimeSegments.add(Pair.of(avroFile, sortedRealtimeSegments.get(avroFile)));
        if (useMultipleThreads) {
            _queryExecutor = new ThreadPoolExecutor(4, 4, 5, TimeUnit.SECONDS, new ArrayBlockingQueue<Runnable>(50), new ThreadPoolExecutor.CallerRunsPolicy());
        }
        // Push avro for the new segment
        LOGGER.info("Pushing Avro file {} into Kafka", avroFile);
        pushAvroIntoKafka(Collections.singletonList(avroFile), KafkaStarterUtils.DEFAULT_KAFKA_BROKER, KAFKA_TOPIC);
        // Configure the scan based comparator to use the distinct union of the offline and realtime segments
        configureScanBasedComparator(enabledRealtimeSegments);
        QueryResponse queryResponse = _scanBasedQueryProcessor.processQuery("select count(*) from mytable");
        int expectedRecordCount = queryResponse.getNumDocsScanned();
        waitForRecordCountToStabilizeToExpectedCount(expectedRecordCount, System.currentTimeMillis() + getStabilizationTimeMs());
        // Run the actual tests
        LOGGER.info("Running queries");
        testMethod.call();
        if (useMultipleThreads) {
            if (_nQueriesRead == -1) {
                _queryExecutor.shutdown();
                _queryExecutor.awaitTermination(5, TimeUnit.MINUTES);
            } else {
                int totalQueries = _failedQueries.get() + _successfulQueries.get();
                while (totalQueries < _nQueriesRead) {
                    LOGGER.info("Completed " + totalQueries + " out of " + _nQueriesRead + " - waiting");
                    Uninterruptibles.sleepUninterruptibly(20, TimeUnit.SECONDS);
                    totalQueries = _failedQueries.get() + _successfulQueries.get();
                }
                if (totalQueries > _nQueriesRead) {
                    throw new RuntimeException("Executed " + totalQueries + " more than " + _nQueriesRead);
                }
                _queryExecutor.shutdown();
            }
        }
        int totalQueries = _failedQueries.get() + _successfulQueries.get();
        doDisplayStatus(totalQueries);
        // Release resources
        _scanBasedQueryProcessor.close();
        _compareStatusFileWriter.write("Status after push of " + avroFile + ":" + System.currentTimeMillis() + ":Executed " + _nQueriesRead + " queries, " + _failedQueries + " failures," + _emptyResults.get() + " empty results\n");
    }
}
Also used : ArrayList(java.util.ArrayList) TreeMap(java.util.TreeMap) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) QueryResponse(com.linkedin.pinot.tools.scan.query.QueryResponse) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor) File(java.io.File) Pair(org.apache.commons.lang3.tuple.Pair)
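Example 3 uses QueryResponse only for its getNumDocsScanned() accessor, which supplies the expected record count after each Avro push. A minimal sketch of just that step is below; the segments directory path is an assumption.

import com.linkedin.pinot.tools.scan.query.QueryResponse;
import com.linkedin.pinot.tools.scan.query.ScanBasedQueryProcessor;

public class RecordCountSketch {
    public static void main(String[] args) throws Exception {
        // Assumed path to the union of offline and realtime segments used by the test.
        ScanBasedQueryProcessor processor = new ScanBasedQueryProcessor("/tmp/unpacked-segments");

        // Count the documents the scan-based processor sees; the integration test waits until
        // the cluster's record count stabilizes to this value before running query comparisons.
        QueryResponse queryResponse = processor.processQuery("select count(*) from mytable");
        int expectedRecordCount = queryResponse.getNumDocsScanned();
        System.out.println("Expected record count: " + expectedRecordCount);

        processor.close();
    }
}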

Aggregations

QueryResponse (com.linkedin.pinot.tools.scan.query.QueryResponse)3 ObjectMapper (org.codehaus.jackson.map.ObjectMapper)2 JSONObject (org.json.JSONObject)2 QueryComparison (com.linkedin.pinot.tools.query.comparison.QueryComparison)1 ScanBasedQueryProcessor (com.linkedin.pinot.tools.scan.query.ScanBasedQueryProcessor)1 BufferedReader (java.io.BufferedReader)1 File (java.io.File)1 FileInputStream (java.io.FileInputStream)1 IOException (java.io.IOException)1 InputStreamReader (java.io.InputStreamReader)1 ArrayList (java.util.ArrayList)1 TreeMap (java.util.TreeMap)1 ArrayBlockingQueue (java.util.concurrent.ArrayBlockingQueue)1 ThreadPoolExecutor (java.util.concurrent.ThreadPoolExecutor)1 Pair (org.apache.commons.lang3.tuple.Pair)1 JSONException (org.json.JSONException)1