Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From class TestJmxSplitManager, method testNoPredicate:
@Test
public void testNoPredicate()
        throws Exception
{
    JmxTableHandle tableHandle = new JmxTableHandle(
            new SchemaTableName("schema", "tableName"),
            ImmutableList.of("objectName"),
            ImmutableList.of(columnHandle),
            true,
            TupleDomain.all());
    ConnectorSplitSource splitSource = splitManager.getSplits(
            JmxTransactionHandle.INSTANCE, SESSION, tableHandle, UNGROUPED_SCHEDULING, DynamicFilter.EMPTY);
    List<ConnectorSplit> allSplits = getAllSplits(splitSource);
    // Without a predicate, the JMX connector produces one split per node
    assertEquals(allSplits.size(), nodes.size());

    Set<String> actualNodes = nodes.stream().map(Node::getNodeIdentifier).collect(toSet());
    Set<String> expectedNodes = new HashSet<>();
    for (ConnectorSplit split : allSplits) {
        // Each split is addressed to exactly one host
        List<HostAddress> addresses = split.getAddresses();
        assertEquals(addresses.size(), 1);
        expectedNodes.add(addresses.get(0).getHostText());
    }
    assertEquals(actualNodes, expectedNodes);
}
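The getAllSplits helper is not part of this snippet. A minimal sketch of such a helper, assuming the batched getNextBatch(partitionHandle, maxSize) API used elsewhere on this page, could look like this:

// Sketch of a getAllSplits helper (an assumption, not the project's
// verified code): drain the source batch by batch until it is finished.
private static List<ConnectorSplit> getAllSplits(ConnectorSplitSource splitSource)
        throws ExecutionException, InterruptedException
{
    ImmutableList.Builder<ConnectorSplit> splits = ImmutableList.builder();
    while (!splitSource.isFinished()) {
        // getNextBatch returns a CompletableFuture<ConnectorSplitBatch>
        splits.addAll(splitSource.getNextBatch(NOT_PARTITIONED, 1000).get().getSplits());
    }
    return splits.build();
}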
Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From class TestPrometheusSplit, method testQueryDividedIntoSplitsShouldHaveCorrectSpacingBetweenTimes:
@Test
public void testQueryDividedIntoSplitsShouldHaveCorrectSpacingBetweenTimes()
{
    Instant now = LocalDateTime.of(2019, 10, 2, 7, 26, 56, 0).toInstant(UTC);
    PrometheusConnectorConfig config = getCommonConfig(prometheusHttpServer.resolve("/prometheus-data/prometheus-metrics.json"));
    PrometheusClient client = new PrometheusClient(config, METRIC_CODEC, TESTING_TYPE_MANAGER);
    PrometheusTable table = client.getTable("default", "up");
    PrometheusSplitManager splitManager = new PrometheusSplitManager(client, fixedClockAt(now), config);
    ConnectorSplitSource splits = splitManager.getSplits(
            null, null, new PrometheusTableHandle("default", table.getName()), null, (DynamicFilter) null);

    // Pull the first two splits and decode their query parameters
    PrometheusSplit split1 = (PrometheusSplit) splits.getNextBatch(NOT_PARTITIONED, 1).getNow(null).getSplits().get(0);
    Map<String, String> paramsMap1 = parse(URI.create(split1.getUri()), StandardCharsets.UTF_8).stream()
            .collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue));
    PrometheusSplit split2 = (PrometheusSplit) splits.getNextBatch(NOT_PARTITIONED, 1).getNow(null).getSplits().get(0);
    Map<String, String> paramsMap2 = parse(URI.create(split2.getUri()), StandardCharsets.UTF_8).stream()
            .collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue));
    assertEquals(paramsMap1.get("query"), "up[1d]");
    assertEquals(paramsMap2.get("query"), "up[1d]");

    // Adjacent splits should be spaced exactly one chunk apart in time
    long diff = Double.valueOf(paramsMap2.get("time")).longValue() - Double.valueOf(paramsMap1.get("time")).longValue();
    assertEquals(config.getQueryChunkSizeDuration().getValue(TimeUnit.SECONDS), diff, 0.0001);
}
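fixedClockAt pins the split manager's notion of "now" so that split boundaries are deterministic in the test. A plausible one-liner for such a helper, using only java.time (hypothetical; the test's actual helper may differ):

// Hypothetical fixedClockAt helper: freeze the clock at the given instant.
private static Clock fixedClockAt(Instant instant)
{
    return Clock.fixed(instant, ZoneOffset.UTC);
}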
Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From class TestPrometheusSplit, method testQueryWithTableNameNeedingURLEncodeInSplits:
@Test
public void testQueryWithTableNameNeedingURLEncodeInSplits()
        throws URISyntaxException
{
    Instant now = LocalDateTime.of(2019, 10, 2, 7, 26, 56, 0).toInstant(UTC);
    PrometheusConnectorConfig config = getCommonConfig(prometheusHttpServer.resolve("/prometheus-data/prom-metrics-non-standard-name.json"));
    PrometheusClient client = new PrometheusClient(config, METRIC_CODEC, TESTING_TYPE_MANAGER);
    PrometheusTable table = client.getTable("default", "up now");
    PrometheusSplitManager splitManager = new PrometheusSplitManager(client, fixedClockAt(now), config);
    ConnectorSplitSource splits = splitManager.getSplits(
            null, null, new PrometheusTableHandle("default", table.getName()), null, (DynamicFilter) null);
    PrometheusSplit split = (PrometheusSplit) splits.getNextBatch(NOT_PARTITIONED, 1).getNow(null).getSplits().get(0);
    String queryInSplit = URI.create(split.getUri()).getQuery();
    String timeShouldBe = decimalSecondString(now.toEpochMilli()
            - config.getMaxQueryRangeDuration().toMillis()
            + config.getQueryChunkSizeDuration().toMillis()
            - OFFSET_MILLIS * 20);
    // The space in the table name "up now" must be URL-encoded as "+" in the split URI
    assertEquals(queryInSplit, new URI("http://doesnotmatter:9090/api/v1/query?query=up+now["
            + getQueryChunkSizeDurationAsPrometheusCompatibleDurationString(config) + "]"
            + "&time=" + timeShouldBe).getQuery());
}
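decimalSecondString converts epoch milliseconds into the seconds-with-optional-decimals form that Prometheus accepts in its time parameter. One way to write it, offered as an assumption rather than the project's exact helper:

// Hedged sketch: divide epoch millis by 1000 exactly, keeping any
// sub-second fraction (e.g. 1569996416500 -> "1569996416.5").
private static String decimalSecondString(long millis)
{
    return new BigDecimal(Long.toString(millis))
            .divide(new BigDecimal(1000L))
            .toString();
}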
Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From class TestPrometheusSplit, method testQueryDividedIntoSplitsFirstSplitHasRightTime:
@Test
public void testQueryDividedIntoSplitsFirstSplitHasRightTime()
        throws URISyntaxException
{
    Instant now = LocalDateTime.of(2019, 10, 2, 7, 26, 56, 0).toInstant(UTC);
    PrometheusConnectorConfig config = getCommonConfig(prometheusHttpServer.resolve("/prometheus-data/prometheus-metrics.json"));
    PrometheusClient client = new PrometheusClient(config, METRIC_CODEC, TESTING_TYPE_MANAGER);
    PrometheusTable table = client.getTable("default", "up");
    PrometheusSplitManager splitManager = new PrometheusSplitManager(client, fixedClockAt(now), config);
    ConnectorSplitSource splits = splitManager.getSplits(
            null, null, new PrometheusTableHandle("default", table.getName()), null, (DynamicFilter) null);
    PrometheusSplit split = (PrometheusSplit) splits.getNextBatch(NOT_PARTITIONED, 1).getNow(null).getSplits().get(0);
    String queryInSplit = URI.create(split.getUri()).getQuery();
    // The first split's time endpoint is one chunk past the start of the
    // max query range, minus the accumulated per-chunk offsets
    String timeShouldBe = decimalSecondString(now.toEpochMilli()
            - config.getMaxQueryRangeDuration().toMillis()
            + config.getQueryChunkSizeDuration().toMillis()
            - OFFSET_MILLIS * 20);
    assertEquals(queryInSplit, new URI("http://doesnotmatter:9090/api/v1/query?query=up["
            + getQueryChunkSizeDurationAsPrometheusCompatibleDurationString(config) + "]"
            + "&time=" + timeShouldBe).getQuery());
}
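The expected time can be reasoned out by hand: if the config behind getCommonConfig uses a 21-day max query range cut into 1-day chunks (which the up[1d] query string and the factor of 20 suggest), the earliest chunk's time endpoint lies 20 chunks, each nudged by OFFSET_MILLIS, before "now". A hedged arithmetic sketch under those assumed values:

// Assumed values: max.query.range.duration=21d, query.chunk.size.duration=1d,
// OFFSET_MILLIS=1; inferred from the test above, not verified against the project.
long nowMillis = LocalDateTime.of(2019, 10, 2, 7, 26, 56, 0).toInstant(UTC).toEpochMilli();
long maxRangeMillis = TimeUnit.DAYS.toMillis(21);
long chunkMillis = TimeUnit.DAYS.toMillis(1);
// 21 one-day chunks cover the range; the earliest chunk's endpoint sits
// 20 chunks (and 20 accumulated 1 ms offsets) before now
long firstSplitTimeMillis = nowMillis - maxRangeMillis + chunkMillis - 1L * 20;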
Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From class PhoenixSplitManager, method getSplits:
@Override
public ConnectorSplitSource getSplits(
        ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorTableHandle table,
        SplitSchedulingStrategy splitSchedulingStrategy, DynamicFilter dynamicFilter)
{
    JdbcTableHandle tableHandle = (JdbcTableHandle) table;
    try (Connection connection = phoenixClient.getConnection(session)) {
        List<JdbcColumnHandle> columns = tableHandle.getColumns()
                .map(columnSet -> columnSet.stream().map(JdbcColumnHandle.class::cast).collect(toList()))
                .orElseGet(() -> phoenixClient.getColumns(session, tableHandle));
        PhoenixPreparedStatement inputQuery = (PhoenixPreparedStatement) phoenixClient.prepareStatement(
                session, connection, tableHandle, columns, Optional.empty());
        int maxScansPerSplit = session.getProperty(PhoenixSessionProperties.MAX_SCANS_PER_SPLIT, Integer.class);
        // Wrap each Phoenix scan range in a serializable Trino split
        List<ConnectorSplit> splits = getSplits(inputQuery, maxScansPerSplit).stream()
                .map(PhoenixInputSplit.class::cast)
                .map(split -> new PhoenixSplit(getSplitAddresses(split), SerializedPhoenixInputSplit.serialize(split)))
                .collect(toImmutableList());
        return new FixedSplitSource(splits);
    }
    catch (IOException | SQLException e) {
        throw new TrinoException(PHOENIX_SPLIT_ERROR, "Couldn't get Phoenix splits", e);
    }
}
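getSplitAddresses is not shown here. A hedged sketch of what it might do, assuming the Hadoop InputSplit.getLocations() contract that PhoenixInputSplit inherits:

// Assumed shape of getSplitAddresses, not the project's verified method:
// expose the split's preferred hosts to the Trino scheduler.
private List<HostAddress> getSplitAddresses(PhoenixInputSplit split)
{
    try {
        return Arrays.stream(split.getLocations())
                .map(HostAddress::fromString)
                .collect(toImmutableList());
    }
    catch (IOException | InterruptedException e) {
        throw new TrinoException(PHOENIX_SPLIT_ERROR, "Failed to get Phoenix split addresses", e);
    }
}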