Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From the class TestPinotSplitManager, method getSplitsHelper.
private List<PinotSplit> getSplitsHelper(PinotTableHandle pinotTable, int numSegmentsPerSplit, boolean forbidSegmentQueries)
{
    ConnectorSession session = createSessionWithNumSplits(numSegmentsPerSplit, forbidSegmentQueries, pinotConfig);
    ConnectorSplitSource splitSource = pinotSplitManager.getSplits(null, session, pinotTable, UNGROUPED_SCHEDULING, DynamicFilter.EMPTY);
    List<PinotSplit> splits = new ArrayList<>();
    while (!splitSource.isFinished()) {
        splits.addAll(getFutureValue(splitSource.getNextBatch(NOT_PARTITIONED, 1000)).getSplits().stream()
                .map(s -> (PinotSplit) s)
                .collect(toList()));
    }
    return splits;
}
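Note that getSplitsHelper drains the source but never closes it. ConnectorSplitSource is Closeable, so the same drain loop can be wrapped in try-with-resources; a minimal sketch under that assumption, reusing the same static imports as the helper above (getFutureValue, NOT_PARTITIONED, toList) and the same arbitrary batch size of 1000:

// Same drain pattern as getSplitsHelper, but releasing the split source when done.
private static List<PinotSplit> drainAndClose(ConnectorSplitSource splitSource)
{
    List<PinotSplit> splits = new ArrayList<>();
    try (splitSource) {
        while (!splitSource.isFinished()) {
            splits.addAll(getFutureValue(splitSource.getNextBatch(NOT_PARTITIONED, 1000)).getSplits().stream()
                    .map(PinotSplit.class::cast)
                    .collect(toList()));
        }
    }
    return splits;
}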
Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From the class PhoenixSplitManager, method getSplits.
@Override
public ConnectorSplitSource getSplits(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorTableHandle table, SplitSchedulingStrategy splitSchedulingStrategy, DynamicFilter dynamicFilter)
{
    JdbcTableHandle tableHandle = (JdbcTableHandle) table;
    try (Connection connection = phoenixClient.getConnection(session)) {
        List<JdbcColumnHandle> columns = tableHandle.getColumns()
                .map(columnSet -> columnSet.stream().map(JdbcColumnHandle.class::cast).collect(toList()))
                .orElseGet(() -> phoenixClient.getColumns(session, tableHandle));
        PhoenixPreparedStatement inputQuery = (PhoenixPreparedStatement) phoenixClient.prepareStatement(session, connection, tableHandle, columns, Optional.empty());
        int maxScansPerSplit = session.getProperty(PhoenixSessionProperties.MAX_SCANS_PER_SPLIT, Integer.class);
        List<ConnectorSplit> splits = getSplits(inputQuery, maxScansPerSplit).stream()
                .map(PhoenixInputSplit.class::cast)
                .map(split -> new PhoenixSplit(getSplitAddresses(split), SerializedPhoenixInputSplit.serialize(split)))
                .collect(toImmutableList());
        return new FixedSplitSource(splits);
    }
    catch (IOException | SQLException e) {
        throw new TrinoException(PHOENIX_SPLIT_ERROR, "Couldn't get Phoenix splits", e);
    }
}
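The getSplitAddresses helper called above is not part of this listing. A hypothetical sketch of what it could look like, assuming PhoenixInputSplit exposes the standard Hadoop InputSplit.getLocations() and that location failures should surface the same PHOENIX_SPLIT_ERROR (assumes java.util.Arrays, io.trino.spi.HostAddress, and Guava's toImmutableList are imported; this is an illustration, not the connector's actual implementation):

// Hypothetical: maps the Hadoop split's preferred locations to Trino HostAddresses.
private static List<HostAddress> getSplitAddresses(PhoenixInputSplit split)
{
    try {
        return Arrays.stream(split.getLocations())
                .map(HostAddress::fromString)
                .collect(toImmutableList());
    }
    catch (IOException | InterruptedException e) {
        throw new TrinoException(PHOENIX_SPLIT_ERROR, "Couldn't get Phoenix split addresses", e);
    }
}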
Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From the class TestPrometheusSplit, method testQueryDividedIntoSplitsLastSplitHasRightTime.
@Test
public void testQueryDividedIntoSplitsLastSplitHasRightTime()
        throws URISyntaxException
{
    Instant now = LocalDateTime.of(2019, 10, 2, 7, 26, 56, 0).toInstant(UTC);
    PrometheusConnectorConfig config = getCommonConfig(prometheusHttpServer.resolve("/prometheus-data/prometheus-metrics.json"));
    PrometheusClient client = new PrometheusClient(config, METRIC_CODEC, TESTING_TYPE_MANAGER);
    PrometheusTable table = client.getTable("default", "up");
    PrometheusSplitManager splitManager = new PrometheusSplitManager(client, fixedClockAt(now), config);
    ConnectorSplitSource splitsMaybe = splitManager.getSplits(null, null, new PrometheusTableHandle("default", table.getName()), null, (DynamicFilter) null);
    List<ConnectorSplit> splits = splitsMaybe.getNextBatch(NOT_PARTITIONED, NUMBER_MORE_THAN_EXPECTED_NUMBER_SPLITS).getNow(null).getSplits();
    int lastSplitIndex = splits.size() - 1;
    PrometheusSplit lastSplit = (PrometheusSplit) splits.get(lastSplitIndex);
    String queryInSplit = URI.create(lastSplit.getUri()).getQuery();
    String timeShouldBe = decimalSecondString(now.toEpochMilli());
    URI uriAsFormed = new URI("http://doesnotmatter:9090/api/v1/query?query=up[" + getQueryChunkSizeDurationAsPrometheusCompatibleDurationString(config) + "]" + "&time=" + timeShouldBe);
    assertEquals(queryInSplit, uriAsFormed.getQuery());
}
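fixedClockAt is a small test helper that freezes time at the given instant so split boundaries are deterministic. A minimal sketch of such a helper, assuming the split manager accepts a plain java.time.Clock; if the connector wraps time in its own clock abstraction, the idea is the same:

// Sketch: a Clock frozen at the given instant, in UTC.
private static Clock fixedClockAt(Instant instant)
{
    return Clock.fixed(instant, ZoneId.of("UTC"));
}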
Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From the class TestJmxSplitManager, method testPredicatePushdown.
@Test
public void testPredicatePushdown()
        throws Exception
{
    for (Node node : nodes) {
        String nodeIdentifier = node.getNodeIdentifier();
        TupleDomain<ColumnHandle> nodeTupleDomain = TupleDomain.fromFixedValues(ImmutableMap.of(columnHandle, NullableValue.of(createUnboundedVarcharType(), utf8Slice(nodeIdentifier))));
        JmxTableHandle tableHandle = new JmxTableHandle(new SchemaTableName("schema", "tableName"), ImmutableList.of("objectName"), ImmutableList.of(columnHandle), true, nodeTupleDomain);
        ConnectorSplitSource splitSource = splitManager.getSplits(JmxTransactionHandle.INSTANCE, SESSION, tableHandle, UNGROUPED_SCHEDULING, DynamicFilter.EMPTY);
        List<ConnectorSplit> allSplits = getAllSplits(splitSource);
        assertEquals(allSplits.size(), 1);
        assertEquals(allSplits.get(0).getAddresses().size(), 1);
        assertEquals(allSplits.get(0).getAddresses().get(0).getHostText(), nodeIdentifier);
    }
}
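The getAllSplits helper used by this test (and by getRecordSet below) is not shown in this listing. A plausible sketch, following the same drain pattern as the Pinot helper earlier; the batch size and the use of Airlift's getFutureValue are assumptions:

// Drains every batch from the split source into an immutable list.
private static List<ConnectorSplit> getAllSplits(ConnectorSplitSource splitSource)
{
    ImmutableList.Builder<ConnectorSplit> splits = ImmutableList.builder();
    while (!splitSource.isFinished()) {
        splits.addAll(getFutureValue(splitSource.getNextBatch(NOT_PARTITIONED, 1000)).getSplits());
    }
    return splits.build();
}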
Use of io.trino.spi.connector.ConnectorSplitSource in project trino by trinodb.
From the class TestJmxSplitManager, method getRecordSet.
private RecordSet getRecordSet(SchemaTableName schemaTableName)
        throws Exception
{
    JmxTableHandle tableHandle = metadata.getTableHandle(SESSION, schemaTableName);
    List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(SESSION, tableHandle).values());
    ConnectorSplitSource splitSource = splitManager.getSplits(JmxTransactionHandle.INSTANCE, SESSION, tableHandle, UNGROUPED_SCHEDULING, DynamicFilter.EMPTY);
    List<ConnectorSplit> allSplits = getAllSplits(splitSource);
    assertEquals(allSplits.size(), nodes.size());
    ConnectorSplit split = allSplits.get(0);
    return recordSetProvider.getRecordSet(JmxTransactionHandle.INSTANCE, SESSION, split, tableHandle, columnHandles);
}
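A RecordSet returned this way is consumed through its cursor. A minimal sketch of reading the first column of every row as text, assuming that column is a varchar; the helper name and column index are illustrative, not part of the test class:

// Reads column 0 of every row as a UTF-8 string, skipping nulls.
private static List<String> readFirstColumn(RecordSet recordSet)
{
    ImmutableList.Builder<String> values = ImmutableList.builder();
    try (RecordCursor cursor = recordSet.cursor()) {
        while (cursor.advanceNextPosition()) {
            if (!cursor.isNull(0)) {
                values.add(cursor.getSlice(0).toStringUtf8());
            }
        }
    }
    return values.build();
}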