
Example 11 with BatchedQueryResult

use of io.confluent.ksql.api.client.BatchedQueryResult in project ksql by confluentinc.

the class MigrationInfoCommandTest method givenMigrations.

/**
 * @param appliedVersions applied versions, in the order they were applied
 * @param states corresponding migration states (ordered according to {@code appliedVersions})
 * @param errorReasons corresponding error reasons (ordered according to {@code appliedVersions})
 * @param unappliedVersions (additional) existing versions that have not been applied
 * @param multiKeyPullQuerySupported whether the server version supports multi-key pull queries
 */
private void givenMigrations(
    final List<String> appliedVersions,
    final List<MigrationState> states,
    final List<String> errorReasons,
    final List<String> unappliedVersions,
    final boolean multiKeyPullQuerySupported) throws Exception {
    givenExistingMigrationFiles(appliedVersions);
    givenExistingMigrationFiles(unappliedVersions);
    givenCurrentMigrationVersion(appliedVersions.size() > 0 ? appliedVersions.get(appliedVersions.size() - 1) : MetadataUtil.NONE_VERSION);
    final List<Row> appliedRows = new ArrayList<>();
    for (int i = 0; i < appliedVersions.size(); i++) {
        String version = appliedVersions.get(i);
        String prevVersion = i > 0 ? appliedVersions.get(i - 1) : MetadataUtil.NONE_VERSION;
        Row row = mock(Row.class);
        when(row.getString(1)).thenReturn(version);
        when(row.getString(2)).thenReturn("checksum");
        when(row.getString(3)).thenReturn(prevVersion);
        when(row.getString(4)).thenReturn(states.get(i).toString());
        when(row.getString(5)).thenReturn(fileDescriptionForVersion(version));
        when(row.getString(6)).thenReturn("N/A");
        when(row.getString(7)).thenReturn("N/A");
        when(row.getString(8)).thenReturn(errorReasons.get(i));
        appliedRows.add(row);
    }
    if (multiKeyPullQuerySupported) {
        BatchedQueryResult queryResult = mock(BatchedQueryResult.class);
        when(ksqlClient.executeQuery("SELECT version, checksum, previous, state, name, started_on, completed_on, error_reason " + "FROM " + MIGRATIONS_TABLE + " WHERE version_key IN ('" + Stream.concat(appliedVersions.stream(), unappliedVersions.stream()).collect(Collectors.joining("', '")) + "');")).thenReturn(queryResult);
        when(queryResult.get()).thenReturn(appliedRows);
    } else {
        for (int i = 0; i < appliedVersions.size(); i++) {
            BatchedQueryResult queryResult = mock(BatchedQueryResult.class);
            when(ksqlClient.executeQuery("SELECT version, checksum, previous, state, name, started_on, completed_on, error_reason FROM " + MIGRATIONS_TABLE + " WHERE version_key = '" + appliedVersions.get(i) + "';")).thenReturn(queryResult);
            when(queryResult.get()).thenReturn(ImmutableList.of(appliedRows.get(i)));
        }
        for (String version : unappliedVersions) {
            BatchedQueryResult queryResult = mock(BatchedQueryResult.class);
            when(ksqlClient.executeQuery("SELECT version, checksum, previous, state, name, started_on, completed_on, error_reason FROM " + MIGRATIONS_TABLE + " WHERE version_key = '" + version + "';")).thenReturn(queryResult);
            when(queryResult.get()).thenReturn(ImmutableList.of());
        }
    }
}
Also used : ArrayList(java.util.ArrayList) Row(io.confluent.ksql.api.client.Row) Matchers.containsString(org.hamcrest.Matchers.containsString) BatchedQueryResult(io.confluent.ksql.api.client.BatchedQueryResult)
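
For orientation, the mocks above stand in for the real pull queries that the migrations tool issues through the Java client. The following is a minimal sketch of the un-mocked call pattern; the host, port, key value, and metadata table name are assumptions for illustration, not values from this test, and the surrounding method is assumed to declare throws Exception.

// Sketch only: the kind of interaction that givenMigrations() mocks out.
// Host, port, key value, and table name are illustrative assumptions.
ClientOptions options = ClientOptions.create()
    .setHost("localhost")
    .setPort(8088);
Client client = Client.create(options);

BatchedQueryResult result = client.executeQuery(
    "SELECT version, checksum, previous, state, name, started_on, completed_on, error_reason "
        + "FROM migration_schema_versions WHERE version_key = '1';");

// BatchedQueryResult completes with the full row list once the pull query finishes.
List<Row> rows = result.get();
for (Row row : rows) {
    // Columns are addressed 1-based, matching the row.getString(1..8) stubs above.
    System.out.println(row.getString(1) + " -> " + row.getString(4));
}

client.close();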

Example 12 with BatchedQueryResult

use of io.confluent.ksql.api.client.BatchedQueryResult in project ksql by confluentinc.

the class ClientMutationIntegrationTest method shouldExecuteQueryWithProperties.

@Test
public void shouldExecuteQueryWithProperties() {
    // Given
    final Map<String, Object> properties = new HashMap<>();
    properties.put("auto.offset.reset", "latest");
    final String sql = "SELECT * FROM " + TEST_STREAM + " EMIT CHANGES LIMIT 1;";
    final KsqlObject insertRow = new KsqlObject()
        .put("K", new KsqlObject().put("F1", new KsqlArray().add("my_key_shouldExecuteQueryWithProperties")))
        .put("STR", "Value_shouldExecuteQueryWithProperties")
        .put("LONG", 2000L)
        .put("DEC", new BigDecimal("12.34"))
        .put("BYTES_", new byte[] { 0, 1, 2 })
        .put("ARRAY", new KsqlArray().add("v1_shouldExecuteQueryWithProperties").add("v2_shouldExecuteQueryWithProperties"))
        .put("MAP", new KsqlObject().put("test_name", "shouldExecuteQueryWithProperties"))
        .put("STRUCT", new KsqlObject().put("F1", 4))
        .put("COMPLEX", COMPLEX_FIELD_VALUE)
        .put("TIMESTAMP", "1970-01-01T00:00:00.001")
        .put("DATE", "1970-01-01")
        .put("TIME", "00:00:00");
    // When
    final BatchedQueryResult queryResult = client.executeQuery(sql, properties);
    // Then: a newly inserted row arrives
    // Wait for row to arrive
    final AtomicReference<Row> rowRef = new AtomicReference<>();
    new Thread(() -> {
        try {
            final List<Row> rows = queryResult.get();
            assertThat(rows, hasSize(1));
            rowRef.set(rows.get(0));
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }).start();
    // Insert a new row
    final Row row = assertThatEventually(() -> {
        // Potentially try inserting multiple times, in case the query wasn't started by the first time
        try {
            client.insertInto(TEST_STREAM, insertRow).get();
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
        return rowRef.get();
    }, is(notNullValue()));
    // Verify received row
    assertThat(row.getKsqlObject("K"), is(new KsqlObject().put("F1", new KsqlArray().add("my_key_shouldExecuteQueryWithProperties"))));
    assertThat(row.getString("STR"), is("Value_shouldExecuteQueryWithProperties"));
    assertThat(row.getLong("LONG"), is(2000L));
    assertThat(row.getDecimal("DEC"), is(new BigDecimal("12.34")));
    assertThat(row.getBytes("BYTES_"), is(new byte[] { 0, 1, 2 }));
    assertThat(row.getKsqlArray("ARRAY"), is(new KsqlArray().add("v1_shouldExecuteQueryWithProperties").add("v2_shouldExecuteQueryWithProperties")));
    assertThat(row.getKsqlObject("MAP"), is(new KsqlObject().put("test_name", "shouldExecuteQueryWithProperties")));
    assertThat(row.getKsqlObject("STRUCT"), is(new KsqlObject().put("F1", 4)));
    assertThat(row.getKsqlObject("COMPLEX"), is(EXPECTED_COMPLEX_FIELD_VALUE));
    assertThat(row.getString("TIMESTAMP"), is("1970-01-01T00:00:00.001"));
    assertThat(row.getString("DATE"), is("1970-01-01"));
    assertThat(row.getString("TIME"), is("00:00"));
}
Also used : HashMap(java.util.HashMap) AtomicReference(java.util.concurrent.atomic.AtomicReference) Matchers.containsString(org.hamcrest.Matchers.containsString) BigDecimal(java.math.BigDecimal) ZooKeeperClientException(kafka.zookeeper.ZooKeeperClientException) KsqlClientException(io.confluent.ksql.api.client.exception.KsqlClientException) ExecutionException(java.util.concurrent.ExecutionException) KsqlObject(io.confluent.ksql.api.client.KsqlObject) ConnectorList(io.confluent.ksql.rest.entity.ConnectorList) List(java.util.List) Row(io.confluent.ksql.api.client.Row) KsqlArray(io.confluent.ksql.api.client.KsqlArray) BatchedQueryResult(io.confluent.ksql.api.client.BatchedQueryResult) IntegrationTest(io.confluent.common.utils.IntegrationTest) Test(org.junit.Test)
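
Stripped of the test scaffolding, the pattern being exercised is a push query run with per-request properties. A minimal sketch follows, assuming an existing Client named client and an illustrative stream named ORDERS; the surrounding method is assumed to declare throws Exception.

// Sketch only: push query with per-request properties.
// "client" and the ORDERS stream are assumptions for illustration.
Map<String, Object> properties = new HashMap<>();
properties.put("auto.offset.reset", "latest");

BatchedQueryResult queryResult =
    client.executeQuery("SELECT * FROM ORDERS EMIT CHANGES LIMIT 1;", properties);

// get() blocks until the LIMIT is reached; with auto.offset.reset=latest only rows
// produced after the query starts are returned, which is why the test above inserts
// from a separate thread and retries until a row arrives.
List<Row> rows = queryResult.get();
System.out.println(rows.get(0).values());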

Example 13 with BatchedQueryResult

use of io.confluent.ksql.api.client.BatchedQueryResult in project ksql by confluentinc.

the class ClientIntegrationTest method shouldHandleErrorResponseFromExecuteQuery.

@Test
public void shouldHandleErrorResponseFromExecuteQuery() {
    // When
    final BatchedQueryResult batchedQueryResult = client.executeQuery("SELECT * from A_FAKE_TABLE_NAME;");
    // thrown from .get() when the future completes exceptionally
    final Exception e = assertThrows(ExecutionException.class, batchedQueryResult::get);
    // Then
    assertThat(e.getCause(), instanceOf(KsqlClientException.class));
    assertThat(e.getCause().getMessage(), containsString("Received 400 response from server"));
    assertThat(e.getCause().getMessage(), containsString("A_FAKE_TABLE_NAME does not exist."));
    // queryID future should also be completed exceptionally
    // thrown from .get() when the future completes exceptionally
    final Exception queryIdException = assertThrows(ExecutionException.class, () -> batchedQueryResult.queryID().get());
    assertThat(queryIdException.getCause(), instanceOf(KsqlClientException.class));
    assertThat(queryIdException.getCause().getMessage(), containsString("Received 400 response from server"));
    assertThat(queryIdException.getCause().getMessage(), containsString("A_FAKE_TABLE_NAME does not exist."));
}
Also used : KsqlClientException(io.confluent.ksql.api.client.exception.KsqlClientException) ZooKeeperClientException(kafka.zookeeper.ZooKeeperClientException) ExecutionException(java.util.concurrent.ExecutionException) BatchedQueryResult(io.confluent.ksql.api.client.BatchedQueryResult) Test(org.junit.Test) IntegrationTest(io.confluent.common.utils.IntegrationTest)
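
Because BatchedQueryResult is a CompletableFuture of the row list, a statement the server rejects surfaces as an ExecutionException whose cause is a KsqlClientException. A minimal sketch of handling that in application code (the failing query is illustrative):

// Sketch only: unwrapping an executeQuery failure outside of a test.
BatchedQueryResult result = client.executeQuery("SELECT * FROM A_FAKE_TABLE_NAME;");
try {
    List<Row> rows = result.get();
    // ... use rows
} catch (ExecutionException e) {
    if (e.getCause() instanceof KsqlClientException) {
        // Message includes the HTTP status and the server error,
        // e.g. "Received 400 response from server: ... A_FAKE_TABLE_NAME does not exist."
        System.err.println("Query failed: " + e.getCause().getMessage());
    } else {
        throw new RuntimeException(e);
    }
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new RuntimeException(e);
}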

Example 14 with BatchedQueryResult

use of io.confluent.ksql.api.client.BatchedQueryResult in project ksql by confluentinc.

the class ClientIntegrationTest method shouldExecutePullQueryWithVariables.

@Test
public void shouldExecutePullQueryWithVariables() throws Exception {
    // When
    client.define("AGG_TABLE", AGG_TABLE);
    client.define("value", false);
    final BatchedQueryResult batchedQueryResult = client.executeQuery("SELECT ${value} from ${AGG_TABLE} WHERE K=STRUCT(F1 := ARRAY['a']);");
    // Then
    assertThat(batchedQueryResult.queryID().get(), is(notNullValue()));
    assertThat(batchedQueryResult.get().get(0).getBoolean(1), is(false));
}
Also used : BatchedQueryResult(io.confluent.ksql.api.client.BatchedQueryResult) Test(org.junit.Test) IntegrationTest(io.confluent.common.utils.IntegrationTest)
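
The define calls register session variables on the client, and ${...} references are substituted into the statement before it is sent. A minimal sketch of the same mechanism outside the test; the table name, column, and key value are assumptions, and the surrounding method is assumed to declare throws Exception.

// Sketch only: session variables with the Java client.
// USERS_BY_ID, the ID column, and the key value are illustrative.
client.define("table", "USERS_BY_ID");
client.define("id", "user_1");

BatchedQueryResult result =
    client.executeQuery("SELECT * FROM ${table} WHERE ID = '${id}';");

System.out.println("query id: " + result.queryID().get());
for (Row row : result.get()) {
    System.out.println(row.values());
}

// Variables can be cleared again with undefine.
client.undefine("id");
client.undefine("table");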

Example 15 with BatchedQueryResult

use of io.confluent.ksql.api.client.BatchedQueryResult in project ksql by confluentinc.

the class ClientIntegrationTest method shouldExecutePushQueryWithVariables.

@Test
public void shouldExecutePushQueryWithVariables() throws Exception {
    // When
    client.define("TEST_STREAM", TEST_STREAM);
    client.define("number", 4567);
    final BatchedQueryResult batchedQueryResult = client.executeQuery("SELECT ${number} FROM ${TEST_STREAM} EMIT CHANGES LIMIT " + PUSH_QUERY_LIMIT_NUM_ROWS + ";");
    // Then
    assertThat(batchedQueryResult.queryID().get(), is(notNullValue()));
    assertThat(batchedQueryResult.get().get(0).getInteger(1), is(4567));
}
Also used : BatchedQueryResult(io.confluent.ksql.api.client.BatchedQueryResult) Test(org.junit.Test) IntegrationTest(io.confluent.common.utils.IntegrationTest)

Aggregations

BatchedQueryResult (io.confluent.ksql.api.client.BatchedQueryResult): 16
Test (org.junit.Test): 9
Row (io.confluent.ksql.api.client.Row): 8
IntegrationTest (io.confluent.common.utils.IntegrationTest): 7
ExecutionException (java.util.concurrent.ExecutionException): 5
MigrationException (io.confluent.ksql.tools.migrations.MigrationException): 3
Matchers.containsString (org.hamcrest.Matchers.containsString): 3
KsqlArray (io.confluent.ksql.api.client.KsqlArray): 2
KsqlObject (io.confluent.ksql.api.client.KsqlObject): 2
KsqlClientException (io.confluent.ksql.api.client.exception.KsqlClientException): 2
List (java.util.List): 2
ZooKeeperClientException (kafka.zookeeper.ZooKeeperClientException): 2
ImmutableList (com.google.common.collect.ImmutableList): 1
Client (io.confluent.ksql.api.client.Client): 1
ServerInfo (io.confluent.ksql.api.client.ServerInfo): 1
ClientImpl (io.confluent.ksql.api.client.impl.ClientImpl): 1
ConnectorList (io.confluent.ksql.rest.entity.ConnectorList): 1
StreamedRow (io.confluent.ksql.rest.entity.StreamedRow): 1
MigrationConfig (io.confluent.ksql.tools.migrations.MigrationConfig): 1
ServerVersionUtil.getServerInfo (io.confluent.ksql.tools.migrations.util.ServerVersionUtil.getServerInfo): 1