Search in sources:

Example 1 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

The following example shows the class DataSourceNodeTest, method before.

// Common test fixture: wires the mocked build/execute contexts, serdes, and the
// data source so every test starts from a non-windowed, KTABLE-backed
// DataSourceNode. @SuppressWarnings("unchecked") covers the raw Serde cast below.
@Before
@SuppressWarnings("unchecked")
public void before() {
    realBuilder = new StreamsBuilder();
    // Build-time context: real config/function registry, and a fresh
    // QueryContext stack keyed by the node id passed to buildNodeContext.
    when(buildContext.getKsqlConfig()).thenReturn(realConfig);
    when(buildContext.getFunctionRegistry()).thenReturn(functionRegistry);
    when(buildContext.buildNodeContext(any())).thenAnswer(inv -> new QueryContext.Stacker().push(inv.getArgument(0).toString()));
    // Execute-time context: real Streams builder plus canned serdes/logger.
    when(executeContext.getKsqlConfig()).thenReturn(realConfig);
    when(executeContext.getStreamsBuilder()).thenReturn(realBuilder);
    when(executeContext.getProcessingLogger(any())).thenReturn(processingLogger);
    // Raw cast: keySerde's generic type differs from the builder's signature.
    when(executeContext.buildKeySerde(any(), any(), any())).thenReturn((Serde) keySerde);
    when(executeContext.buildValueSerde(any(), any(), any())).thenReturn(rowSerde);
    when(rowSerde.serializer()).thenReturn(mock(Serializer.class));
    when(rowSerde.deserializer()).thenReturn(mock(Deserializer.class));
    when(dataSource.getKsqlTopic()).thenReturn(topic);
    when(dataSource.getDataSourceType()).thenReturn(DataSourceType.KTABLE);
    // Factory dispatches on the source's type: KSTREAM -> stream, else table.
    when(schemaKStreamFactory.create(any(), any(), any())).thenAnswer(inv -> inv.<DataSource>getArgument(1).getDataSourceType() == DataSourceType.KSTREAM ? stream : table);
    givenWindowedSource(false);
    when(ksqlConfig.getBoolean(KsqlConfig.KSQL_ROWPARTITION_ROWOFFSET_ENABLED)).thenReturn(true);
    node = new DataSourceNode(PLAN_NODE_ID, SOME_SOURCE, SOME_SOURCE.getName(), false, ksqlConfig);
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Deserializer(org.apache.kafka.common.serialization.Deserializer) Serializer(org.apache.kafka.common.serialization.Serializer) DataSource(io.confluent.ksql.metastore.model.DataSource) Before(org.junit.Before)

Example 2 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

The following example shows the class QueryBuilderTest, method shouldStartCreateSourceQueryWithMaterializationProvider.

// Verifies that a CREATE_SOURCE persistent query, once started under the shared
// runtime, exposes its materialization through getMaterialization().
@Test
public void shouldStartCreateSourceQueryWithMaterializationProvider() {
    when(ksqlConfig.getBoolean(KsqlConfig.KSQL_SHARED_RUNTIME_ENABLED)).thenReturn(true);
    // Given: a mocked source with a schema and topic, built into a
    // CREATE_SOURCE persistent query that is taken through its full
    // lifecycle (initialize -> register -> start).
    final DataSource source = givenSource("foo");
    when(source.getSchema()).thenReturn(SINK_SCHEMA);
    when(source.getKsqlTopic()).thenReturn(ksqlTopic);
    final PersistentQueryMetadata queryMetadata = buildPersistentQuery(ImmutableSet.of(source), KsqlConstants.PersistentQueryType.CREATE_SOURCE, QUERY_ID, Optional.empty());
    queryMetadata.initialize();
    queryMetadata.register();
    queryMetadata.start();
    // When:
    final Optional<Materialization> result = queryMetadata.getMaterialization(QUERY_ID, stacker);
    // Then: the materialization provider wired into the query is returned.
    assertThat(result.get(), is(materialization));
}
Also used : KsMaterialization(io.confluent.ksql.execution.streams.materialization.ks.KsMaterialization) Materialization(io.confluent.ksql.execution.streams.materialization.Materialization) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) DataSource(io.confluent.ksql.metastore.model.DataSource) Test(org.junit.Test)

Example 3 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

The following example shows the class QueryBuilderTest, method givenSource.

/**
 * Builds a Mockito mock of {@code DataSource} whose {@code getName()} reports
 * the supplied name wrapped as a {@code SourceName}.
 *
 * @param name the logical source name the mock should answer with
 * @return the stubbed mock source
 */
private static DataSource givenSource(final String name) {
    final DataSource mockSource = Mockito.mock(DataSource.class);
    when(mockSource.getName()).thenReturn(SourceName.of(name));
    return mockSource;
}
Also used : DataSource(io.confluent.ksql.metastore.model.DataSource)

Example 4 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

The following example shows the class QueryBuilderTest, method shouldBuildTransientQueryCorrectly.

// Verifies that buildTransientQuery propagates the statement text, source
// names, execution plan, topology, and streams properties into the resulting
// TransientQueryMetadata.
// NOTE(review): the SOURCES.stream().map(DataSource::getName).collect(...)
// expression is computed twice (build call and assertion); consider extracting
// it to a local Set if java.util.Set is imported in this file.
@Test
public void shouldBuildTransientQueryCorrectly() {
    // Given:
    givenTransientQuery();
    // When:
    final TransientQueryMetadata queryMetadata = queryBuilder.buildTransientQuery(STATEMENT_TEXT, QUERY_ID, SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet()), physicalPlan, SUMMARY, TRANSIENT_SINK_SCHEMA, LIMIT, Optional.empty(), false, queryListener, streamsBuilder, Optional.empty(), new MetricCollectors());
    queryMetadata.initialize();
    // Then: each input handed to the builder is visible on the metadata.
    assertThat(queryMetadata.getStatementString(), equalTo(STATEMENT_TEXT));
    assertThat(queryMetadata.getSourceNames(), equalTo(SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet())));
    assertThat(queryMetadata.getExecutionPlan(), equalTo(SUMMARY));
    assertThat(queryMetadata.getTopology(), is(topology));
    assertThat(queryMetadata.getOverriddenProperties(), equalTo(OVERRIDES));
    // The properties passed to the Kafka Streams builder must match what the
    // metadata later reports as its streams properties.
    verify(kafkaStreamsBuilder).build(any(), propertyCaptor.capture());
    assertThat(queryMetadata.getStreamsProperties(), equalTo(propertyCaptor.getValue()));
}
Also used : MetricCollectors(io.confluent.ksql.metrics.MetricCollectors) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) DataSource(io.confluent.ksql.metastore.model.DataSource) Test(org.junit.Test)

Example 5 with DataSource

use of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc.

The following example shows the class InsertValuesExecutor, method execute.

// Executes an INSERT VALUES statement: resolves the target data source,
// validates the column list, serializes the row into a Kafka ProducerRecord,
// and sends it with the session's producer client config.
// Part of required API.
@SuppressWarnings("unused")
public void execute(final ConfiguredStatement<InsertValues> statement, final SessionProperties sessionProperties, final KsqlExecutionContext executionContext, final ServiceContext serviceContext) {
    final InsertValues insertValues = statement.getStatement();
    final MetaStore metaStore = executionContext.getMetaStore();
    final KsqlConfig config = statement.getSessionConfig().getConfig(true);
    final DataSource dataSource = getDataSource(config, metaStore, insertValues);
    // Fails fast on unknown/unwritable columns before any record is built.
    validateInsert(insertValues.getColumns(), dataSource);
    final ProducerRecord<byte[], byte[]> record = buildRecord(statement, metaStore, dataSource, serviceContext);
    try {
        producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps());
    } catch (final TopicAuthorizationException e) {
        // TopicAuthorizationException does not give much detailed information about why it failed,
        // except which topics are denied. Here we just add the ACL to make the error message
        // consistent with other authorization error messages.
        final Exception rootCause = new KsqlTopicAuthorizationException(AclOperation.WRITE, e.unauthorizedTopics());
        throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause);
    } catch (final Exception e) {
        // Any other failure is wrapped with the same "insert failed" context,
        // preserving the original exception as the cause.
        throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
    }
}
Also used : KsqlTopicAuthorizationException(io.confluent.ksql.exception.KsqlTopicAuthorizationException) MetaStore(io.confluent.ksql.metastore.MetaStore) InsertValues(io.confluent.ksql.parser.tree.InsertValues) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KsqlException(io.confluent.ksql.util.KsqlException) KsqlTopicAuthorizationException(io.confluent.ksql.exception.KsqlTopicAuthorizationException) TopicAuthorizationException(org.apache.kafka.common.errors.TopicAuthorizationException) KsqlTopicAuthorizationException(io.confluent.ksql.exception.KsqlTopicAuthorizationException) RestClientException(io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException) KsqlException(io.confluent.ksql.util.KsqlException) KsqlStatementException(io.confluent.ksql.util.KsqlStatementException) ExecutionException(java.util.concurrent.ExecutionException) KsqlSchemaAuthorizationException(io.confluent.ksql.exception.KsqlSchemaAuthorizationException) TopicAuthorizationException(org.apache.kafka.common.errors.TopicAuthorizationException) DataSource(io.confluent.ksql.metastore.model.DataSource)

Aggregations

DataSource (io.confluent.ksql.metastore.model.DataSource)70 Test (org.junit.Test)25 KsqlException (io.confluent.ksql.util.KsqlException)24 SourceName (io.confluent.ksql.name.SourceName)21 KsqlTopic (io.confluent.ksql.execution.ddl.commands.KsqlTopic)12 ConfiguredStatement (io.confluent.ksql.statement.ConfiguredStatement)12 LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema)10 MetricCollectors (io.confluent.ksql.metrics.MetricCollectors)9 Collectors (java.util.stream.Collectors)9 PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata)8 PreparedStatement (io.confluent.ksql.parser.KsqlParser.PreparedStatement)7 KsqlStatementException (io.confluent.ksql.util.KsqlStatementException)7 Optional (java.util.Optional)7 ImmutableList (com.google.common.collect.ImmutableList)6 GenericKey (io.confluent.ksql.GenericKey)6 QueryId (io.confluent.ksql.query.QueryId)6 ServiceContext (io.confluent.ksql.services.ServiceContext)6 KsqlConfig (io.confluent.ksql.util.KsqlConfig)6 Collections (java.util.Collections)6 ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString)6