Use of io.confluent.ksql.parser.tree.InsertValues in project ksql by confluentinc.
The class InsertValuesExecutorTest, method shouldThrowOnTablesWithKeyFieldAndNullKeyFieldValueProvided.
@Test
public void shouldThrowOnTablesWithKeyFieldAndNullKeyFieldValueProvided() {
// Given:
givenSourceTableWithSchema(SerdeFeatures.of(), SerdeFeatures.of());
final ConfiguredStatement<InsertValues> statement = givenInsertValues(
    ImmutableList.of(COL1),
    ImmutableList.of(new LongLiteral(2L)));
// When:
final Exception e = assertThrows(
    KsqlException.class,
    () -> executor.execute(statement, mock(SessionProperties.class), engine, serviceContext));
// Then:
assertThat(e.getMessage(), containsString("Failed to insert values into 'TOPIC'. Value for primary key column(s) k0 is required for tables"));
}
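The givenInsertValues helper is not shown on this page. A minimal sketch of what it could look like, inferred from the explicit ConfiguredStatement.of(...) construction used in shouldThrowWhenInsertValuesOnProcessingLogTopic below; the real helper in InsertValuesExecutorTest may differ:

// Hypothetical reconstruction of the givenInsertValues helper; inferred from
// this page's other snippets, not copied from the actual test class.
private static ConfiguredStatement<InsertValues> givenInsertValues(
    final List<ColumnName> columns,
    final List<Expression> values
) {
  return ConfiguredStatement.of(
      PreparedStatement.of(
          "", // statement text is not used by the executor under test
          new InsertValues(SourceName.of("TOPIC"), columns, values)),
      SessionConfig.of(new KsqlConfig(ImmutableMap.of()), ImmutableMap.of()));
}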
Use of io.confluent.ksql.parser.tree.InsertValues in project ksql by confluentinc.
The class InsertValuesExecutorTest, method shouldThrowWhenInsertValuesOnProcessingLogTopic.
@Test
public void shouldThrowWhenInsertValuesOnProcessingLogTopic() {
// Given:
givenDataSourceWithSchema(
    "default_ksql_processing_log",
    SCHEMA,
    SerdeFeatures.of(),
    SerdeFeatures.of(),
    false,
    false);
final KsqlConfig ksqlConfig = new KsqlConfig(ImmutableMap.of());
final ConfiguredStatement<InsertValues> statement = ConfiguredStatement.of(
    PreparedStatement.of(
        "",
        new InsertValues(
            SourceName.of("TOPIC"),
            allAndPseudoColumnNames(SCHEMA),
            ImmutableList.of(
                new LongLiteral(1L),
                new StringLiteral("str"),
                new StringLiteral("str"),
                new LongLiteral(2L)))),
    SessionConfig.of(ksqlConfig, ImmutableMap.of()));
// When:
final Exception e = assertThrows(
    KsqlException.class,
    () -> executor.execute(statement, mock(SessionProperties.class), engine, serviceContext));
// Then:
assertThat(e.getMessage(), containsString("Cannot insert values into read-only topic: default_ksql_processing_log"));
}
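The test asserts that inserts targeting the reserved processing log topic are rejected. An illustrative sketch of such a guard, matching the asserted message; this is not the actual InsertValuesExecutor implementation, and the helper name is hypothetical:

// Illustrative only; the real executor's read-only check may differ.
// The reserved topic name is taken from the test above.
private static void throwIfReadOnlyTopic(final DataSource dataSource) {
  final String topicName = dataSource.getKafkaTopicName();
  if ("default_ksql_processing_log".equals(topicName)) {
    throw new KsqlException("Cannot insert values into read-only topic: " + topicName);
  }
}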
Use of io.confluent.ksql.parser.tree.InsertValues in project ksql by confluentinc.
The class InsertValuesExecutorTest, method shouldThrowOnSerializingValueError.
@Test
public void shouldThrowOnSerializingValueError() {
// Given:
final ConfiguredStatement<InsertValues> statement = givenInsertValues(
    allAndPseudoColumnNames(SCHEMA),
    ImmutableList.of(
        new LongLiteral(1L),
        new StringLiteral("str"),
        new StringLiteral("str"),
        new LongLiteral(2L)));
when(valueSerializer.serialize(any(), any())).thenThrow(new SerializationException("Jibberish!"));
// When:
final Exception e = assertThrows(
    KsqlException.class,
    () -> executor.execute(statement, mock(SessionProperties.class), engine, serviceContext));
// Then:
assertThat(e.getCause(), hasMessage(containsString("Could not serialize value")));
}
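The test stubs the value serializer to fail and asserts that the cause of the thrown KsqlException carries the "Could not serialize value" message. An illustrative sketch of that cause-wrapping contract; the method, parameters, and topic name are hypothetical, and the executor's actual code may differ:

// Illustrative only: the exception thrown here would become the cause of the
// top-level KsqlException the executor raises.
private static byte[] serializeValue(
    final Serializer<GenericRow> valueSerializer,
    final GenericRow row
) {
  try {
    return valueSerializer.serialize("TOPIC", row);
  } catch (final Exception e) {
    throw new KsqlException("Could not serialize value: " + row, e);
  }
}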
Use of io.confluent.ksql.parser.tree.InsertValues in project ksql by confluentinc.
The class KsqlTesterTest, method pipeInput.
private void pipeInput(final ConfiguredStatement<InsertValues> statement) {
final InsertValues insertValues = statement.getStatement();
final DataSource dataSource = engine.getMetaStore().getSource(insertValues.getTarget());
if (dataSource == null) {
throw new KsqlException("Unknown data source " + insertValues.getTarget());
}
final KsqlGenericRecord record = new GenericRecordFactory(config, engine.getMetaStore(), System::currentTimeMillis)
    .build(
        insertValues.getColumns(),
        insertValues.getValues(),
        dataSource.getSchema(),
        dataSource.getDataSourceType());
driverPipeline.pipeInput(dataSource.getKafkaTopicName(), record.key, record.value, record.ts);
}
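A hedged usage sketch of pipeInput: parse and prepare a test INSERT statement, then wrap it in a ConfiguredStatement as the execute method below does. The statement text and the names FOO, K, and V are illustrative, and this assumes a matching stream has already been created:

// Illustrative call site only; mirrors the unchecked cast used in execute().
final ParsedStatement parsed =
    engine.parse("INSERT INTO FOO (K, V) VALUES ('key', 1);").get(0);
final PreparedStatement<?> prepared = engine.prepare(parsed);
pipeInput((ConfiguredStatement<InsertValues>) ConfiguredStatement.of(
    prepared, SessionConfig.of(config, overrides)));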
Use of io.confluent.ksql.parser.tree.InsertValues in project ksql by confluentinc.
The class KsqlTesterTest, method execute.
@SuppressWarnings("unchecked")
private void execute(final ParsedStatement parsedStatement) {
final PreparedStatement<?> engineStatement = engine.prepare(parsedStatement);
final ConfiguredStatement<?> configured = ConfiguredStatement.of(engineStatement, SessionConfig.of(config, overrides));
createTopics(engineStatement);
if (engineStatement.getStatement() instanceof InsertValues) {
pipeInput((ConfiguredStatement<InsertValues>) configured);
return;
} else if (engineStatement.getStatement() instanceof SetProperty) {
PropertyOverrider.set((ConfiguredStatement<SetProperty>) configured, overrides);
return;
} else if (engineStatement.getStatement() instanceof UnsetProperty) {
PropertyOverrider.unset((ConfiguredStatement<UnsetProperty>) configured, overrides);
return;
}
final ConfiguredStatement<?> injected = formatInjector.inject(configured);
final ExecuteResult result = engine.execute(serviceContext, injected);
// is DDL statement
if (!result.getQuery().isPresent()) {
return;
}
final PersistentQueryMetadata query = (PersistentQueryMetadata) result.getQuery().get();
final Topology topology = query.getTopology();
final Properties properties = new Properties();
properties.putAll(query.getStreamsProperties());
properties.put(StreamsConfig.STATE_DIR_CONFIG, tmpFolder.getRoot().getAbsolutePath());
final TopologyTestDriver driver = new TopologyTestDriver(topology, properties);
final List<TopicInfo> inputTopics = query.getSourceNames().stream()
    .map(sn -> engine.getMetaStore().getSource(sn))
    .map(ds -> new TopicInfo(ds.getKafkaTopicName(), keySerde(ds), valueSerde(ds)))
    .collect(Collectors.toList());
// The sink may be Optional for source tables. Once source table query execution
// is supported, we would need a condition here to skip creating the output topic info.
final DataSource output = engine.getMetaStore().getSource(query.getSinkName().get());
final TopicInfo outputInfo = new TopicInfo(output.getKafkaTopicName(), keySerde(output), valueSerde(output));
driverPipeline.addDriver(driver, inputTopics, outputInfo);
drivers.put(query.getQueryId(), new DriverAndProperties(driver, properties));
}
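For reference, a plausible shape of the DriverAndProperties holder stored in the drivers map above; the actual nested class in KsqlTesterTest may differ. It simply pairs each test driver with the properties it was started with:

// Hypothetical reconstruction, inferred from the constructor call above.
private static final class DriverAndProperties {
  final TopologyTestDriver driver;
  final Properties properties;

  DriverAndProperties(final TopologyTestDriver driver, final Properties properties) {
    this.driver = driver;
    this.properties = properties;
  }
}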