Use of io.confluent.ksql.util.KsqlStatementException in project ksql by confluentinc.
Class SchemaRegisterInjector, method registerRawSchema.
private void registerRawSchema(final SchemaAndId schemaAndId, final String topic,
    final String statementText, final String subject, final Boolean isKey) {
  final int id;
  try {
    id = SchemaRegistryUtil.registerSchema(serviceContext.getSchemaRegistryClient(),
        schemaAndId.rawSchema, topic, subject, isKey);
  } catch (KsqlException e) {
    throw new KsqlStatementException(
        "Could not register schema for topic: " + e.getMessage(), statementText, e);
  }
  final boolean isSandbox = serviceContext instanceof SandboxedServiceContext;
  // The sandbox will return a fixed id when register is called, so only validate
  // the returned id outside the sandbox.
  if (!isSandbox && id != schemaAndId.id) {
    final String schemaIdPropStr =
        isKey ? CommonCreateConfigs.KEY_SCHEMA_ID : CommonCreateConfigs.VALUE_SCHEMA_ID;
    throw new KsqlStatementException(
        "Schema id registered is " + id + " which is different from provided "
            + schemaIdPropStr + " " + schemaAndId.id + "." + System.lineSeparator()
            + "Topic: " + topic + System.lineSeparator()
            + "Subject: " + subject + System.lineSeparator()
            + "Schema: " + schemaAndId.rawSchema,
        statementText);
  }
}
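Both KsqlStatementException constructor shapes used on this page take the text of the failing statement, so errors can be reported against the exact SQL that triggered them; the three-argument form also preserves the underlying cause. A minimal sketch of the same wrapping pattern, using hypothetical names (validateTopicExists, ensureTopicExists) that are not part of the ksql codebase:

// Hypothetical helper illustrating the wrapping pattern above: a low-level
// KsqlException is rethrown as a KsqlStatementException that also carries
// the original statement text and the cause.
private void validateTopicExists(final String topic, final String statementText) {
  try {
    ensureTopicExists(topic); // hypothetical check that throws KsqlException on failure
  } catch (final KsqlException e) {
    throw new KsqlStatementException(
        "Could not validate topic '" + topic + "': " + e.getMessage(),
        statementText,
        e);
  }
}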
Use of io.confluent.ksql.util.KsqlStatementException in project ksql by confluentinc.
Class SchemaRegisterInjector, method registerForCreateAs.
private void registerForCreateAs(final ConfiguredStatement<? extends CreateAsSelect> cas) {
  final CreateSourceCommand createSourceCommand;
  try {
    final ServiceContext sandboxServiceContext = SandboxedServiceContext.create(serviceContext);
    createSourceCommand = (CreateSourceCommand) executionContext.createSandbox(sandboxServiceContext)
        .plan(sandboxServiceContext, cas).getDdlCommand().get();
  } catch (final Exception e) {
    throw new KsqlStatementException(
        "Could not determine output schema for query due to error: " + e.getMessage(),
        cas.getStatementText(), e);
  }
  final SchemaAndId rawKeySchema = (SchemaAndId)
      cas.getSessionConfig().getOverrides().get(CommonCreateConfigs.KEY_SCHEMA_ID);
  final SchemaAndId rawValueSchema = (SchemaAndId)
      cas.getSessionConfig().getOverrides().get(CommonCreateConfigs.VALUE_SCHEMA_ID);
  registerSchemas(createSourceCommand.getSchema(), Pair.of(rawKeySchema, rawValueSchema),
      createSourceCommand.getTopicName(),
      createSourceCommand.getFormats().getKeyFormat(),
      createSourceCommand.getFormats().getKeyFeatures(),
      createSourceCommand.getFormats().getValueFormat(),
      createSourceCommand.getFormats().getValueFeatures(),
      cas.getSessionConfig().getConfig(false), cas.getStatementText(), true);
}
Use of io.confluent.ksql.util.KsqlStatementException in project ksql by confluentinc.
Class InsertValuesExecutor, method buildRecord.
private ProducerRecord<byte[], byte[]> buildRecord(
    final ConfiguredStatement<InsertValues> statement, final MetaStore metaStore,
    final DataSource dataSource, final ServiceContext serviceContext) {
  throwIfDisabled(statement.getSessionConfig().getConfig(false));
  final InsertValues insertValues = statement.getStatement();
  final KsqlConfig config = statement.getSessionConfig().getConfig(true);
  try {
    final KsqlGenericRecord row = new GenericRecordFactory(config, metaStore, clock)
        .build(insertValues.getColumns(), insertValues.getValues(),
            dataSource.getSchema(), dataSource.getDataSourceType());
    final byte[] key = serializeKey(row.key, dataSource, config, serviceContext);
    final byte[] value = serializeValue(row.value, dataSource, config, serviceContext);
    final String topicName = dataSource.getKafkaTopicName();
    return new ProducerRecord<>(topicName, null, row.ts, key, value);
  } catch (final Exception e) {
    throw new KsqlStatementException(
        createInsertFailedExceptionMessage(insertValues) + " " + e.getMessage(),
        statement.getStatementText(), e);
  }
}
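The record returned above is already byte-serialized for both key and value, so any byte-array Kafka producer can send it. A minimal sketch assuming a plain KafkaProducer<byte[], byte[]>, a local broker address, and imports from org.apache.kafka.clients.producer and org.apache.kafka.common.serialization; the real executor obtains its producer through the service context rather than constructing one like this:

// Sketch only: send a record shaped like the one buildRecord returns.
static void sendRecord(final ProducerRecord<byte[], byte[]> record) throws Exception {
  final Properties props = new Properties();
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
  try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(props)) {
    producer.send(record).get(); // block until the broker acknowledges the write
  }
}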
Use of io.confluent.ksql.util.KsqlStatementException in project ksql by confluentinc.
Class ValidatedCommandFactory, method createForTerminateQuery.
private static Command createForTerminateQuery(
    final ConfiguredStatement<? extends Statement> statement,
    final KsqlExecutionContext context) {
  final TerminateQuery terminateQuery = (TerminateQuery) statement.getStatement();
  final Optional<QueryId> queryId = terminateQuery.getQueryId();
  if (!queryId.isPresent()) {
    context.getPersistentQueries().forEach(PersistentQueryMetadata::close);
    return Command.of(statement);
  } else if (queryId.get().toString().toLowerCase()
      .contains(KsqlConfig.KSQL_TRANSIENT_QUERY_NAME_PREFIX_DEFAULT)) {
    return Command.of(statement);
  }
  final PersistentQueryMetadata queryMetadata = context.getPersistentQuery(queryId.get())
      .orElseThrow(() -> new KsqlStatementException(
          "Unknown queryId: " + queryId.get(), statement.getStatementText()));
  if (queryMetadata.getPersistentQueryType() == KsqlConstants.PersistentQueryType.CREATE_SOURCE) {
    throw new KsqlStatementException(String.format(
        "Cannot terminate query '%s' because it is linked to a source table.", queryId.get()),
        statement.getStatementText());
  }
  queryMetadata.close();
  return Command.of(statement);
}
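On the caller side, catching KsqlStatementException gives access both to the human-readable message and to the statement that failed. A minimal sketch; the getSqlStatement() accessor is assumed to return the statement text passed into the constructors above, and the commandFactory/configured/executionContext/LOG names are illustrative placeholders rather than the exact factory API:

// Sketch of a caller surfacing both pieces of context carried by the exception.
try {
  commandFactory.create(configured, executionContext); // illustrative call into the factory
} catch (final KsqlStatementException e) {
  // getSqlStatement() assumed: exposes the statementText supplied to the constructor
  LOG.error("Statement failed: {} (statement: {})", e.getMessage(), e.getSqlStatement());
  throw e;
}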
Use of io.confluent.ksql.util.KsqlStatementException in project ksql by confluentinc.
Class KsqlEngineTest, method shouldThrowWhenExecutingDuplicateTable.
@Test
public void shouldThrowWhenExecutingDuplicateTable() {
  // Given:
  final List<ParsedStatement> parsed = ksqlEngine.parse(
      "CREATE TABLE FOO AS SELECT * FROM TEST2; "
          + "CREATE TABLE FOO WITH (KAFKA_TOPIC='BAR') AS SELECT * FROM TEST2;");
  givenStatementAlreadyExecuted(parsed.get(0));
  final PreparedStatement<?> prepared = prepare(parsed.get(1));

  // When:
  final KsqlStatementException e = assertThrows(
      KsqlStatementException.class,
      () -> ksqlEngine.execute(
          serviceContext,
          ConfiguredStatement.of(prepared, SessionConfig.of(ksqlConfig, new HashMap<>()))));

  // Then:
  assertThat(e, rawMessage(
      is("Cannot add table 'FOO': A table with the same name already exists")));
  assertThat(e, statementText(
      is("CREATE TABLE FOO WITH (KAFKA_TOPIC='BAR') AS SELECT * FROM TEST2;")));
}