Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class AggregateNode, method createAggValToFunctionMap.
private Map<Integer, KsqlAggregateFunction> createAggValToFunctionMap(
    final Map<String, Integer> expressionNames,
    final SchemaKStream aggregateArgExpanded,
    final SchemaBuilder aggregateSchema,
    final KudafInitializer initializer,
    final int initialUdafIndex,
    final FunctionRegistry functionRegistry) {
  try {
    int udafIndexInAggSchema = initialUdafIndex;
    final Map<Integer, KsqlAggregateFunction> aggValToAggFunctionMap = new HashMap<>();
    for (final FunctionCall functionCall : getFunctionList()) {
      final KsqlAggregateFunction aggregateFunctionInfo = functionRegistry.getAggregateFunction(
          functionCall.getName().toString(),
          functionCall.getArguments(),
          aggregateArgExpanded.getSchema());
      final KsqlAggregateFunction aggregateFunction = aggregateFunctionInfo.getInstance(
          expressionNames, functionCall.getArguments());
      aggValToAggFunctionMap.put(udafIndexInAggSchema++, aggregateFunction);
      initializer.addAggregateIntializer(aggregateFunction.getInitialValueSupplier());
      aggregateSchema.field("AGG_COL_" + udafIndexInAggSchema, aggregateFunction.getReturnType());
    }
    return aggValToAggFunctionMap;
  } catch (final Exception e) {
    throw new KsqlException(String.format(
        "Failed to create aggregate val to function map. expressionNames:%s", expressionNames), e);
  }
}
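The try/catch here is the project's standard failure pattern: whatever goes wrong while resolving an aggregate function is wrapped in a KsqlException whose message carries the offending input, with the original exception chained as the cause. Below is a minimal, self-contained sketch of the same pattern; the resolver class and its registry map are hypothetical stand-ins, not KSQL types.

import io.confluent.ksql.util.KsqlException;
import java.util.HashMap;
import java.util.Map;

final class FunctionResolverSketch {

  private final Map<String, String> registry = new HashMap<>();  // hypothetical name-to-impl table

  String resolve(final String name) {
    try {
      final String impl = registry.get(name);
      if (impl == null) {
        throw new IllegalArgumentException("unknown function: " + name);
      }
      return impl;
    } catch (final Exception e) {
      // Same shape as createAggValToFunctionMap: wrap the low-level failure,
      // keep the offending input in the message, and chain the cause.
      throw new KsqlException(String.format("Failed to resolve function. name:%s", name), e);
    }
  }
}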
Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class JoinNode, method buildStream.
@Override
public SchemaKStream buildStream(
    final StreamsBuilder builder, final KsqlConfig ksqlConfig,
    final KafkaTopicClient kafkaTopicClient, final FunctionRegistry functionRegistry,
    final Map<String, Object> props, final SchemaRegistryClient schemaRegistryClient) {
  if (!isLeftJoin()) {
    throw new KsqlException("Join type is not supported yet: " + getType());
  }
  final SchemaKTable table = tableForJoin(
      builder, ksqlConfig, kafkaTopicClient, functionRegistry, props, schemaRegistryClient);
  final SchemaKStream stream = streamForJoin(
      getLeft().buildStream(
          builder, ksqlConfig, kafkaTopicClient, functionRegistry, props, schemaRegistryClient),
      getLeftKeyFieldName(), kafkaTopicClient);
  final KsqlTopicSerDe joinSerDe = getResultTopicSerde(this);
  return stream.leftJoin(
      table, getSchema(),
      getSchema().field(getLeftAlias() + "." + stream.getKeyField().name()),
      joinSerDe, ksqlConfig);
}
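The early throw in buildStream is a guard clause: unsupported join types are rejected before any topology is constructed. A minimal sketch of the same fail-fast check follows; the JoinType enum and validator class are illustrative stand-ins, not part of KSQL.

import io.confluent.ksql.util.KsqlException;

final class JoinSupportSketch {

  enum JoinType { LEFT, INNER, OUTER }  // illustrative stand-in for the node's join types

  // Reject unsupported variants up front, before any expensive plan construction.
  static void requireSupported(final JoinType type) {
    if (type != JoinType.LEFT) {
      throw new KsqlException("Join type is not supported yet: " + type);
    }
  }
}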
Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class ZipUtil, method unzip.
@SuppressWarnings("ResultOfMethodCallIgnored")
public static void unzip(final File sourceFile, final File outputDir) {
  if (!outputDir.exists() && !outputDir.mkdirs()) {
    throw new KsqlException("Failed to create output directory: " + outputDir);
  }
  try (ZipInputStream input = new ZipInputStream(new FileInputStream(sourceFile))) {
    ZipEntry entry;
    while ((entry = input.getNextEntry()) != null) {
      if (entry.isDirectory()) {
        continue;
      }
      final File file = new File(outputDir, entry.getName());
      final File parent = file.getParentFile();
      if (!parent.exists() && !parent.mkdirs()) {
        throw new KsqlException("Failed to create output directory: " + parent);
      }
      try (FileOutputStream output = new FileOutputStream(file)) {
        IOUtils.copy(input, output);
      } catch (final Exception e) {
        throw new RuntimeException("Error expanding entry '" + entry.getName() + "'", e);
      }
    }
    input.closeEntry();
  } catch (final Exception e) {
    throw new KsqlException("Failed to unzip '" + sourceFile + "' into '" + outputDir + "'", e);
  }
}
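KsqlException is unchecked (it extends RuntimeException), so callers of unzip opt in to handling extraction failures. A minimal usage sketch, assuming the ZipUtil class above is on the classpath; the file paths are placeholders.

import io.confluent.ksql.util.KsqlException;
import java.io.File;

public final class UnzipExample {

  public static void main(final String[] args) {
    final File archive = new File("extensions.zip");  // placeholder input path
    final File target = new File("extensions");       // placeholder output directory
    try {
      ZipUtil.unzip(archive, target);
    } catch (final KsqlException e) {
      // Catches both mkdir failures and wrapped I/O failures; the cause is chained.
      System.err.println("Could not expand archive: " + e.getMessage());
    }
  }
}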
Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class CommandStore, method distributeStatement.
/**
* Write the given statement to the command topic, to be read by all nodes in the current
* cluster.
* Does not return until the statement has been successfully written, or an exception is thrown.
*
* @param statementString The string of the statement to be distributed
* @param statement The statement to be distributed
* @param streamsProperties Any command-specific Streams properties to use.
* @return The ID assigned to the statement
*/
public CommandId distributeStatement(
    final String statementString,
    final Statement statement,
    final Map<String, Object> streamsProperties) throws KsqlException {
  final CommandId commandId = commandIdAssigner.getCommandId(statement);
  final Command command = new Command(statementString, streamsProperties);
  try {
    commandProducer.send(new ProducerRecord<>(commandTopic, commandId, command)).get();
  } catch (final Exception e) {
    throw new KsqlException(String.format(
        "Could not write the statement '%s' into the command topic.", statementString), e);
  }
  return commandId;
}
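The blocking behavior promised in the Javadoc comes from calling get() on the Future returned by the producer's send(), which waits until the broker acknowledges the write. A minimal sketch of the same blocking-send pattern against a plain Kafka producer; the broker address and topic are illustrative, and the error wrapping mirrors the method above.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

final class BlockingSendSketch {

  static RecordMetadata sendSync(final String topic, final String key, final String value) {
    final Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");  // illustrative broker address
    props.put("key.serializer", StringSerializer.class.getName());
    props.put("value.serializer", StringSerializer.class.getName());
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
      // send() is asynchronous; get() blocks until the broker acknowledges the write.
      return producer.send(new ProducerRecord<>(topic, key, value)).get();
    } catch (final Exception e) {
      throw new RuntimeException(
          String.format("Could not write record '%s' to topic '%s'.", key, topic), e);
    }
  }
}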
Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class KsqlResource, method registerDdlCommandTasks.
private void registerDdlCommandTasks() {
  ddlCommandTasks.put(Query.class, (statement, statementText, properties) ->
      ksqlEngine.getQueryExecutionPlan((Query) statement).getExecutionPlan());
  ddlCommandTasks.put(CreateStreamAsSelect.class, (statement, statementText, properties) -> {
    final QueryMetadata queryMetadata =
        ksqlEngine.getQueryExecutionPlan(((CreateStreamAsSelect) statement).getQuery());
    if (queryMetadata.getDataSourceType() == DataSource.DataSourceType.KTABLE) {
      throw new KsqlException("Invalid result type. Your SELECT query produces a TABLE. "
          + "Please use CREATE TABLE AS SELECT statement instead.");
    }
    if (queryMetadata instanceof PersistentQueryMetadata) {
      new AvroUtil().validatePersistentQueryResults(
          (PersistentQueryMetadata) queryMetadata, ksqlEngine.getSchemaRegistryClient());
    }
    queryMetadata.close();
    return queryMetadata.getExecutionPlan();
  });
  ddlCommandTasks.put(CreateTableAsSelect.class, (statement, statementText, properties) -> {
    final QueryMetadata queryMetadata =
        ksqlEngine.getQueryExecutionPlan(((CreateTableAsSelect) statement).getQuery());
    if (queryMetadata.getDataSourceType() != DataSource.DataSourceType.KTABLE) {
      throw new KsqlException("Invalid result type. Your SELECT query produces a STREAM. "
          + "Please use CREATE STREAM AS SELECT statement instead.");
    }
    if (queryMetadata instanceof PersistentQueryMetadata) {
      new AvroUtil().validatePersistentQueryResults(
          (PersistentQueryMetadata) queryMetadata, ksqlEngine.getSchemaRegistryClient());
    }
    queryMetadata.close();
    return queryMetadata.getExecutionPlan();
  });
  ddlCommandTasks.put(RegisterTopic.class, (statement, statementText, properties) -> {
    final RegisterTopicCommand registerTopicCommand =
        new RegisterTopicCommand((RegisterTopic) statement);
    new DdlCommandExec(ksqlEngine.getMetaStore().clone()).execute(registerTopicCommand);
    return statement.toString();
  });
  ddlCommandTasks.put(CreateStream.class, (statement, statementText, properties) -> {
    final CreateStreamCommand createStreamCommand = new CreateStreamCommand(
        statementText, (CreateStream) statement, properties, ksqlEngine.getTopicClient(), true);
    executeDdlCommand(createStreamCommand);
    return statement.toString();
  });
  ddlCommandTasks.put(CreateTable.class, (statement, statementText, properties) -> {
    final CreateTableCommand createTableCommand = new CreateTableCommand(
        statementText, (CreateTable) statement, properties, ksqlEngine.getTopicClient(), true);
    executeDdlCommand(createTableCommand);
    return statement.toString();
  });
  ddlCommandTasks.put(DropTopic.class, (statement, statementText, properties) -> {
    final DropTopicCommand dropTopicCommand = new DropTopicCommand((DropTopic) statement);
    new DdlCommandExec(ksqlEngine.getMetaStore().clone()).execute(dropTopicCommand);
    return statement.toString();
  });
  ddlCommandTasks.put(DropStream.class, (statement, statementText, properties) -> {
    final DropSourceCommand dropSourceCommand = new DropSourceCommand(
        (DropStream) statement, DataSource.DataSourceType.KSTREAM, ksqlEngine);
    executeDdlCommand(dropSourceCommand);
    return statement.toString();
  });
  ddlCommandTasks.put(DropTable.class, (statement, statementText, properties) -> {
    final DropSourceCommand dropSourceCommand = new DropSourceCommand(
        (DropTable) statement, DataSource.DataSourceType.KTABLE, ksqlEngine);
    executeDdlCommand(dropSourceCommand);
    return statement.toString();
  });
  ddlCommandTasks.put(TerminateQuery.class,
      (statement, statementText, properties) -> statement.toString());
}
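registerDdlCommandTasks builds a dispatch table: a map from statement class to a handler lambda, so the resource can select the right validation task with one map lookup instead of a long if/else chain. Below is a minimal, self-contained sketch of the pattern; the Stmt interface and the concrete statement classes are simplified stand-ins, not the KSQL types.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

final class DdlDispatchSketch {

  interface Stmt { }                                   // simplified statement marker
  static final class CreateStmt implements Stmt { }    // hypothetical statement types
  static final class DropStmt implements Stmt { }

  private final Map<Class<? extends Stmt>, Function<Stmt, String>> tasks = new HashMap<>();

  DdlDispatchSketch() {
    tasks.put(CreateStmt.class, statement -> "created");
    tasks.put(DropStmt.class, statement -> "dropped");
  }

  String execute(final Stmt statement) {
    final Function<Stmt, String> task = tasks.get(statement.getClass());
    if (task == null) {
      // Fail loudly on statement types with no registered task.
      throw new RuntimeException("Unsupported statement: " + statement.getClass().getName());
    }
    return task.apply(statement);
  }
}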