Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class JoinNodeTest, method shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy:
@Test
public void shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy() {
  setupTopicClientExpectations(1, 1);
  buildJoin();
  KsqlConfig ksqlConfig = mock(KsqlConfig.class);
  KafkaTopicClient kafkaTopicClient = mock(KafkaTopicClient.class);
  FunctionRegistry functionRegistry = mock(FunctionRegistry.class);

  class RightTable extends PlanNode {
    final Schema schema;

    public RightTable(final PlanNodeId id, Schema schema) {
      super(id);
      this.schema = schema;
    }

    @Override
    public Schema getSchema() {
      return schema;
    }

    @Override
    public Field getKeyField() {
      return null;
    }

    @Override
    public List<PlanNode> getSources() {
      return null;
    }

    @Override
    public SchemaKStream buildStream(StreamsBuilder builder,
                                     KsqlConfig ksqlConfig,
                                     KafkaTopicClient kafkaTopicClient,
                                     FunctionRegistry functionRegistry,
                                     Map<String, Object> props,
                                     SchemaRegistryClient schemaRegistryClient) {
      if (props.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
          && props.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG).toString().equalsIgnoreCase("EARLIEST")) {
        return mock(SchemaKTable.class);
      } else {
        throw new KsqlException("auto.offset.reset should be set to EARLIEST.");
      }
    }

    @Override
    protected int getPartitions(KafkaTopicClient kafkaTopicClient) {
      return 1;
    }
  }

  RightTable rightTable = new RightTable(new PlanNodeId("1"), joinNode.getRight().getSchema());
  JoinNode testJoinNode = new JoinNode(
      joinNode.getId(), joinNode.getType(), joinNode.getLeft(), rightTable,
      joinNode.getLeftKeyFieldName(), joinNode.getRightKeyFieldName(),
      joinNode.getLeftAlias(), joinNode.getRightAlias());
  testJoinNode.tableForJoin(
      builder, ksqlConfig, kafkaTopicClient, functionRegistry,
      new HashMap<>(), new MockSchemaRegistryClient());
}
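In this test the overridden buildStream is effectively the assertion: tableForJoin is handed an empty HashMap and is expected to populate it with auto.offset.reset set to earliest before delegating to the right-hand side, so the stand-in RightTable returns a mocked SchemaKTable when the property is present and throws a KsqlException otherwise.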
Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class CommandFactoriesTest, method shouldFailCreateTableIfKeyNameIsIncorrect:
@Test
public void shouldFailCreateTableIfKeyNameIsIncorrect() {
  HashMap<String, Expression> tableProperties = new HashMap<>();
  tableProperties.putAll(properties);
  tableProperties.put(DdlConfig.KEY_NAME_PROPERTY, new StringLiteral("COL3"));
  try {
    final DdlCommand result = commandFactories.create(
        sqlExpression,
        new CreateTable(
            QualifiedName.of("foo"),
            Arrays.asList(new TableElement("COL1", "BIGINT"), new TableElement("COL2", "VARCHAR")),
            true,
            tableProperties),
        Collections.emptyMap());
  } catch (KsqlException e) {
    assertThat(e.getMessage(), equalTo(
        "No column with the provided key column name in the WITH clause, COL3, "
        + "exists in the defined schema."));
  }
}
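One caveat with this pattern: if create() returns normally, the catch block is never entered and the test passes without checking anything. A common guard, sketched here under the assumption that org.junit.Assert.fail is on the test classpath (createTableStatement is a hypothetical stand-in for the CreateTable built inline above), is to fail explicitly when no exception is thrown:

try {
  // createTableStatement stands in for the CreateTable constructed inline in the test above.
  commandFactories.create(sqlExpression, createTableStatement, Collections.emptyMap());
  Assert.fail("create() should have thrown a KsqlException for the unknown key column COL3");
} catch (KsqlException e) {
  assertThat(e.getMessage(), equalTo(
      "No column with the provided key column name in the WITH clause, COL3, "
      + "exists in the defined schema."));
}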
Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class Cli, method runScript:
private void runScript(SqlBaseParser.SingleStatementContext statementContext, String statementText)
    throws IOException {
  SqlBaseParser.RunScriptContext runScriptContext =
      (SqlBaseParser.RunScriptContext) statementContext.statement();
  String schemaFilePath = AstBuilder.unquote(runScriptContext.STRING().getText(), "'");
  String fileContent;
  try {
    fileContent = new String(Files.readAllBytes(Paths.get(schemaFilePath)), StandardCharsets.UTF_8);
  } catch (IOException e) {
    throw new KsqlException(
        "Could not read statements from file: " + schemaFilePath + ". Details: " + e.getMessage(), e);
  }
  setProperty(KsqlConstants.RUN_SCRIPT_STATEMENTS_CONTENT, fileContent);
  printKsqlResponse(restClient.makeKsqlRequest(statementText));
}
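Note that the CLI does not execute the script locally: the file contents are attached via setProperty under KsqlConstants.RUN_SCRIPT_STATEMENTS_CONTENT and the original RUN SCRIPT statement is forwarded through restClient, so the server-side handler shown in the next example is what actually builds and starts the queries.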
Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class StatementExecutor, method handleRunScript:
private void handleRunScript(Command command) throws Exception {
  if (command.getKsqlProperties().containsKey(KsqlConstants.RUN_SCRIPT_STATEMENTS_CONTENT)) {
    String queries =
        (String) command.getKsqlProperties().get(KsqlConstants.RUN_SCRIPT_STATEMENTS_CONTENT);
    List<QueryMetadata> queryMetadataList =
        ksqlEngine.buildMultipleQueries(queries, command.getKsqlProperties());
    for (QueryMetadata queryMetadata : queryMetadataList) {
      if (queryMetadata instanceof PersistentQueryMetadata) {
        PersistentQueryMetadata persistentQueryMetadata = (PersistentQueryMetadata) queryMetadata;
        persistentQueryMetadata.getKafkaStreams().start();
      }
    }
  } else {
    throw new KsqlException("No statements received for LOAD FROM FILE.");
  }
}
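Only PersistentQueryMetadata results have their KafkaStreams topology started here; any non-persistent query metadata returned by buildMultipleQueries is left unstarted, and a RUN SCRIPT command arriving without the statements payload is rejected with a KsqlException.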
Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc.
The class KsqlResource, method distributeStatement:
private CommandStatusEntity distributeStatement(
    String statementText,
    Statement statement,
    Map<String, Object> streamsProperties) throws KsqlException {
  CommandId commandId = commandStore.distributeStatement(statementText, statement, streamsProperties);
  CommandStatus commandStatus;
  try {
    commandStatus = statementExecutor.registerQueuedStatement(commandId)
        .get(distributedCommandResponseTimeout, TimeUnit.MILLISECONDS);
  } catch (TimeoutException exception) {
    log.warn(
        "Timed out after {} ms waiting for command status; statementText: {}",
        distributedCommandResponseTimeout, statementText, exception);
    commandStatus = statementExecutor.getStatus(commandId).get();
  } catch (Exception e) {
    throw new KsqlException(
        String.format("Could not write the statement '%s' into the command topic.", statementText), e);
  }
  return new CommandStatusEntity(statementText, commandId, commandStatus);
}
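The timeout handling here is the standard java.util.concurrent bounded-wait pattern: block on the Future for a fixed interval, fall back to the last known status on TimeoutException, and wrap any other failure in a KsqlException. A minimal, self-contained sketch of the bounded wait and fallback (the executor, the sleeping task, and the status strings are illustrative assumptions, not KSQL code):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class BoundedWaitSketch {
  public static void main(String[] args) throws Exception {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    // Simulate a command that takes longer than the caller is willing to wait.
    Future<String> pendingStatus = executor.submit(() -> {
      Thread.sleep(500);
      return "SUCCESS";
    });
    String status;
    try {
      // Bounded wait, mirroring registerQueuedStatement(...).get(timeout, MILLISECONDS).
      status = pendingStatus.get(100, TimeUnit.MILLISECONDS);
    } catch (TimeoutException timeout) {
      // Fall back to a last-known status instead of failing the request.
      status = "QUEUED";
    }
    System.out.println("Command status: " + status);
    executor.shutdownNow();
  }
}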