Use of io.confluent.ksql.metastore.model.KsqlTable in project ksql by confluentinc.
The class DdlCommandExecTest, method shouldAddNormalTableWhenNoTypeIsSpecified.
@Test
public void shouldAddNormalTableWhenNoTypeIsSpecified() {
  // Given:
  final CreateTableCommand cmd = buildCreateTable(SourceName.of("t1"), false, null);

  // When:
  cmdExec.execute(SQL_TEXT, cmd, true, NO_QUERY_SOURCES);

  // Then:
  final KsqlTable ksqlTable = (KsqlTable) metaStore.getSource(SourceName.of("t1"));
  assertThat(ksqlTable.isSource(), is(false));
}
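For contrast, a minimal sketch of the companion case. It assumes that the second argument of the test-local helper buildCreateTable toggles the source-table flag, which the call above suggests but does not show; the test name below is hypothetical.

@Test
public void shouldAddSourceTableWhenSourceFlagIsSet() {
  // Given: same helper, with the (assumed) source-table flag set
  final CreateTableCommand cmd = buildCreateTable(SourceName.of("t1"), true, null);

  // When:
  cmdExec.execute(SQL_TEXT, cmd, true, NO_QUERY_SOURCES);

  // Then: the registered table should report itself as a source table
  final KsqlTable ksqlTable = (KsqlTable) metaStore.getSource(SourceName.of("t1"));
  assertThat(ksqlTable.isSource(), is(true));
}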
Use of io.confluent.ksql.metastore.model.KsqlTable in project ksql by confluentinc.
The class SchemaKStreamTest, method init.
@Before
@SuppressWarnings("rawtypes")
public void init() {
  functionRegistry = new InternalFunctionRegistry();
  schemaResolver = new StepSchemaResolver(ksqlConfig, functionRegistry);
  ksqlStream = (KsqlStream) metaStore.getSource(SourceName.of("TEST1"));
  final KsqlTable<?> ksqlTable = (KsqlTable) metaStore.getSource(SourceName.of("TEST2"));
  schemaKTable = new SchemaKTable(
      tableSourceStep, ksqlTable.getSchema(), keyFormat, ksqlConfig, functionRegistry);
  schemaKStream = new SchemaKStream(
      streamSourceStep, ksqlStream.getSchema(), keyFormat, ksqlConfig, functionRegistry);
}
Use of io.confluent.ksql.metastore.model.KsqlTable in project ksql by confluentinc.
The class CreateSourceFactoryTest, method shouldThrowInCreateStreamOrReplaceOnSourceTables.
@Test
public void shouldThrowInCreateStreamOrReplaceOnSourceTables() {
  // Given:
  final SourceName existingTableName = SourceName.of("existingTableName");
  final KsqlTable existingTable = mock(KsqlTable.class);
  when(existingTable.getDataSourceType()).thenReturn(DataSourceType.KTABLE);
  when(existingTable.isSource()).thenReturn(true);
  when(metaStore.getSource(existingTableName)).thenReturn(existingTable);
  final CreateTable ddlStatement = new CreateTable(
      existingTableName,
      TableElements.of(
          tableElement("COL1", new Type(BIGINT), PRIMARY_KEY_CONSTRAINT),
          tableElement("COL2", new Type(SqlTypes.STRING))),
      true,
      false,
      withProperties,
      false);

  // When:
  final Exception e = assertThrows(
      KsqlException.class,
      () -> createSourceFactory.createTableCommand(ddlStatement, ksqlConfig));

  // Then:
  assertThat(e.getMessage(), containsString(
      "Cannot add table 'existingTableName': CREATE OR REPLACE is not supported on source tables."));
}
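For orientation, the rejected DDL object corresponds at the SQL level to a CREATE OR REPLACE TABLE statement issued against an existing source table. A minimal sketch; the statement text and WITH properties are illustrative assumptions, not taken from the test:

// Illustrative only: the kind of statement the factory rejects when the target is a source table.
final String rejectedSql =
    "CREATE OR REPLACE TABLE existingTableName (COL1 BIGINT PRIMARY KEY, COL2 STRING) "
        + "WITH (KAFKA_TOPIC='some_topic', VALUE_FORMAT='JSON');";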
Use of io.confluent.ksql.metastore.model.KsqlTable in project ksql by confluentinc.
The class MetaStoreFixture, method getNewMetaStore.
public static MutableMetaStore getNewMetaStore(final FunctionRegistry functionRegistry, final ValueFormat valueFormat) {
  final MutableMetaStore metaStore = new MetaStoreImpl(functionRegistry);
  final KeyFormat keyFormat = KeyFormat.nonWindowed(FormatInfo.of(FormatFactory.KAFKA.name()), SerdeFeatures.of());

  final LogicalSchema test1Schema = LogicalSchema.builder()
      .keyColumn(ColumnName.of("COL0"), SqlTypes.BIGINT)
      .valueColumn(ColumnName.of("COL1"), SqlTypes.STRING)
      .valueColumn(ColumnName.of("COL2"), SqlTypes.STRING)
      .valueColumn(ColumnName.of("COL3"), SqlTypes.DOUBLE)
      .valueColumn(ColumnName.of("COL4"), SqlTypes.array(SqlTypes.DOUBLE))
      .valueColumn(ColumnName.of("COL5"), SqlTypes.map(SqlTypes.STRING, SqlTypes.DOUBLE))
      .headerColumn(ColumnName.of("HEAD"), Optional.empty())
      .build();
  final KsqlTopic ksqlTopic0 = new KsqlTopic("test0", keyFormat, valueFormat);
  final KsqlStream<?> ksqlStream0 = new KsqlStream<>("sqlexpression", SourceName.of("TEST0"), test1Schema, Optional.empty(), false, ksqlTopic0, false);
  metaStore.putSource(ksqlStream0, false);
  final KsqlTopic ksqlTopic1 = new KsqlTopic("test1", keyFormat, valueFormat);
  final KsqlStream<?> ksqlStream1 = new KsqlStream<>("sqlexpression", SourceName.of("TEST1"), test1Schema, Optional.empty(), false, ksqlTopic1, false);
  metaStore.putSource(ksqlStream1, false);

  final LogicalSchema test2Schema = LogicalSchema.builder()
      .keyColumn(ColumnName.of("COL0"), SqlTypes.BIGINT)
      .valueColumn(ColumnName.of("COL1"), SqlTypes.STRING)
      .valueColumn(ColumnName.of("COL2"), SqlTypes.STRING)
      .valueColumn(ColumnName.of("COL3"), SqlTypes.DOUBLE)
      .valueColumn(ColumnName.of("COL4"), SqlTypes.BOOLEAN)
      .build();
  final KsqlTopic ksqlTopic2 = new KsqlTopic("test2", keyFormat, valueFormat);
  final KsqlTable<String> ksqlTable = new KsqlTable<>("sqlexpression", SourceName.of("TEST2"), test2Schema, Optional.empty(), false, ksqlTopic2, false);
  metaStore.putSource(ksqlTable, false);

  final SqlType addressSchema = SqlTypes.struct()
      .field("NUMBER", SqlTypes.BIGINT).field("STREET", SqlTypes.STRING)
      .field("CITY", SqlTypes.STRING).field("STATE", SqlTypes.STRING)
      .field("ZIPCODE", SqlTypes.BIGINT).build();
  final SqlType categorySchema = SqlTypes.struct()
      .field("ID", SqlTypes.BIGINT).field("NAME", SqlTypes.STRING).build();
  final SqlType itemInfoSchema = SqlTypes.struct()
      .field("ITEMID", SqlTypes.BIGINT).field("NAME", SqlTypes.STRING)
      .field("CATEGORY", categorySchema).build();
  final LogicalSchema ordersSchema = LogicalSchema.builder()
      .keyColumn(ColumnName.of("ORDERTIME"), SqlTypes.BIGINT)
      .valueColumn(ColumnName.of("ORDERID"), SqlTypes.BIGINT)
      .valueColumn(ColumnName.of("ITEMID"), SqlTypes.STRING)
      .valueColumn(ColumnName.of("ITEMINFO"), itemInfoSchema)
      .valueColumn(ColumnName.of("ORDERUNITS"), SqlTypes.INTEGER)
      .valueColumn(ColumnName.of("ARRAYCOL"), SqlTypes.array(SqlTypes.DOUBLE))
      .valueColumn(ColumnName.of("MAPCOL"), SqlTypes.map(SqlTypes.STRING, SqlTypes.DOUBLE))
      .valueColumn(ColumnName.of("ADDRESS"), addressSchema)
      .valueColumn(ColumnName.of("TIMESTAMPCOL"), SqlTypes.TIMESTAMP)
      .valueColumn(ColumnName.of("TIMECOL"), SqlTypes.TIME)
      .valueColumn(ColumnName.of("DATECOL"), SqlTypes.DATE)
      .valueColumn(ColumnName.of("BYTESCOL"), SqlTypes.BYTES)
      .build();
  final KsqlTopic ksqlTopicOrders = new KsqlTopic("orders_topic", keyFormat, valueFormat);
  final KsqlStream<?> ksqlStreamOrders = new KsqlStream<>("sqlexpression", SourceName.of("ORDERS"), ordersSchema, Optional.empty(), false, ksqlTopicOrders, false);
  metaStore.putSource(ksqlStreamOrders, false);

  final LogicalSchema testTable3 = LogicalSchema.builder()
      .keyColumn(ColumnName.of("COL0"), SqlTypes.BIGINT)
      .valueColumn(ColumnName.of("COL1"), SqlTypes.STRING)
      .valueColumn(ColumnName.of("COL2"), SqlTypes.STRING)
      .valueColumn(ColumnName.of("COL3"), SqlTypes.DOUBLE)
      .valueColumn(ColumnName.of("COL4"), SqlTypes.BOOLEAN)
      .build();
  final KsqlTopic ksqlTopic3 = new KsqlTopic("test3", keyFormat, valueFormat);
  final KsqlTable<String> ksqlTable3 = new KsqlTable<>("sqlexpression", SourceName.of("TEST3"), testTable3, Optional.empty(), false, ksqlTopic3, false);
  metaStore.putSource(ksqlTable3, false);

  final SqlType nestedOrdersSchema = SqlTypes.struct()
      .field("ORDERTIME", SqlTypes.BIGINT).field("ORDERID", SqlTypes.BIGINT)
      .field("ITEMID", SqlTypes.STRING).field("ITEMINFO", itemInfoSchema)
      .field("ORDERUNITS", SqlTypes.INTEGER)
      .field("ARRAYCOL", SqlTypes.array(SqlTypes.DOUBLE))
      .field("MAPCOL", SqlTypes.map(SqlTypes.STRING, SqlTypes.DOUBLE))
      .field("ADDRESS", addressSchema).build();
  final LogicalSchema nestedArrayStructMapSchema = LogicalSchema.builder()
      .keyColumn(ColumnName.of("K"), SqlTypes.STRING)
      .valueColumn(ColumnName.of("ARRAYCOL"), SqlTypes.array(itemInfoSchema))
      .valueColumn(ColumnName.of("MAPCOL"), SqlTypes.map(SqlTypes.STRING, itemInfoSchema))
      .valueColumn(ColumnName.of("NESTED_ORDER_COL"), nestedOrdersSchema)
      .valueColumn(ColumnName.of("ITEM"), itemInfoSchema)
      .build();
  final KsqlTopic nestedArrayStructMapTopic = new KsqlTopic("NestedArrayStructMap_topic", keyFormat, valueFormat);
  final KsqlStream<?> nestedArrayStructMapOrders = new KsqlStream<>("sqlexpression", SourceName.of("NESTED_STREAM"), nestedArrayStructMapSchema, Optional.empty(), false, nestedArrayStructMapTopic, false);
  metaStore.putSource(nestedArrayStructMapOrders, false);

  final KsqlTopic ksqlTopic4 = new KsqlTopic("test4", keyFormat, valueFormat);
  final KsqlStream<?> ksqlStream4 = new KsqlStream<>("sqlexpression4", SourceName.of("TEST4"), test1Schema, Optional.empty(), false, ksqlTopic4, false);
  metaStore.putSource(ksqlStream4, false);

  final LogicalSchema sensorReadingsSchema = LogicalSchema.builder()
      .keyColumn(ColumnName.of("ID"), SqlTypes.BIGINT)
      .valueColumn(ColumnName.of("SENSOR_NAME"), SqlTypes.STRING)
      .valueColumn(ColumnName.of("ARR1"), SqlTypes.array(SqlTypes.BIGINT))
      .valueColumn(ColumnName.of("ARR2"), SqlTypes.array(SqlTypes.STRING))
      .build();
  final KsqlTopic ksqlTopicSensorReadings = new KsqlTopic("sensor_readings_topic", keyFormat, valueFormat);
  final KsqlStream<?> ksqlStreamSensorReadings = new KsqlStream<>("sqlexpression", SourceName.of("SENSOR_READINGS"), sensorReadingsSchema, Optional.empty(), false, ksqlTopicSensorReadings, false);
  metaStore.putSource(ksqlStreamSensorReadings, false);

  final LogicalSchema testTable5 = LogicalSchema.builder()
      .keyColumn(ColumnName.of("A"), SqlTypes.BOOLEAN)
      .valueColumn(ColumnName.of("B"), SqlTypes.BOOLEAN)
      .valueColumn(ColumnName.of("C"), SqlTypes.BOOLEAN)
      .valueColumn(ColumnName.of("D"), SqlTypes.BOOLEAN)
      .valueColumn(ColumnName.of("E"), SqlTypes.BOOLEAN)
      .valueColumn(ColumnName.of("F"), SqlTypes.BOOLEAN)
      .valueColumn(ColumnName.of("G"), SqlTypes.BOOLEAN)
      .build();
  final KsqlTopic ksqlTopic5 = new KsqlTopic("test5", keyFormat, valueFormat);
  final KsqlTable<String> ksqlTable5 = new KsqlTable<>("sqlexpression", SourceName.of("TEST5"), testTable5, Optional.empty(), false, ksqlTopic5, false);
  metaStore.putSource(ksqlTable5, false);

  return metaStore;
}
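As a usage sketch (not part of the fixture itself), a test class would typically build this metastore once during setup and then look up the registered sources by name. The JSON value format and the field placement below are illustrative assumptions:

// Hypothetical test setup; JSON as the value format is an assumption for illustration.
private final MutableMetaStore metaStore = MetaStoreFixture.getNewMetaStore(
    new InternalFunctionRegistry(),
    ValueFormat.of(FormatInfo.of(FormatFactory.JSON.name()), SerdeFeatures.of()));

// TEST2 was registered above as a KsqlTable, so the lookup can be cast to the table type.
final KsqlTable<?> test2 = (KsqlTable<?>) metaStore.getSource(SourceName.of("TEST2"));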
Use of io.confluent.ksql.metastore.model.KsqlTable in project ksql by confluentinc.
The class ListSourceExecutorTest, method shouldShowTables.
@Test
public void shouldShowTables() {
  // Given:
  final KsqlTable<?> table1 = engine.givenSource(DataSourceType.KTABLE, "table1");
  final KsqlTable<?> table2 = engine.givenSource(DataSourceType.KTABLE, "table2");
  engine.givenSource(DataSourceType.KSTREAM, "stream");

  // When:
  final TablesList descriptionList = (TablesList) CUSTOM_EXECUTORS.listTables()
      .execute((ConfiguredStatement<ListTables>) engine.configure("LIST TABLES;"),
          SESSION_PROPERTIES, engine.getEngine(), engine.getServiceContext())
      .getEntity()
      .orElseThrow(IllegalStateException::new);

  // Then: only the two tables are listed; the stream is excluded.
  assertThat(descriptionList.getTables(), containsInAnyOrder(
      new SourceInfo.Table(table1.getName().toString(FormatOptions.noEscape()),
          table1.getKsqlTopic().getKafkaTopicName(),
          table1.getKsqlTopic().getKeyFormat().getFormat(),
          table1.getKsqlTopic().getValueFormat().getFormat(),
          table1.getKsqlTopic().getKeyFormat().isWindowed()),
      new SourceInfo.Table(table2.getName().toString(FormatOptions.noEscape()),
          table2.getKsqlTopic().getKafkaTopicName(),
          table2.getKsqlTopic().getKeyFormat().getFormat(),
          table2.getKsqlTopic().getValueFormat().getFormat(),
          table2.getKsqlTopic().getKeyFormat().isWindowed())));
}