Search in sources:

Example 16 with KsqlConfig

use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.

From class KsqlGenericRowAvroSerializerTest, method shouldFailForIncompatibleType.

@Test
public void shouldFailForIncompatibleType() {
    // Serializing a row whose column types do not match the Avro schema must fail:
    // the fourth column is the String "10.0" where the schema expects a double.
    SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
    KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
        new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
    // Use a parameterized List<Object> instead of a raw List.
    List<Object> columns = Arrays.asList(
        1511897796092L,
        1L,
        "item_1",
        "10.0", // incompatible: String where the schema expects a double
        new Double[] { 100.0 },
        Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    try {
        // The return value is irrelevant; serialize must throw before producing bytes.
        ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
        Assert.fail("Did not fail for incompatible types.");
    } catch (Exception e) {
        // The thrown exception wraps the underlying Kafka SerializationException,
        // so its message is the cause's toString().
        assertThat(e.getMessage(), equalTo("org.apache.kafka.common.errors.SerializationException: Error serializing Avro message"));
    }
}
Also used : GenericRow(io.confluent.ksql.GenericRow) HashMap(java.util.HashMap) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig) List(java.util.List) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)

Example 17 with KsqlConfig

use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.

From class SqlPredicateTest, method init.

@Before
public void init() {
    // Per-test fixture: a fresh metastore and function registry, plus a KStream
    // over the TEST1 source topic using that stream's own row serde.
    metaStore = MetaStoreFixture.getNewMetaStore();
    functionRegistry = new FunctionRegistry();
    ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
    final StreamsBuilder streamsBuilder = new StreamsBuilder();
    final String sourceTopicName = ksqlStream.getKsqlTopic().getKafkaTopicName();
    kStream = streamsBuilder.stream(
        sourceTopicName,
        Consumed.with(
            Serdes.String(),
            ksqlStream.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
                null, new KsqlConfig(Collections.emptyMap()), false, new MockSchemaRegistryClient())));
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig) Before(org.junit.Before)

Example 18 with KsqlConfig

use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.

From class KsqlSchemaRegistryClientFactoryTest, method getTestSslContext.

// Can't mock SSLContext.
private static SSLContext getTestSslContext() {
    // Derive the schema-registry-prefixed client SSL settings from a default
    // KsqlConfig, feed them into a client-mode SslFactory, and return its context.
    final Map<String, Object> clientSslConfigs =
        new KsqlConfig(Collections.emptyMap())
            .valuesWithPrefixOverride(KsqlConfig.KSQL_SCHEMA_REGISTRY_PREFIX);
    final SslFactory factory = new SslFactory(Mode.CLIENT);
    factory.configure(clientSslConfigs);
    return factory.sslContext();
}
Also used : KsqlConfig(io.confluent.ksql.util.KsqlConfig) SslFactory(org.apache.kafka.common.security.ssl.SslFactory)

Example 19 with KsqlConfig

use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.

From class DataGen, method main.

public static void main(String[] args) {
    // Entry point for the KSQL data generator: parse CLI arguments, build a random
    // data generator from the schema file, select a producer matching the requested
    // output format, then stream generated rows into the target Kafka topic.
    Arguments arguments;
    try {
        arguments = new Arguments.Builder().parseArgs(args).build();
    } catch (Arguments.ArgumentParseException exception) {
        System.err.println(exception.getMessage());
        usage(1);
        return;
    } catch (IOException exception) {
        System.err.printf("IOException encountered: %s%n", exception.getMessage());
        return;
    }
    if (arguments.help) {
        usage(0);
        // Stop after printing help. Without this return, if usage() does not
        // terminate the JVM itself, execution would fall through and start
        // generating data — every other usage() call site is followed by return.
        return;
    }
    Generator generator;
    try {
        generator = new Generator(arguments.schemaFile, new Random());
    } catch (IOException exception) {
        System.err.printf("IOException encountered: %s%n", exception.getMessage());
        return;
    }
    DataGenProducer dataProducer;
    switch(arguments.format) {
        case AVRO:
            // Avro needs the schema registry URL so the producer can register schemas.
            dataProducer = new AvroProducer(new KsqlConfig(Collections.singletonMap(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY, arguments.schemaRegistryUrl)));
            break;
        case JSON:
            dataProducer = new JsonProducer();
            break;
        case DELIMITED:
            dataProducer = new DelimitedProducer();
            break;
        default:
            System.err.printf("Invalid format in '%s'; was expecting one of AVRO, JSON, or DELIMITED%n", arguments.format);
            usage(1);
            return;
    }
    Properties props = new Properties();
    props.put("bootstrap.servers", arguments.bootstrapServer);
    props.put("client.id", "KSQLDataGenProducer");
    try {
        // Optional properties file overlays/extends the defaults above.
        if (arguments.propertiesFile != null) {
            props.load(arguments.propertiesFile);
        }
    } catch (IOException exception) {
        System.err.printf("IOException encountered: %s%n", exception.getMessage());
        return;
    }
    dataProducer.populateTopic(props, generator, arguments.topicName, arguments.keyName, arguments.iterations, arguments.maxInterval);
}
Also used : Random(java.util.Random) KsqlConfig(io.confluent.ksql.util.KsqlConfig) IOException(java.io.IOException) Properties(java.util.Properties) Generator(io.confluent.avro.random.generator.Generator)

Example 20 with KsqlConfig

use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.

From class EmbeddedKsql, method main.

public static void main(String[] args) throws Exception {
    // Start an embedded KSQL context with default configuration and run a fixed
    // sequence of statements: register a topic, declare a stream over it, and
    // spin up derived stream/table queries.
    final KsqlContext context = KsqlContext.create(new KsqlConfig(Collections.emptyMap()));
    final String[] statements = {
        "REGISTER TOPIC orders_topic WITH (format = 'json', " + "kafka_topic='orders_topic_json');",
        "CREATE STREAM orders (ordertime bigint, orderid bigint, itemid varchar, " + "orderunits double, arraycol array<double>, mapcol map<varchar, double>) " + "WITH (topicname = 'orders_topic' , key='orderid');\n",
        "CREATE STREAM BIGORDERS AS SELECT * FROM ORDERS WHERE ORDERUNITS > 5;",
        "SELECT * FROM ORDERS;",
        "CREATE TABLE ORDERSUMS AS select itemid, sum(orderunits) from orders window " + "TUMBLING ( size 30 second) group by itemid;"
    };
    for (final String statement : statements) {
        context.sql(statement);
    }
    System.out.println("Queries are running!");
}
Also used : KsqlConfig(io.confluent.ksql.util.KsqlConfig) KsqlContext(io.confluent.ksql.KsqlContext)

Aggregations

KsqlConfig (io.confluent.ksql.util.KsqlConfig)29 HashMap (java.util.HashMap)13 MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient)11 SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)9 Before (org.junit.Before)9 Test (org.junit.Test)9 KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl)8 KsqlEngine (io.confluent.ksql.KsqlEngine)7 FunctionRegistry (io.confluent.ksql.function.FunctionRegistry)6 List (java.util.List)5 StreamsBuilder (org.apache.kafka.streams.StreamsBuilder)5 GenericRow (io.confluent.ksql.GenericRow)4 KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient)4 Map (java.util.Map)4 KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer)2 KsqlTopic (io.confluent.ksql.metastore.KsqlTopic)2 MetaStoreImpl (io.confluent.ksql.metastore.MetaStoreImpl)2 MockKafkaTopicClient (io.confluent.ksql.rest.server.mock.MockKafkaTopicClient)2 KsqlException (io.confluent.ksql.util.KsqlException)2 GenericData (org.apache.avro.generic.GenericData)2