Use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.
From the class KsqlGenericRowAvroSerializerTest, method shouldFailForIncompatibleType.
@Test
public void shouldFailForIncompatibleType() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer = new KsqlGenericRowAvroSerializer(
      schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  // The fourth column is the String "10.0" where the value schema declares a double.
  List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", "10.0",
      new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  GenericRow genericRow = new GenericRow(columns);
  try {
    byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
    Assert.fail("Did not fail for incompatible types.");
  } catch (Exception e) {
    assertThat(e.getMessage(), equalTo(
        "org.apache.kafka.common.errors.SerializationException: Error serializing Avro message"));
  }
}
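For contrast, a minimal sketch of the happy path under the same setup. It reuses the serializer and the class-level schema field from the test above; the column layout mirrors the test, with the fourth column passed as a Double rather than a String, so serialize() should return the Avro-encoded bytes instead of throwing.

List<Object> validColumns = Arrays.asList(
    1511897796092L, 1L, "item_1", 10.0,
    new Double[] { 100.0 },
    Collections.singletonMap("key1", 100.0));
// Succeeds: every column now matches the declared Avro field type.
byte[] bytes = ksqlGenericRowAvroSerializer.serialize("t1", new GenericRow(validColumns));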
Use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.
From the class SqlPredicateTest, method init.
@Before
public void init() {
  metaStore = MetaStoreFixture.getNewMetaStore();
  functionRegistry = new FunctionRegistry();
  ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
  StreamsBuilder builder = new StreamsBuilder();
  kStream = builder.stream(
      ksqlStream.getKsqlTopic().getKafkaTopicName(),
      Consumed.with(
          Serdes.String(),
          ksqlStream.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
              null, new KsqlConfig(Collections.emptyMap()), false, new MockSchemaRegistryClient())));
}
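The MockSchemaRegistryClient passed to getGenericRowSerde keeps the test hermetic: schema lookups and registrations happen in memory rather than against a live Schema Registry. A minimal sketch of pre-loading the mock client with a schema; the subject name and Avro schema here are illustrative assumptions, not taken from the test, and register(...) declares checked exceptions (IOException, RestClientException) that are elided.

SchemaRegistryClient client = new MockSchemaRegistryClient();
org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(
    "{\"type\":\"record\",\"name\":\"TestRow\","
    + "\"fields\":[{\"name\":\"ID\",\"type\":\"long\"}]}");
// Subjects conventionally follow the <topic>-value naming pattern.
client.register("test1-value", avroSchema);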
Use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.
From the class KsqlSchemaRegistryClientFactoryTest, method getTestSslContext.
// Can't mock SSLContext.
private static SSLContext getTestSslContext() {
  final SslFactory sslFactory = new SslFactory(Mode.CLIENT);
  final Map<String, Object> configs = new KsqlConfig(Collections.emptyMap())
      .valuesWithPrefixOverride(KsqlConfig.KSQL_SCHEMA_REGISTRY_PREFIX);
  sslFactory.configure(configs);
  return sslFactory.sslContext();
}
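valuesWithPrefixOverride resolves settings carrying the "ksql.schema.registry." prefix on top of the unprefixed values, which is how Schema Registry-specific SSL options reach the client. A minimal sketch, assuming Kafka's standard key names from org.apache.kafka.common.config.SslConfigs:

Map<String, Object> props = new HashMap<>();
props.put(KsqlConfig.KSQL_SCHEMA_REGISTRY_PREFIX + SslConfigs.SSL_PROTOCOL_CONFIG, "TLSv1.2");
Map<String, Object> resolved = new KsqlConfig(props)
    .valuesWithPrefixOverride(KsqlConfig.KSQL_SCHEMA_REGISTRY_PREFIX);
// resolved now maps "ssl.protocol" to "TLSv1.2" with the prefix stripped,
// ready to hand to SslFactory.configure(...) as above.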
Use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.
From the class DataGen, method main.
public static void main(String[] args) {
  Arguments arguments;
  try {
    arguments = new Arguments.Builder().parseArgs(args).build();
  } catch (Arguments.ArgumentParseException exception) {
    System.err.println(exception.getMessage());
    usage(1);
    return;
  } catch (IOException exception) {
    System.err.printf("IOException encountered: %s%n", exception.getMessage());
    return;
  }
  if (arguments.help) {
    usage(0);
  }
  Generator generator;
  try {
    generator = new Generator(arguments.schemaFile, new Random());
  } catch (IOException exception) {
    System.err.printf("IOException encountered: %s%n", exception.getMessage());
    return;
  }
  DataGenProducer dataProducer;
  switch (arguments.format) {
    case AVRO:
      dataProducer = new AvroProducer(new KsqlConfig(Collections.singletonMap(
          KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY, arguments.schemaRegistryUrl)));
      break;
    case JSON:
      dataProducer = new JsonProducer();
      break;
    case DELIMITED:
      dataProducer = new DelimitedProducer();
      break;
    default:
      System.err.printf("Invalid format in '%s'; was expecting one of AVRO, JSON, or DELIMITED%n",
          arguments.format);
      usage(1);
      return;
  }
  Properties props = new Properties();
  props.put("bootstrap.servers", arguments.bootstrapServer);
  props.put("client.id", "KSQLDataGenProducer");
  try {
    if (arguments.propertiesFile != null) {
      props.load(arguments.propertiesFile);
    }
  } catch (IOException exception) {
    System.err.printf("IOException encountered: %s%n", exception.getMessage());
    return;
  }
  dataProducer.populateTopic(props, generator, arguments.topicName, arguments.keyName,
      arguments.iterations, arguments.maxInterval);
}
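The same pipeline can be driven programmatically, without the CLI argument parsing. A minimal sketch for the JSON case; the schema file path, topic, key, and count values are illustrative assumptions, the Generator file-based constructor overload is assumed from Confluent's avro-random-generator, and IOException handling is elided.

Properties props = new Properties();
props.put("bootstrap.servers", "localhost:9092");
props.put("client.id", "KSQLDataGenProducer");
// Reads the Avro schema that drives random record generation (path assumed).
Generator generator = new Generator(new File("./orders_schema.avro"), new Random());
// Mirrors the populateTopic(...) call in main: topic, key field, message count, max interval.
new JsonProducer().populateTopic(props, generator, "orders_topic", "orderid", 1000, -1L);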
Use of io.confluent.ksql.util.KsqlConfig in project ksql by confluentinc.
From the class EmbeddedKsql, method main.
public static void main(String[] args) throws Exception {
  KsqlContext ksqlContext = KsqlContext.create(new KsqlConfig(Collections.emptyMap()));
  ksqlContext.sql("REGISTER TOPIC orders_topic WITH (format = 'json', "
      + "kafka_topic='orders_topic_json');");
  ksqlContext.sql("CREATE STREAM orders (ordertime bigint, orderid bigint, itemid varchar, "
      + "orderunits double, arraycol array<double>, mapcol map<varchar, double>) "
      + "WITH (topicname = 'orders_topic' , key='orderid');\n");
  ksqlContext.sql("CREATE STREAM BIGORDERS AS SELECT * FROM ORDERS WHERE ORDERUNITS > 5;");
  ksqlContext.sql("SELECT * FROM ORDERS;");
  ksqlContext.sql("CREATE TABLE ORDERSUMS AS select itemid, sum(orderunits) from orders window "
      + "TUMBLING ( size 30 second) group by itemid;");
  System.out.println("Queries are running!");
}
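The empty map means the embedded engine runs with KsqlConfig's defaults. A minimal sketch, assuming Kafka's standard bootstrap.servers client property (the same key used in the DataGen example above), of pointing the engine at a specific cluster instead:

KsqlContext ksqlContext = KsqlContext.create(new KsqlConfig(
    Collections.singletonMap("bootstrap.servers", "localhost:9092")));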