Use of com.azure.cosmos.util.CosmosPagedIterable in project kafka-connect-cosmosdb by microsoft: class SinkConnectorIT, method testPostJsonMessageWithTTL.
/**
* Post a JSON message with TTL enabled. First verify that the item exists in Cosmos DB.
* Then wait a few seconds and read from Cosmos DB again to confirm the item has expired.
*/
@Test
public void testPostJsonMessageWithTTL() throws InterruptedException, ExecutionException {
// Configure Kafka Config
Properties kafkaProperties = createKafkaProducerProperties();
kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName());
// Create sink connector with added TTL configs
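// The InsertField transform adds a static "ttl" field with value "5" to every record, and the
// Cast transform converts it to int32 so Cosmos DB treats it as a per-item TTL of 5 seconds.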
connectClient.addConnector(connectConfig
    .withConfig("transforms", "insertTTL,castTTLInt")
    .withConfig("transforms.insertTTL.type", "org.apache.kafka.connect.transforms.InsertField$Value")
    .withConfig("transforms.insertTTL.static.field", "ttl")
    .withConfig("transforms.insertTTL.static.value", "5")
    .withConfig("transforms.castTTLInt.type", "org.apache.kafka.connect.transforms.Cast$Value")
    .withConfig("transforms.castTTLInt.spec", "ttl:int32")
    .build());
// Send Kafka message to topic
logger.debug("Sending Kafka message to " + kafkaProperties.getProperty("bootstrap.servers"));
Person person = new Person("Lucy Ferr", RandomUtils.nextLong(1L, 9999999L) + "");
ObjectMapper om = new ObjectMapper();
ProducerRecord<String, JsonNode> personRecord = new ProducerRecord<>(kafkaTopicJson, person.getId(), om.valueToTree(person));
producer = new KafkaProducer<>(kafkaProperties);
producer.send(personRecord).get();
// Wait a few seconds for the sink connector to push data to Cosmos DB
sleep(5000);
// Query Cosmos DB for data and check Person exists
String sql = String.format("SELECT * FROM c where c.id = '%s'", person.getId());
CosmosPagedIterable<Person> readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
Optional<Person> retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(person.getId())).findFirst();
Assert.assertNotNull("Person could not be retrieved", retrievedPerson.orElse(null));
// Wait a few seconds for the item's TTL to elapse in Cosmos DB
sleep(5000);
// Query Cosmos DB again and check that the person no longer exists
readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(person.getId())).findFirst();
Assert.assertFalse("Record still in DB", retrievedPerson.isPresent());
}
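The expiry assertions above only hold if TTL is enabled on the target container. A minimal sketch of that prerequisite with the Azure Cosmos Java SDK follows; the container name and partition key path are assumptions, not taken from the repo.

import com.azure.cosmos.CosmosDatabase;
import com.azure.cosmos.models.CosmosContainerProperties;

// Assumed setup: -1 enables TTL on the container without a container-wide default,
// so each item's own "ttl" field (inserted by the SMT above) controls its expiry.
void enableTtl(CosmosDatabase database) {
    CosmosContainerProperties containerProperties = new CosmosContainerProperties("kafkaconnect", "/id");  // names are assumptions
    containerProperties.setDefaultTimeToLiveInSeconds(-1);
    database.createContainerIfNotExists(containerProperties);
}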
Use of com.azure.cosmos.util.CosmosPagedIterable in project kafka-connect-cosmosdb by microsoft: class SinkConnectorIT, method testPostAvroMessage.
/**
* Post a valid Avro message that should flow through to Cosmos DB.
* Then read the result back from Cosmos DB.
*/
@Test
public void testPostAvroMessage() throws InterruptedException, ExecutionException {
// Configure Kafka Config for AVRO message
Properties kafkaProperties = createKafkaProducerProperties();
kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
kafkaProperties.put("schema.registry.url", SCHEMA_REGISTRY_URL);
avroProducer = new KafkaProducer<>(kafkaProperties);
// Create sink connector with AVRO config
addAvroConfigs();
connectClient.addConnector(connectConfig.build());
// Send Kafka message to topic
logger.debug("Sending Kafka message to " + kafkaProperties.getProperty("bootstrap.servers"));
String id = RandomUtils.nextLong(1L, 9999999L) + "";
Person person = new Person("Lucy Ferr", id);
String keySchema = "{\"type\": \"record\",\"name\": \"key\",\"fields\":[{\"type\": \"string\",\"name\": \"key\"}]}";
String valueSchema = "{\"type\": \"record\",\"fields\": [{\"type\": \"string\",\"name\": \"id\"}, {\"type\": \"string\",\"name\": \"name\"}], \"optional\": false,\"name\": \"record\"}";
Schema.Parser parserKey = new Schema.Parser();
Schema schemaKey = parserKey.parse(keySchema);
GenericRecord avroKeyRecord = new GenericData.Record(schemaKey);
avroKeyRecord.put("key", person.getId());
Schema.Parser parser = new Schema.Parser();
Schema schemaValue = parser.parse(valueSchema);
GenericRecord avroValueRecord = new GenericData.Record(schemaValue);
avroValueRecord.put("id", person.getId() + "");
avroValueRecord.put("name", person.getName());
ProducerRecord<GenericRecord, GenericRecord> personRecord = new ProducerRecord<>(KAFKA_TOPIC_AVRO, avroKeyRecord, avroValueRecord);
avroProducer.send(personRecord).get();
// Wait a few seconds for the sink connector to push data to Cosmos DB
sleep(8000);
// Query Cosmos DB for data
String sql = String.format("SELECT * FROM c where c.id = '%s'", person.getId());
CosmosPagedIterable<Person> readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
Optional<Person> retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(person.getId())).findFirst();
Assert.assertNotNull("Person could not be retrieved", retrievedPerson.orElse(null));
}
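All of these tests round-trip the same simple Person POJO. A minimal sketch consistent with the calls above (constructor taking name then id, string fields matching the JSON and Avro schemas) is shown below; the actual class in the repo may differ.

// Minimal sketch of the Person POJO these tests assume; the real class in the repo may differ.
public class Person {
    private String name;
    private String id;

    public Person() { }  // no-arg constructor for Jackson deserialization from Cosmos DB

    public Person(String name, String id) {
        this.name = name;
        this.id = id;
    }

    public String getId() { return id; }
    public String getName() { return name; }
}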
Use of com.azure.cosmos.util.CosmosPagedIterable in project kafka-connect-cosmosdb by microsoft: class SinkConnectorIT, method testPostJsonMessageWithJsonPathInProvidedInValueStrategy.
@Test
public void testPostJsonMessageWithJsonPathInProvidedInValueStrategy() throws InterruptedException, ExecutionException {
// Configure Kafka Config
Properties kafkaProperties = createKafkaProducerProperties();
kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName());
producer = new KafkaProducer<>(kafkaProperties);
// Create sink connector with the provided-in-value ID strategy and a JSON path
connectConfig.withConfig("id.strategy", ProvidedInValueStrategy.class.getName()).withConfig("id.strategy.jsonPath", "$.name");
connectClient.addConnector(connectConfig.build());
// Send Kafka message to topic
logger.debug("Sending Kafka message to " + kafkaProperties.getProperty("bootstrap.servers"));
Person person = new Person("Lucy Ferr", RandomUtils.nextLong(1L, 9999999L) + "");
ObjectMapper om = new ObjectMapper();
ProducerRecord<String, JsonNode> personRecord = new ProducerRecord<>(kafkaTopicJson, person.getId(), om.valueToTree(person));
producer.send(personRecord).get();
// Wait a few seconds for the sink connector to push data to Cosmos DB
sleep(8000);
// Query Cosmos DB for data; with the "$.name" JSON path, the document id is the person's name
String id = person.getName();
String sql = String.format("SELECT * FROM c where c.id = '%s'", id);
CosmosPagedIterable<Person> readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
Optional<Person> retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(id)).findFirst();
Assert.assertNotNull("Person could not be retrieved", retrievedPerson.orElse(null));
}
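The id.strategy.jsonPath setting derives the Cosmos DB document id from a field of the record value rather than from the Kafka record key. A rough illustration of that extraction with Jackson follows; this is not the connector's actual ProvidedInValueStrategy code, only the idea behind it.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Rough illustration only: for a simple single-field path like "$.name",
// strip the "$." prefix and read that field from the record value.
static String idFromJsonPath(String recordValueJson, String jsonPath) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    JsonNode value = mapper.readTree(recordValueJson);
    String field = jsonPath.substring(2);  // "$.name" -> "name"
    return value.get(field).asText();      // becomes the document id (c.id) in Cosmos DB
}
// Example: idFromJsonPath("{\"id\":\"1\",\"name\":\"Lucy Ferr\"}", "$.name") returns "Lucy Ferr",
// which is why the test above queries Cosmos DB by person.getName().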
Use of com.azure.cosmos.util.CosmosPagedIterable in project kafka-connect-cosmosdb by microsoft: class SinkConnectorIT, method testPostJsonMessage.
/**
* Post a valid JSON message that should flow through to Cosmos DB.
* Then read the result back from Cosmos DB.
*/
@Test
public void testPostJsonMessage() throws InterruptedException, ExecutionException {
// Configure Kafka Config
Properties kafkaProperties = createKafkaProducerProperties();
kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName());
producer = new KafkaProducer<>(kafkaProperties);
// Create sink connector with default config
connectClient.addConnector(connectConfig.build());
// Send Kafka message to topic
logger.debug("Sending Kafka message to " + kafkaProperties.getProperty("bootstrap.servers"));
Person person = new Person("Lucy Ferr", RandomUtils.nextLong(1L, 9999999L) + "");
ObjectMapper om = new ObjectMapper();
ProducerRecord<String, JsonNode> personRecord = new ProducerRecord<>(kafkaTopicJson, person.getId(), om.valueToTree(person));
producer.send(personRecord).get();
// Wait a few seconds for the sink connector to push data to Cosmos DB
sleep(8000);
// Query Cosmos DB for data
String sql = String.format("SELECT * FROM c where c.id = '%s'", person.getId());
CosmosPagedIterable<Person> readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
Optional<Person> retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(person.getId())).findFirst();
Assert.assertNotNull("Person could not be retrieved", retrievedPerson.orElse(null));
}
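Every test begins with the shared createKafkaProducerProperties() helper. A plausible minimal version is sketched below; the bootstrap address and client id are assumptions, and the repo's real helper may set more.

import java.util.Properties;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

// Assumed shape of the shared helper: the tests above only rely on it supplying
// bootstrap.servers and a key serializer; each test overrides the value serializer.
private Properties createKafkaProducerProperties() {
    Properties kafkaProperties = new Properties();
    kafkaProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");  // assumption
    kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    kafkaProperties.put(ProducerConfig.CLIENT_ID_CONFIG, "IntegrationTest");  // assumption
    return kafkaProperties;
}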