Use of com.azure.cosmos.util.CosmosPagedIterable in project azure-cosmos-java-sql-api-samples by Azure-Samples.
From the class CosmosDiagnosticsQuickStart, method queryDocuments:
private void queryDocuments() throws Exception {
    logger.info("Query documents in the container : {}", containerName);
    String sql = "SELECT * FROM c WHERE c.lastName = 'Witherspoon'";
    CosmosPagedIterable<Family> filteredFamilies = container.queryItems(sql, new CosmosQueryRequestOptions(), Family.class);
    // Add a handler to capture diagnostics for every page of results
    filteredFamilies = filteredFamilies.handle(familyFeedResponse -> {
        logger.info("Query Item diagnostics through handler : {}", familyFeedResponse.getCosmosDiagnostics());
    });
    // Or capture diagnostics through the iterableByPage() APIs
    filteredFamilies.iterableByPage().forEach(familyFeedResponse -> {
        logger.info("Query item diagnostics through iterableByPage : {}", familyFeedResponse.getCosmosDiagnostics());
    });
    logger.info("Done.");
}
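Beyond diagnostics, the same per-page APIs expose each page's request charge and continuation token. A minimal sketch, reusing filteredFamilies from the sample above; the preferred page size of 10 is an illustrative choice, not something the sample sets:

// Iterate page by page with a preferred page size, logging each page's
// RU charge and continuation token alongside its result count.
filteredFamilies.iterableByPage(10).forEach(page -> {
    logger.info("Page with {} results cost {} RUs, continuation token: {}",
            page.getResults().size(), page.getRequestCharge(), page.getContinuationToken());
});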
Use of com.azure.cosmos.util.CosmosPagedIterable in project kafka-connect-cosmosdb by microsoft.
From the class SinkConnectorIT, method testPostAvroMessageWithTemplateIdStrategy:
@Test
public void testPostAvroMessageWithTemplateIdStrategy() throws InterruptedException, ExecutionException {
    // Configure Kafka producer for AVRO messages
    Properties kafkaProperties = createKafkaProducerProperties();
    kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    kafkaProperties.put("schema.registry.url", SCHEMA_REGISTRY_URL);
    avroProducer = new KafkaProducer<>(kafkaProperties);
    addAvroConfigs();
    // Create sink connector with template ID strategy
    connectConfig.withConfig("id.strategy", TemplateStrategy.class.getName()).withConfig("id.strategy.template", "${topic}-${key}");
    connectClient.addConnector(connectConfig.build());
    // Send Kafka message to topic
    logger.debug("Sending Kafka message to " + kafkaProperties.getProperty("bootstrap.servers"));
    String id = RandomUtils.nextLong(1L, 9999999L) + "";
    Person person = new Person("Lucy Ferr", id);
    String keySchema = "{\"type\": \"record\",\"name\": \"key\",\"fields\":[{\"type\": \"string\",\"name\": \"key\"}]}";
    String valueSchema = "{\"type\": \"record\",\"fields\": " + " [{\"type\": \"string\",\"name\": \"id\"}, " + " {\"type\": \"string\",\"name\": \"name\"}], " + " \"optional\": false,\"name\": \"record\"}";
    Schema.Parser parserKey = new Schema.Parser();
    Schema schemaKey = parserKey.parse(keySchema);
    GenericRecord avroKeyRecord = new GenericData.Record(schemaKey);
    avroKeyRecord.put("key", person.getId());
    Schema.Parser parser = new Schema.Parser();
    Schema schemaValue = parser.parse(valueSchema);
    GenericRecord avroValueRecord = new GenericData.Record(schemaValue);
    avroValueRecord.put("id", person.getId());
    avroValueRecord.put("name", person.getName());
    ProducerRecord<GenericRecord, GenericRecord> personRecord = new ProducerRecord<>(KAFKA_TOPIC_AVRO, avroKeyRecord, avroValueRecord);
    avroProducer.send(personRecord).get();
    // Wait a few seconds for the sink connector to push data to Cosmos DB
    sleep(8000);
    // Query Cosmos DB for the document; the template ID strategy renders the id
    // as "<topic>-<serialized key>", and the Avro key struct serializes to the
    // JSON {"key":"<id>"}
    String cosmosId = KAFKA_TOPIC_AVRO + "-{\"key\":\"" + person.getId() + "\"}";
    String sql = String.format("SELECT * FROM c where c.id = '%s'", cosmosId);
    CosmosPagedIterable<Person> readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
    Optional<Person> retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(cosmosId)).findFirst();
    Assert.assertNotNull("Person could not be retrieved", retrievedPerson.orElse(null));
}
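The tests in this class round-trip a simple Person POJO with string id and name fields. A minimal sketch of what such a class could look like, inferred from the constructor and getters used above (the real class in kafka-connect-cosmosdb may differ):

// Hypothetical minimal POJO matching the calls in these tests; the no-arg
// constructor is needed for JSON deserialization of query results.
public class Person {
    private String name;
    private String id;

    public Person() {
    }

    public Person(String name, String id) {
        this.name = name;
        this.id = id;
    }

    public String getId() {
        return id;
    }

    public String getName() {
        return name;
    }
}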
Use of com.azure.cosmos.util.CosmosPagedIterable in project kafka-connect-cosmosdb by microsoft.
From the class SinkConnectorIT, method testPostJsonMessageWithTemplateIdStrategy:
@Test
public void testPostJsonMessageWithTemplateIdStrategy() throws InterruptedException, ExecutionException {
    // Configure Kafka producer for JSON messages
    Properties kafkaProperties = createKafkaProducerProperties();
    kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName());
    producer = new KafkaProducer<>(kafkaProperties);
    // Create sink connector with template ID strategy
    connectConfig.withConfig("id.strategy", TemplateStrategy.class.getName()).withConfig("id.strategy.template", "${topic}-${key}");
    connectClient.addConnector(connectConfig.build());
    // Send Kafka message to topic
    logger.debug("Sending Kafka message to " + kafkaProperties.getProperty("bootstrap.servers"));
    Person person = new Person("Lucy Ferr", RandomUtils.nextLong(1L, 9999999L) + "");
    ObjectMapper om = new ObjectMapper();
    ProducerRecord<String, JsonNode> personRecord = new ProducerRecord<>(kafkaTopicJson, person.getId(), om.valueToTree(person));
    producer.send(personRecord).get();
    // Wait a few seconds for the sink connector to push data to Cosmos DB
    sleep(8000);
    // Query Cosmos DB for the document; with a plain string key, the template
    // "${topic}-${key}" renders as "<topic>-<id>"
    String id = kafkaTopicJson + "-" + person.getId();
    String sql = String.format("SELECT * FROM c where c.id = '%s'", id);
    CosmosPagedIterable<Person> readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
    Optional<Person> retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(id)).findFirst();
    Assert.assertNotNull("Person could not be retrieved", retrievedPerson.orElse(null));
}
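The fixed sleep(8000) can make these tests flaky on slow runs. One alternative, shown here as a sketch rather than anything the project does, is to poll the container until the document appears or a deadline passes; pollForPerson is a hypothetical helper reusing targetContainer and the query/filter pattern from the test above:

// Hypothetical helper: poll the query until it returns a match or times out.
private Optional<Person> pollForPerson(String sql, String expectedId, long timeoutMs) throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (System.currentTimeMillis() < deadline) {
        Optional<Person> match = targetContainer
                .queryItems(sql, new CosmosQueryRequestOptions(), Person.class)
                .stream()
                .filter(p -> p.getId().equals(expectedId))
                .findFirst();
        if (match.isPresent()) {
            return match;
        }
        Thread.sleep(500); // back off briefly between polls
    }
    return Optional.empty();
}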
Use of com.azure.cosmos.util.CosmosPagedIterable in project kafka-connect-cosmosdb by microsoft.
From the class SinkConnectorIT, method testPostJsonWithSchemaMessage:
/**
 * Post a valid JSON-with-schema message that should go through to Cosmos DB,
 * then read the result back from Cosmos DB.
 */
@Test
public void testPostJsonWithSchemaMessage() throws InterruptedException, ExecutionException, JsonMappingException, JsonProcessingException {
    // Configure Kafka producer for JSON-with-schema messages
    Properties kafkaProperties = createKafkaProducerProperties();
    kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName());
    producer = new KafkaProducer<>(kafkaProperties);
    // Create sink connector with schema-enabled JSON config
    connectClient.addConnector(connectConfig.withConfig("value.converter.schemas.enable", "true").withConfig("topics", KAFKA_TOPIC_JSON_SCHEMA).withConfig("connect.cosmos.containers.topicmap", KAFKA_TOPIC_JSON_SCHEMA + "#kafka").build());
    // Send Kafka message to topic
    logger.debug("Sending Kafka message to " + kafkaProperties.getProperty("bootstrap.servers"));
    Person person = new Person("Lucy Ferr", RandomUtils.nextLong(1L, 9999999L) + "");
    String jsonSchemaString = "{\"schema\":{\"type\":\"struct\",\"fields\":[{" + "\"type\": \"string\",\"field\": \"id\"}," + "{\"type\": \"string\",\"field\": \"name\"}]," + "\"name\": \"records\"},\"payload\": {" + "\"id\":\"" + person.getId() + "\",\"name\":\"" + person.getName() + "\"}}";
    ObjectMapper om = new ObjectMapper();
    JsonNode jsonSchemaNode = om.readTree(jsonSchemaString);
    ProducerRecord<String, JsonNode> personRecord = new ProducerRecord<>(KAFKA_TOPIC_JSON_SCHEMA, person.getId(), jsonSchemaNode);
    producer.send(personRecord).get();
    // Wait a few seconds for the sink connector to push data to Cosmos DB
    sleep(8000);
    // Query Cosmos DB for data
    String sql = String.format("SELECT * FROM c where c.id = '%s'", person.getId());
    CosmosPagedIterable<Person> readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
    Optional<Person> retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(person.getId())).findFirst();
    Assert.assertNotNull("Person could not be retrieved", retrievedPerson.orElse(null));
}
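When the document id is known up front, a point read is cheaper in request units than a query. A minimal sketch under an extra assumption the test does not confirm: that the target container's partition key path is /id. Note that readItem throws a CosmosException (404) when the item is absent, so a query remains the gentler choice for "may not be there yet" checks:

// Point read instead of the SQL query above; valid only if the container
// is partitioned on /id (an assumption for this sketch).
CosmosItemResponse<Person> response =
        targetContainer.readItem(person.getId(), new PartitionKey(person.getId()), Person.class);
Person retrieved = response.getItem();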
Use of com.azure.cosmos.util.CosmosPagedIterable in project kafka-connect-cosmosdb by microsoft.
From the class SinkConnectorIT, method testPostAvroMessageWithJsonPathInProvidedInKeyStrategy:
@Test
public void testPostAvroMessageWithJsonPathInProvidedInKeyStrategy() throws InterruptedException, ExecutionException {
    // Configure Kafka producer for AVRO messages
    Properties kafkaProperties = createKafkaProducerProperties();
    kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    kafkaProperties.put("schema.registry.url", SCHEMA_REGISTRY_URL);
    avroProducer = new KafkaProducer<>(kafkaProperties);
    addAvroConfigs();
    // Create sink connector with provided-in-key ID strategy and a JSON path
    // into the record key
    connectConfig.withConfig("id.strategy", ProvidedInKeyStrategy.class.getName()).withConfig("id.strategy.jsonPath", "$.key");
    connectClient.addConnector(connectConfig.build());
    // Send Kafka message to topic
    logger.debug("Sending Kafka message to " + kafkaProperties.getProperty("bootstrap.servers"));
    String id = RandomUtils.nextLong(1L, 9999999L) + "";
    Person person = new Person("Lucy Ferr", id);
    String keySchema = "{\"type\": \"record\",\"name\": \"key\",\"fields\":[{\"type\": \"string\",\"name\": \"key\"}]}";
    String valueSchema = "{\"type\": \"record\",\"fields\": " + " [{\"type\": \"string\",\"name\": \"id\"}, " + " {\"type\": \"string\",\"name\": \"name\"}], " + " \"optional\": false,\"name\": \"record\"}";
    Schema.Parser parserKey = new Schema.Parser();
    Schema schemaKey = parserKey.parse(keySchema);
    GenericRecord avroKeyRecord = new GenericData.Record(schemaKey);
    avroKeyRecord.put("key", person.getId());
    Schema.Parser parser = new Schema.Parser();
    Schema schemaValue = parser.parse(valueSchema);
    GenericRecord avroValueRecord = new GenericData.Record(schemaValue);
    avroValueRecord.put("id", person.getId());
    avroValueRecord.put("name", person.getName());
    ProducerRecord<GenericRecord, GenericRecord> personRecord = new ProducerRecord<>(KAFKA_TOPIC_AVRO, avroKeyRecord, avroValueRecord);
    avroProducer.send(personRecord).get();
    // Wait a few seconds for the sink connector to push data to Cosmos DB
    sleep(8000);
    // Query Cosmos DB for the document; "$.key" extracts the key field of the
    // Avro key struct, so the document id is simply the person's id
    String cosmosId = person.getId();
    String sql = String.format("SELECT * FROM c where c.id = '%s'", cosmosId);
    CosmosPagedIterable<Person> readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
    Optional<Person> retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(cosmosId)).findFirst();
    Assert.assertNotNull("Person could not be retrieved", retrievedPerson.orElse(null));
}
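The two Avro tests build identical key/value records and differ only in connector configuration. A hypothetical refactoring, not present in the project, could factor the record construction into a helper; KEY_SCHEMA and VALUE_SCHEMA are assumed constants holding the schema strings from the tests above:

// Hypothetical helper: build the Avro key/value pair for a Person once,
// reusing the same key and value schemas as both Avro tests.
private ProducerRecord<GenericRecord, GenericRecord> buildAvroPersonRecord(Person person) {
    Schema schemaKey = new Schema.Parser().parse(KEY_SCHEMA);
    Schema schemaValue = new Schema.Parser().parse(VALUE_SCHEMA);
    GenericRecord avroKeyRecord = new GenericData.Record(schemaKey);
    avroKeyRecord.put("key", person.getId());
    GenericRecord avroValueRecord = new GenericData.Record(schemaValue);
    avroValueRecord.put("id", person.getId());
    avroValueRecord.put("name", person.getName());
    return new ProducerRecord<>(KAFKA_TOPIC_AVRO, avroKeyRecord, avroValueRecord);
}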