Use of org.apache.commons.lang3.RandomUtils.nextLong in the project azure-tools-for-java by Microsoft:
the class PomXmlUpdater, method formatElements.
/**
 * Serializes {@code newNodes} into the XML text of {@code parent}, re-indented one level
 * below the parent's own indentation, without reformatting the rest of the document.
 *
 * @param originalXml the full original XML text (returned unchanged when there is nothing to add)
 * @param parent      the parent element (location-aware: carries its line/column in the source)
 * @param newNodes    the new child elements to render; may be empty
 * @return the XML text with the pretty-printed new nodes spliced in
 */
private static String formatElements(String originalXml, LocationAwareElement parent, List<Element> newNodes) {
    if (newNodes.isEmpty()) {
        return originalXml;
    }
    final String[] originXmlLines = TextUtils.splitLines(originalXml);
    // Line/column reported by the parser are 1-based; column points just past the '<'.
    final String baseIndent = Utils.calcXmlIndent(originXmlLines, parent.getLineNumber() - 1, parent.getColumnNumber() - 2);
    // Unique marker so the insertion point survives document serialization.
    final String placeHolder = String.format("@PLACEHOLDER_RANDOM_%s@", RandomUtils.nextLong());
    final Text placeHolderNode = new DefaultText("\n" + placeHolder);
    // Swap the first new node for the placeholder text node and detach it from the tree.
    Element newNode = newNodes.get(0);
    parent.content().replaceAll(t -> t == newNode ? placeHolderNode : t);
    newNode.setParent(null);
    // Drop whitespace immediately before the placeholder so indentation is fully controlled below.
    XmlUtils.trimTextBeforeEnd(parent, placeHolderNode);
    final String xmlWithPlaceholder = parent.getDocument().asXML();
    // FIX: use the generic ArrayList<>(...) instead of the raw type (unchecked warning).
    final List<String> newXmlLines = new ArrayList<>(Arrays.asList(TextUtils.splitLines(XmlUtils.prettyPrintElementNoNamespace(newNode))));
    // Append the pretty-printed lines of every remaining new node (index 0 handled above).
    for (int i = 1; i < newNodes.size(); i++) {
        newXmlLines.addAll(Arrays.asList(TextUtils.splitLines(XmlUtils.prettyPrintElementNoNamespace(newNodes.get(i)))));
    }
    // Indent each rendered line one level below the parent; the trailing baseIndent aligns the closing tag.
    final String replacement = newXmlLines.stream().map(t -> baseIndent + " " + t).collect(Collectors.joining("\n")) + "\n" + baseIndent;
    return xmlWithPlaceholder.replace(placeHolder, replacement);
}
Use of org.apache.commons.lang3.RandomUtils.nextLong in the project azure-tools-for-java by Microsoft (duplicate listing):
the class PomXmlUpdater, method formatElements.
/**
 * Splices the pretty-printed text of {@code newNodes} into the XML of {@code parent},
 * preserving the surrounding document text and matching the parent's indentation.
 *
 * @param originalXml the complete original XML (returned as-is for an empty node list)
 * @param parent      the location-aware parent element whose position drives the indent
 * @param newNodes    new child elements to serialize; may be empty
 * @return the updated XML text
 */
private static String formatElements(String originalXml, LocationAwareElement parent, List<Element> newNodes) {
    if (newNodes.isEmpty()) {
        return originalXml;
    }
    final String[] originXmlLines = TextUtils.splitLines(originalXml);
    // Parser positions are 1-based; the column adjustment lands on the '<' of the parent tag.
    final String baseIndent = Utils.calcXmlIndent(originXmlLines, parent.getLineNumber() - 1, parent.getColumnNumber() - 2);
    // Random marker that survives asXML() so we can find where to insert the rendered nodes.
    final String placeHolder = String.format("@PLACEHOLDER_RANDOM_%s@", RandomUtils.nextLong());
    final Text placeHolderNode = new DefaultText("\n" + placeHolder);
    // Replace the first new node with the placeholder and detach it for independent rendering.
    Element newNode = newNodes.get(0);
    parent.content().replaceAll(t -> t == newNode ? placeHolderNode : t);
    newNode.setParent(null);
    // Strip whitespace before the placeholder; indentation is reconstructed explicitly below.
    XmlUtils.trimTextBeforeEnd(parent, placeHolderNode);
    final String xmlWithPlaceholder = parent.getDocument().asXML();
    // FIX: replace the raw-typed 'new ArrayList(...)' with the generic diamond form.
    final List<String> newXmlLines = new ArrayList<>(Arrays.asList(TextUtils.splitLines(XmlUtils.prettyPrintElementNoNamespace(newNode))));
    // Node 0 is already rendered; append the lines of the remaining nodes.
    for (int i = 1; i < newNodes.size(); i++) {
        newXmlLines.addAll(Arrays.asList(TextUtils.splitLines(XmlUtils.prettyPrintElementNoNamespace(newNodes.get(i)))));
    }
    // One extra indent level per line; the final baseIndent positions the parent's closing tag.
    final String replacement = newXmlLines.stream().map(t -> baseIndent + " " + t).collect(Collectors.joining("\n")) + "\n" + baseIndent;
    return xmlWithPlaceholder.replace(placeHolder, replacement);
}
Use of org.apache.commons.lang3.RandomUtils.nextLong in the project kafka-connect-cosmosdb by Microsoft:
the class SinkConnectorIT, method testPostAvroMessageWithTemplateIdStrategy.
/**
 * Integration test: publishes an AVRO-encoded record to Kafka and verifies that the sink
 * connector, configured with the template ID strategy ("${topic}-${key}"), writes a document
 * to Cosmos DB whose id combines the topic name and the serialized record key.
 */
@Test
public void testPostAvroMessageWithTemplateIdStrategy() throws InterruptedException, ExecutionException {
    // Configure a Kafka producer that AVRO-serializes both key and value via the schema registry.
    Properties kafkaProperties = createKafkaProducerProperties();
    kafkaProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    kafkaProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    kafkaProperties.put("schema.registry.url", SCHEMA_REGISTRY_URL);
    avroProducer = new KafkaProducer<>(kafkaProperties);
    addAvroConfigs();
    // Create the sink connector with the template ID strategy: document id = "<topic>-<key>".
    connectConfig.withConfig("id.strategy", TemplateStrategy.class.getName()).withConfig("id.strategy.template", "${topic}-${key}");
    connectClient.addConnector(connectConfig.build());
    // Send a Kafka message to the AVRO topic.
    logger.debug("Sending Kafka message to " + kafkaProperties.getProperty("bootstrap.servers"));
    // FIX: 'id' is already a String — removed the redundant 'id + ""' concatenation below.
    String id = RandomUtils.nextLong(1L, 9999999L) + "";
    Person person = new Person("Lucy Ferr", id);
    // NOTE(review): keySchema ends with an extra '}' ("...}]}}"); Avro's Schema.Parser appears to
    // tolerate the trailing token here, but the JSON should end "...}]}" — confirm and fix upstream.
    String keySchema = "{\"type\": \"record\",\"name\": \"key\",\"fields\":[{\"type\": \"string\",\"name\": \"key\"}]}}";
    String valueSchema = "{\"type\": \"record\",\"fields\": " + " [{\"type\": \"string\",\"name\": \"id\"}, " + " {\"type\": \"string\",\"name\": \"name\"}], " + " \"optional\": false,\"name\": \"record\"}";
    Schema.Parser parserKey = new Schema.Parser();
    Schema schemaKey = parserKey.parse(keySchema);
    GenericRecord avroKeyRecord = new GenericData.Record(schemaKey);
    avroKeyRecord.put("key", person.getId() + "");
    Schema.Parser parser = new Schema.Parser();
    Schema schemaValue = parser.parse(valueSchema);
    GenericRecord avroValueRecord = new GenericData.Record(schemaValue);
    avroValueRecord.put("id", person.getId() + "");
    avroValueRecord.put("name", person.getName());
    ProducerRecord<GenericRecord, GenericRecord> personRecord = new ProducerRecord<>(KAFKA_TOPIC_AVRO, avroKeyRecord, avroValueRecord);
    avroProducer.send(personRecord).get();
    // Wait a few seconds for the sink connector to push data to Cosmos DB.
    sleep(8000);
    // The AVRO key is a record, so the templated id embeds its JSON form: <topic>-{"key":"<id>"}.
    String cosmosId = KAFKA_TOPIC_AVRO + "-{\"key\":\"" + person.getId() + "\"}";
    String sql = String.format("SELECT * FROM c where c.id = '%s'", cosmosId);
    CosmosPagedIterable<Person> readResponse = targetContainer.queryItems(sql, new CosmosQueryRequestOptions(), Person.class);
    Optional<Person> retrievedPerson = readResponse.stream().filter(p -> p.getId().equals(cosmosId)).findFirst();
    Assert.assertNotNull("Person could not be retrieved", retrievedPerson.orElse(null));
}
Use of org.apache.commons.lang3.RandomUtils.nextLong in the project kafka-connect-cosmosdb by Microsoft:
the class SinkConnectorIT, method testPostJsonMessageWithTemplateIdStrategy.
/**
 * Integration test: publishes a JSON record to Kafka and checks that the sink connector,
 * configured with the template ID strategy ("${topic}-${key}"), lands a Cosmos DB document
 * whose id is the topic name joined with the record key.
 */
@Test
public void testPostJsonMessageWithTemplateIdStrategy() throws InterruptedException, ExecutionException {
    // Kafka producer configured for plain JSON values.
    final Properties producerProps = createKafkaProducerProperties();
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName());
    producer = new KafkaProducer<>(producerProps);
    // Sink connector with the template ID strategy: document id = "<topic>-<key>".
    connectConfig.withConfig("id.strategy", TemplateStrategy.class.getName()).withConfig("id.strategy.template", "${topic}-${key}");
    connectClient.addConnector(connectConfig.build());
    // Publish a person record keyed by its randomly generated id.
    logger.debug("Sending Kafka message to " + producerProps.getProperty("bootstrap.servers"));
    final Person testPerson = new Person("Lucy Ferr", RandomUtils.nextLong(1L, 9999999L) + "");
    final ObjectMapper mapper = new ObjectMapper();
    final ProducerRecord<String, JsonNode> message = new ProducerRecord<>(kafkaTopicJson, testPerson.getId() + "", mapper.valueToTree(testPerson));
    producer.send(message).get();
    // Give the sink connector time to write the record through to Cosmos DB.
    sleep(8000);
    // The expected document id follows the "${topic}-${key}" template.
    final String expectedId = kafkaTopicJson + "-" + testPerson.getId();
    final String query = String.format("SELECT * FROM c where c.id = '%s'", expectedId);
    final CosmosPagedIterable<Person> results = targetContainer.queryItems(query, new CosmosQueryRequestOptions(), Person.class);
    final Optional<Person> match = results.stream().filter(p -> p.getId().equals(expectedId)).findFirst();
    Assert.assertNotNull("Person could not be retrieved", match.orElse(null));
}
Use of org.apache.commons.lang3.RandomUtils.nextLong in the project kafka-connect-cosmosdb by Microsoft:
the class SinkConnectorIT, method testPostJsonWithSchemaMessage.
/**
 * Integration test: posts a schema-embedded JSON message ("schema" + "payload" envelope) to
 * Kafka with schemas enabled on the converter, then reads the resulting document back from
 * Cosmos DB by the person's id.
 */
@Test
public void testPostJsonWithSchemaMessage() throws InterruptedException, ExecutionException, JsonMappingException, JsonProcessingException {
    // Kafka producer configured for JSON values carrying an inline schema.
    final Properties producerProps = createKafkaProducerProperties();
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName());
    producer = new KafkaProducer<>(producerProps);
    // Sink connector with schema-enabled JSON conversion, routed to the schema topic.
    connectClient.addConnector(connectConfig.withConfig("value.converter.schemas.enable", "true").withConfig("topics", KAFKA_TOPIC_JSON_SCHEMA).withConfig("connect.cosmos.containers.topicmap", KAFKA_TOPIC_JSON_SCHEMA + "#kafka").build());
    // Build the "schema"+"payload" envelope around a randomly-keyed person and publish it.
    logger.debug("Sending Kafka message to " + producerProps.getProperty("bootstrap.servers"));
    final Person testPerson = new Person("Lucy Ferr", RandomUtils.nextLong(1L, 9999999L) + "");
    final String envelope = "{\"schema\":{\"type\":\"struct\",\"fields\":[{" + "\"type\": \"string\",\"field\": \"id\"}," + "{\"type\": \"string\",\"field\": \"name\"}]," + "\"name\": \"records\"},\"payload\": {" + "\"id\":\"" + testPerson.getId() + "\",\"name\":\"" + testPerson.getName() + "\"}}";
    final ObjectMapper mapper = new ObjectMapper();
    final JsonNode envelopeNode = mapper.readTree(envelope);
    final ProducerRecord<String, JsonNode> message = new ProducerRecord<>(KAFKA_TOPIC_JSON_SCHEMA, testPerson.getId(), envelopeNode);
    producer.send(message).get();
    // Give the sink connector time to write the record through to Cosmos DB.
    sleep(8000);
    // With no custom id strategy, the document id is the person's own id.
    final String query = String.format("SELECT * FROM c where c.id = '%s'", testPerson.getId() + "");
    final CosmosPagedIterable<Person> results = targetContainer.queryItems(query, new CosmosQueryRequestOptions(), Person.class);
    final Optional<Person> match = results.stream().filter(p -> p.getId().equals(testPerson.getId())).findFirst();
    Assert.assertNotNull("Person could not be retrieved", match.orElse(null));
}
End of aggregated usage examples for RandomUtils.nextLong.