Example use of com.azure.cosmos.models.CosmosContainerProperties in the project scalardb by scalar-labs, from the class CosmosAdmin, method createMetadataDatabaseAndContainerIfNotExists:
/**
 * Ensures the metadata database and its container exist, creating either if missing.
 * The database is provisioned with manual throughput parsed from
 * {@code DEFAULT_REQUEST_UNIT}; the container is partitioned on {@code /id}.
 */
private void createMetadataDatabaseAndContainerIfNotExists() {
    // Default request units are stored as a string constant, hence the parse.
    ThroughputProperties defaultThroughput =
        ThroughputProperties.createManualThroughput(Integer.parseInt(DEFAULT_REQUEST_UNIT));
    client.createDatabaseIfNotExists(metadataDatabase, defaultThroughput);

    CosmosContainerProperties metadataContainerProperties =
        new CosmosContainerProperties(METADATA_CONTAINER, "/id");
    client.getDatabase(metadataDatabase).createContainerIfNotExists(metadataContainerProperties);
}
Example use of com.azure.cosmos.models.CosmosContainerProperties in the project cas by apereo, from the class CosmosDbObjectFactory, method createContainer:
/**
 * Creates the named container in the configured CosmosDb database if it does not
 * already exist, applying the indexing mode from the CosmosDb properties.
 *
 * @param name the container name
 * @param partitionKey the partition key field (without the leading '/')
 */
public void createContainer(final String name, final String partitionKey) {
    LOGGER.debug("Creating CosmosDb container [{}]", name);
    val containerProperties = new CosmosContainerProperties(name, '/' + partitionKey);
    // Indexing mode is configured as a string; map it onto the SDK enum.
    val indexingMode = IndexingMode.valueOf(properties.getIndexingMode());
    containerProperties.setIndexingPolicy(new IndexingPolicy().setIndexingMode(indexingMode));
    val database = client.getDatabase(properties.getDatabase());
    val response = database.createContainerIfNotExists(containerProperties);
    LOGGER.debug("Created CosmosDb container [{}]", response.getProperties().getId());
}
Example use of com.azure.cosmos.models.CosmosContainerProperties in the project kafka-connect-cosmosdb by microsoft, from the class SinkConnectorIT, method before:
/**
 * Test fixture setup: loads the CosmosDB settings from the sink connector config JSON,
 * provisions the target database and container, and prepares the Kafka Connect client.
 *
 * @throws URISyntaxException if the config resource URL is malformed
 * @throws IOException if the config JSON cannot be read
 */
@Before
public void before() throws URISyntaxException, IOException {
    // Parse the sink.config.json test resource and pull out the connector section.
    URL sinkConfigUrl = SinkConnectorIT.class.getClassLoader().getResource("sink.config.json");
    JsonNode rootNode = new ObjectMapper().readTree(sinkConfigUrl);
    connectorName = rootNode.get("name").textValue();
    JsonNode config = rootNode.get("config");

    // The topic map is encoded as "<topic-json>#<container-name>".
    String topicContainerMapping = config.get("connect.cosmos.containers.topicmap").textValue();
    kafkaTopicJson = StringUtils.substringBefore(topicContainerMapping, "#");
    String sinkContainerName = StringUtils.substringAfter(topicContainerMapping, "#");

    // Build the Cosmos client from the connection settings in the config.
    logger.debug("Setting up the Cosmos DB client");
    cosmosClient =
        new CosmosClientBuilder()
            .endpoint(config.get("connect.cosmos.connection.endpoint").textValue())
            .key(config.get("connect.cosmos.master.key").textValue())
            .buildClient();

    // Provision the target database and container (/id partitioned, TTL enabled,
    // 400 RU/s manual throughput) if they do not already exist.
    databaseName = config.get("connect.cosmos.databasename").textValue();
    cosmosClient.createDatabaseIfNotExists(databaseName);
    CosmosDatabase database = cosmosClient.getDatabase(databaseName);
    CosmosContainerProperties sinkContainerProperties =
        new CosmosContainerProperties(sinkContainerName, "/id");
    sinkContainerProperties.setDefaultTimeToLiveInSeconds(-1);
    database.createContainerIfNotExists(
        sinkContainerProperties, ThroughputProperties.createManualThroughput(400));
    targetContainer = database.getContainer(sinkContainerName);

    // Prepare the Kafka Connect REST client and push the connector configuration.
    logger.debug("Setting up the Kafka Connect client");
    connectClient = new KafkaConnectClient(new Configuration(CONNECT_CLIENT_URL));
    setupConnectorConfig(config);
}
Example use of com.azure.cosmos.models.CosmosContainerProperties in the project kafka-connect-cosmosdb by microsoft, from the class SourceConnectorIT, method before:
/**
 * Load CosmosDB configuration from the connector config JSON and set up CosmosDB client.
 * Create an embedded Kafka Connect cluster.
 *
 * <p>Also provisions two Cosmos containers (one from the config's topic map, a second
 * for multi-worker testing), configures the source connector, and subscribes Kafka
 * consumers (JSON and AVRO) whose records are collected into buffers by the tests.
 */
@Before
public void before() throws URISyntaxException, IOException {
// Load the source.config.json config file
URL configFileUrl = SourceConnectorIT.class.getClassLoader().getResource("source.config.json");
JsonNode config = new ObjectMapper().readTree(configFileUrl);
connectorName = config.get("name").textValue();
// Narrow to the connector's "config" section for all remaining lookups.
config = config.get("config");
// The topic map is encoded as "<topic>#<container-name>".
String topicContainerMap = config.get("connect.cosmos.containers.topicmap").textValue();
String topic = StringUtils.substringBefore(topicContainerMap, "#");
String containerName = StringUtils.substringAfter(topicContainerMap, "#");
// Setup Cosmos Client
logger.debug("Setting up the Cosmos DB client");
cosmosClient = new CosmosClientBuilder().endpoint(config.get("connect.cosmos.connection.endpoint").textValue()).key(config.get("connect.cosmos.master.key").textValue()).buildClient();
// Create CosmosDB database if not exists
databaseName = config.get("connect.cosmos.databasename").textValue();
cosmosClient.createDatabaseIfNotExists(databaseName);
CosmosDatabase targetDatabase = cosmosClient.getDatabase(databaseName);
// Create Cosmos Containers (one from config, another for testing multiple workers) if they do not exist
CosmosContainerProperties containerProperties = new CosmosContainerProperties(containerName, "/id");
// -1 enables TTL on the container with no default expiry (per Cosmos DB TTL semantics).
containerProperties.setDefaultTimeToLiveInSeconds(-1);
targetDatabase.createContainerIfNotExists(containerProperties, ThroughputProperties.createManualThroughput(400));
// Reuse the same properties object for the second container by swapping its id.
containerProperties.setId(SECOND_COSMOS_CONTAINER);
targetDatabase.createContainerIfNotExists(containerProperties, ThroughputProperties.createManualThroughput(400));
targetContainer = targetDatabase.getContainer(containerName);
secondContainer = targetDatabase.getContainer(SECOND_COSMOS_CONTAINER);
// Setup Kafka Connect Client and connector config
logger.debug("Setting up the Kafka Connect client");
connectClient = new KafkaConnectClient(new Configuration(CONNECT_CLIENT_URL));
setupConnectorConfig(config);
// Create Kafka Consumer subscribed to topics, recordBuffer to store records from topics
Properties kafkaProperties = createKafkaConsumerProperties();
kafkaProperties.put("value.deserializer", JsonDeserializer.class.getName());
consumer = new KafkaConsumer<>(kafkaProperties);
consumer.subscribe(Arrays.asList(topic, SECOND_KAFKA_TOPIC));
// Create Kafka Consumer subscribed to AVRO topic, avroRecordBuffer to store records from AVRO topic
Properties kafkaAvroProperties = createKafkaConsumerProperties();
kafkaAvroProperties.put("value.deserializer", KafkaAvroDeserializer.class.getName());
// AVRO deserialization needs the schema registry endpoint.
kafkaAvroProperties.put("schema.registry.url", SCHEMA_REGISTRY_URL);
avroConsumer = new KafkaConsumer<>(kafkaAvroProperties);
avroConsumer.subscribe(Arrays.asList(AVRO_KAFKA_TOPIC));
logger.debug("Consuming Kafka messages from " + kafkaProperties.getProperty("bootstrap.servers"));
recordBuffer = new ArrayList<>();
avroRecordBuffer = new ArrayList<>();
}
Example use of com.azure.cosmos.models.CosmosContainerProperties in the project azure-cosmos-java-sql-api-samples by Azure-Samples, from the class SampleCRUDQuickstartAsync, method createContainerIfNotExists:
/**
 * Creates the container (partitioned on {@code /lastName}) with 400 RU/s manual throughput
 * if it does not already exist, then demonstrates replacing the container's properties.
 * Async SDK calls are driven synchronously via {@code block()} because this is a sample.
 *
 * @throws Exception if container creation fails (propagated from the blocked Mono)
 */
private void createContainerIfNotExists() throws Exception {
    logger.info("Create container " + containerName + " if not exists.");
    // Create container if not exists
    // <CreateContainerIfNotExists>
    CosmosContainerProperties containerProperties = new CosmosContainerProperties(containerName, "/lastName");
    ThroughputProperties throughputProperties = ThroughputProperties.createManualThroughput(400);
    Mono<CosmosContainerResponse> containerIfNotExists = database.createContainerIfNotExists(containerProperties, throughputProperties);
    // Create container with 400 RU/s
    CosmosContainerResponse cosmosContainerResponse = containerIfNotExists.block();
    container = database.getContainer(cosmosContainerResponse.getProperties().getId());
    // </CreateContainerIfNotExists>
    // Modify existing container: round-trip the server-side properties back through replace().
    containerProperties = cosmosContainerResponse.getProperties();
    Mono<CosmosContainerResponse> propertiesReplace = container.replace(containerProperties, new CosmosContainerRequestOptions());
    propertiesReplace.flatMap(containerResponse -> {
        // Fixed log message: the original concatenated getId() + "has been..." without a
        // separating space (emitting "...in <db>has been...") and misspelled "its" as "it's".
        logger.info("setupContainer(): Container " + container.getId() + " in " + database.getId() + " has been updated with its new properties.");
        return Mono.empty();
    }).onErrorResume((exception) -> {
        // Best-effort sample: log and swallow the failure rather than aborting.
        logger.error("setupContainer(): Unable to update properties for container " + container.getId() + " in database " + database.getId() + ". e: " + exception.getLocalizedMessage());
        return Mono.empty();
    }).block();
}
Aggregations