Use of com.hazelcast.instance.impl.Node in project hazelcast by hazelcast.
The class ClusterInfoTest, method test_start_time_single_node_cluster.
@Test
public void test_start_time_single_node_cluster() {
    HazelcastInstance h1 = factory.newHazelcastInstance();
    Node node1 = getNode(h1);
    assertNotEquals(Long.MIN_VALUE, node1.getClusterService().getClusterClock().getClusterStartTime());
}
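For context, the same lookup can be sketched against a standalone member started outside the TestHazelcastInstanceFactory harness. This is a minimal sketch, assuming Hazelcast's test-support Accessors.getNode helper is available on the classpath (its package has moved between versions, so adjust the import for your version):

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.instance.impl.Node;
import com.hazelcast.test.Accessors; // assumed test-support helper; location varies by Hazelcast version

public class ClusterStartTimeSketch {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        try {
            // Test support exposes the internal Node behind a HazelcastInstance.
            Node node = Accessors.getNode(hz);
            long clusterStartTime = node.getClusterService().getClusterClock().getClusterStartTime();
            System.out.println("Cluster start time: " + clusterStartTime);
        } finally {
            hz.shutdown();
        }
    }
}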
Use of com.hazelcast.instance.impl.Node in project hazelcast by hazelcast.
The class ClusterShutdownTest, method testClusterShutdownWithMultipleMembers.
private void testClusterShutdownWithMultipleMembers(int clusterSize, int nodeCountToTriggerShutdown) {
    TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(clusterSize);
    HazelcastInstance[] instances = factory.newInstances();
    assertClusterSizeEventually(clusterSize, instances);
    instances[0].getCluster().changeClusterState(ClusterState.PASSIVE);
    Node[] nodes = getNodes(instances);
    final CountDownLatch latch = new CountDownLatch(1);
    for (int i = 0; i < nodeCountToTriggerShutdown; i++) {
        final HazelcastInstance instance = instances[i];
        final Runnable shutdownRunnable = new Runnable() {
            @Override
            public void run() {
                assertOpenEventually(latch);
                instance.getCluster().shutdown();
            }
        };
        new Thread(shutdownRunnable).start();
    }
    latch.countDown();
    assertNodesShutDownEventually(nodes);
}
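The flow the test exercises reduces to the public Cluster API: move the cluster to PASSIVE, then call Cluster.shutdown(), which shuts down every member rather than only the caller. A minimal sketch of that flow with two plain members (the class name is illustrative, not from the test):

import com.hazelcast.cluster.ClusterState;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;

public class ClusterShutdownSketch {
    public static void main(String[] args) {
        HazelcastInstance member1 = Hazelcast.newHazelcastInstance();
        HazelcastInstance member2 = Hazelcast.newHazelcastInstance();
        // Put the cluster into PASSIVE first, as the test does.
        member1.getCluster().changeClusterState(ClusterState.PASSIVE);
        // Cluster.shutdown() shuts down the whole cluster, not just member1.
        member1.getCluster().shutdown();
    }
}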
Use of com.hazelcast.instance.impl.Node in project hazelcast by hazelcast.
The class DistributedObjectCounterCollector, method forEachMetric.
@Override
public void forEachMetric(Node node, BiConsumer<PhoneHomeMetrics, String> metricsConsumer) {
    InternalProxyService proxyService = node.nodeEngine.getProxyService();
    Map<String, Long> objectsPerService = proxyService.getAllDistributedObjects().stream()
            .filter(obj -> INTERNAL_OBJECTS_PREFIXES.stream().noneMatch(prefix -> obj.getName().startsWith(prefix)))
            .filter(obj -> SERVICE_NAME_TO_METRIC_NAME.containsKey(obj.getServiceName()))
            .collect(groupingBy(DistributedObject::getServiceName, Collectors.counting()));
    SERVICE_NAME_TO_METRIC_NAME.forEach((serviceName, metricNames) -> {
        metricsConsumer.accept(metricNames[0], String.valueOf(objectsPerService.getOrDefault(serviceName, 0L)));
        metricsConsumer.accept(metricNames[1], String.valueOf(proxyService.getCreatedCount(serviceName)));
    });
}
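The heart of this collector is the groupingBy/counting pass over distributed objects. The same counting step can be sketched against the public HazelcastInstance API; the "__" prefix filter below is only illustrative, standing in for the collector's own INTERNAL_OBJECTS_PREFIXES list:

import java.util.Map;
import java.util.stream.Collectors;

import com.hazelcast.core.DistributedObject;
import com.hazelcast.core.HazelcastInstance;

public class ObjectsPerServiceSketch {
    // Counts distributed objects per service name, mirroring the collector's grouping step.
    static Map<String, Long> objectsPerService(HazelcastInstance hz) {
        return hz.getDistributedObjects().stream()
                // Illustrative filter; the real collector checks its INTERNAL_OBJECTS_PREFIXES constants.
                .filter(obj -> !obj.getName().startsWith("__"))
                .collect(Collectors.groupingBy(DistributedObject::getServiceName, Collectors.counting()));
    }
}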
Use of com.hazelcast.instance.impl.Node in project hazelcast by hazelcast.
The class CachePutAllTest, method testPutAll.
@Test
public void testPutAll() {
    ICache<String, String> cache = createCache();
    String cacheName = cache.getName();
    Map<String, String> entries = createAndFillEntries();
    cache.putAll(entries);
    // Verify that put-all works
    for (Map.Entry<String, String> entry : entries.entrySet()) {
        String key = entry.getKey();
        String expectedValue = entries.get(key);
        String actualValue = cache.get(key);
        assertEquals(expectedValue, actualValue);
    }
    Node node = getNode(hazelcastInstance);
    InternalPartitionService partitionService = node.getPartitionService();
    SerializationService serializationService = node.getSerializationService();
    // Verify that backup of put-all works
    for (Map.Entry<String, String> entry : entries.entrySet()) {
        String key = entry.getKey();
        String expectedValue = entries.get(key);
        Data keyData = serializationService.toData(key);
        int keyPartitionId = partitionService.getPartitionId(keyData);
        for (int i = 0; i < INSTANCE_COUNT; i++) {
            Node n = getNode(hazelcastInstances[i]);
            ICacheService cacheService = n.getNodeEngine().getService(ICacheService.SERVICE_NAME);
            ICacheRecordStore recordStore = cacheService.getRecordStore("/hz/" + cacheName, keyPartitionId);
            assertNotNull(recordStore);
            String actualValue = serializationService.toObject(recordStore.get(keyData, null));
            assertEquals(expectedValue, actualValue);
        }
    }
}
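Outside the test harness, the putAll/get round trip itself uses only the public ICache API. A minimal sketch, assuming the JCache API (javax.cache) is on the classpath and using an illustrative cache name declared up front via CacheSimpleConfig so that getCacheManager().getCache(...) can resolve it:

import java.util.HashMap;
import java.util.Map;

import com.hazelcast.cache.ICache;
import com.hazelcast.config.CacheSimpleConfig;
import com.hazelcast.config.Config;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;

public class CachePutAllSketch {
    public static void main(String[] args) {
        Config config = new Config();
        // "putAllSketch" is an illustrative cache name, not one from the test.
        config.addCacheConfig(new CacheSimpleConfig().setName("putAllSketch"));
        HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
        try {
            ICache<String, String> cache = hz.getCacheManager().getCache("putAllSketch");
            Map<String, String> entries = new HashMap<>();
            entries.put("k1", "v1");
            entries.put("k2", "v2");
            cache.putAll(entries);
            // Every entry written by putAll should be readable back.
            entries.forEach((key, expected) -> {
                if (!expected.equals(cache.get(key))) {
                    throw new AssertionError("unexpected value for " + key);
                }
            });
        } finally {
            hz.shutdown();
        }
    }
}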
Use of com.hazelcast.instance.impl.Node in project hazelcast by hazelcast.
The class CachePutAllTest, method createAndFillEntries.
private Map<String, String> createAndFillEntries() {
    final int ENTRY_COUNT_PER_PARTITION = 3;
    Node node = getNode(hazelcastInstance);
    int partitionCount = node.getPartitionService().getPartitionCount();
    Map<String, String> entries = new HashMap<String, String>(partitionCount * ENTRY_COUNT_PER_PARTITION);
    for (int partitionId = 0; partitionId < partitionCount; partitionId++) {
        for (int i = 0; i < ENTRY_COUNT_PER_PARTITION; i++) {
            String key = generateKeyForPartition(hazelcastInstance, partitionId);
            String value = generateRandomString(16);
            entries.put(key, value);
        }
    }
    return entries;
}
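The helper relies on the test-support generateKeyForPartition to cover every partition. The key-to-partition mapping it depends on can be inspected with the public PartitionService, as in this sketch (the key value is arbitrary):

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.partition.PartitionService;

public class KeyPartitionSketch {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        try {
            PartitionService partitionService = hz.getPartitionService();
            // Shows which of the cluster's partitions a given key hashes to.
            String key = "example-key";
            int partitionId = partitionService.getPartition(key).getPartitionId();
            System.out.println(key + " -> partition " + partitionId
                    + " of " + partitionService.getPartitions().size());
        } finally {
            hz.shutdown();
        }
    }
}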