Use of com.hazelcast.core.HazelcastException in project hazelcast by hazelcast.
From the class ClientSmartInvocationServiceImpl, method ensureOwnerConnectionAvailable:
private void ensureOwnerConnectionAvailable() throws IOException {
    ClientClusterService clientClusterService = client.getClientClusterService();
    Address ownerConnectionAddress = clientClusterService.getOwnerConnectionAddress();
    boolean isOwnerConnectionAvailable = ownerConnectionAddress != null
            && connectionManager.getConnection(ownerConnectionAddress) != null;
    if (!isOwnerConnectionAvailable) {
        if (isShutdown()) {
            throw new HazelcastException("ConnectionManager is not active!");
        }
        throw new IOException("Not able to setup owner connection!");
    }
}
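The two exception types let callers distinguish a transient failure (the owner connection is simply not established yet, IOException) from a permanent one (the client is shut down, HazelcastException). Below is a minimal retry sketch built on that distinction; OwnerConnectionRetry, its constants, and the OwnerAction callback are hypothetical and not part of the Hazelcast client.

import java.io.IOException;

import com.hazelcast.core.HazelcastException;

// Hypothetical caller sketch: retries while the owner connection is still being
// established (IOException) and fails fast once the client is shut down, because
// HazelcastException is unchecked and simply propagates out of the loop.
final class OwnerConnectionRetry {

    private static final int MAX_RETRIES = 5;
    private static final long RETRY_PAUSE_MILLIS = 100;

    interface OwnerAction {
        void run() throws IOException;
    }

    static void runWithRetry(OwnerAction action) throws InterruptedException {
        for (int attempt = 0; attempt < MAX_RETRIES; attempt++) {
            try {
                action.run();
                return;
            } catch (IOException e) {
                // transient: owner connection not ready yet, back off and retry
                Thread.sleep(RETRY_PAUSE_MILLIS);
            }
        }
        throw new HazelcastException("Owner connection could not be established after "
                + MAX_RETRIES + " attempts");
    }
}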
Use of com.hazelcast.core.HazelcastException in project hazelcast by hazelcast.
From the class XmlConfigLocator, method loadFromWorkingDirectory:
private boolean loadFromWorkingDirectory() {
    File file = new File("hazelcast.xml");
    if (!file.exists()) {
        LOGGER.finest("Could not find 'hazelcast.xml' in working directory.");
        return false;
    }
    LOGGER.info("Loading 'hazelcast.xml' from working directory.");
    configurationFile = file;
    try {
        in = new FileInputStream(file);
    } catch (FileNotFoundException e) {
        throw new HazelcastException("Failed to open file: " + file.getAbsolutePath(), e);
    }
    return true;
}
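The same wrap-and-rethrow idiom works for user code that loads its own configuration resources: the checked FileNotFoundException is converted into the unchecked HazelcastException so callers are not forced to declare IO errors they cannot recover from. A minimal sketch; CustomConfigLoader and its path parameter are assumptions, not part of the Hazelcast API.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;

import com.hazelcast.core.HazelcastException;

// Hypothetical loader: converts the checked FileNotFoundException into the
// unchecked HazelcastException, mirroring XmlConfigLocator.loadFromWorkingDirectory.
final class CustomConfigLoader {

    static InputStream openConfig(String path) {
        File file = new File(path);
        try {
            return new FileInputStream(file);
        } catch (FileNotFoundException e) {
            throw new HazelcastException("Failed to open file: " + file.getAbsolutePath(), e);
        }
    }
}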
Use of com.hazelcast.core.HazelcastException in project hazelcast by hazelcast.
From the class MapProxyImpl, method aggregate:
@Override
public <SuppliedValue, Result> Result aggregate(Supplier<K, V, SuppliedValue> supplier, Aggregation<K, SuppliedValue, Result> aggregation, JobTracker jobTracker) {
    checkTrue(NATIVE != mapConfig.getInMemoryFormat(), "NATIVE storage format is not supported for MapReduce");
    try {
        isNotNull(jobTracker, "jobTracker");
        KeyValueSource<K, V> keyValueSource = KeyValueSource.fromMap(this);
        Job<K, V> job = jobTracker.newJob(keyValueSource);
        Mapper mapper = aggregation.getMapper(supplier);
        CombinerFactory combinerFactory = aggregation.getCombinerFactory();
        ReducerFactory reducerFactory = aggregation.getReducerFactory();
        Collator collator = aggregation.getCollator();
        MappingJob mappingJob = job.mapper(mapper);
        ReducingSubmittableJob reducingJob;
        if (combinerFactory == null) {
            reducingJob = mappingJob.reducer(reducerFactory);
        } else {
            reducingJob = mappingJob.combiner(combinerFactory).reducer(reducerFactory);
        }
        ICompletableFuture<Result> future = reducingJob.submit(collator);
        return future.get();
    } catch (Exception e) {
        // TODO: not what we want, because it can lead to wrapping of HazelcastException
        throw new HazelcastException(e);
    }
}
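The TODO points at the weakness of this catch block: when the MapReduce job itself fails with a HazelcastException, that exception gets wrapped inside another HazelcastException. One way to avoid the double wrapping is to rethrow HazelcastException unchanged and only wrap everything else; the helper below is an illustrative sketch under that assumption, not the fix used by Hazelcast.

import com.hazelcast.core.HazelcastException;

// Illustrative alternative for the catch block above: rethrow a HazelcastException
// unchanged and only wrap other exceptions, so callers never see a HazelcastException
// nested inside another one.
final class AggregateExceptions {

    static RuntimeException wrap(Exception e) {
        if (e instanceof HazelcastException) {
            return (HazelcastException) e;
        }
        if (e instanceof InterruptedException) {
            // preserve the interrupt flag before wrapping
            Thread.currentThread().interrupt();
        }
        return new HazelcastException(e);
    }
}

With such a helper, the catch block would end with throw AggregateExceptions.wrap(e) instead of throw new HazelcastException(e).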
Use of com.hazelcast.core.HazelcastException in project hazelcast by hazelcast.
From the class ClientMapProxy, method aggregate:
@Override
public <SuppliedValue, Result> Result aggregate(Supplier<K, V, SuppliedValue> supplier, Aggregation<K, SuppliedValue, Result> aggregation, JobTracker jobTracker) {
    try {
        Preconditions.isNotNull(jobTracker, "jobTracker");
        KeyValueSource<K, V> keyValueSource = KeyValueSource.fromMap(this);
        Job<K, V> job = jobTracker.newJob(keyValueSource);
        Mapper mapper = aggregation.getMapper(supplier);
        CombinerFactory combinerFactory = aggregation.getCombinerFactory();
        ReducerFactory reducerFactory = aggregation.getReducerFactory();
        Collator collator = aggregation.getCollator();
        MappingJob mappingJob = job.mapper(mapper);
        ReducingSubmittableJob reducingJob;
        if (combinerFactory != null) {
            reducingJob = mappingJob.combiner(combinerFactory).reducer(reducerFactory);
        } else {
            reducingJob = mappingJob.reducer(reducerFactory);
        }
        ICompletableFuture<Result> future = reducingJob.submit(collator);
        return future.get();
    } catch (Exception e) {
        throw new HazelcastException(e);
    }
}
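From the caller's point of view, every failure inside the aggregation, including the ExecutionException thrown by future.get(), arrives as a HazelcastException, so the real problem sits somewhere in the cause chain. A hedged sketch of caller-side handling; the Callable wrapper and the fallback-value policy are assumptions.

import java.util.concurrent.Callable;

import com.hazelcast.core.HazelcastException;

// Illustrative caller-side handling: because aggregate() wraps every failure,
// the root cause is found by walking the cause chain of the HazelcastException.
final class AggregationCalls {

    static <R> R callOrDefault(Callable<R> aggregateCall, R fallback) {
        try {
            return aggregateCall.call();
        } catch (HazelcastException e) {
            Throwable root = e;
            while (root.getCause() != null) {
                root = root.getCause();
            }
            System.err.println("aggregate() failed, root cause: " + root);
            return fallback;
        } catch (Exception e) {
            throw new HazelcastException(e);
        }
    }
}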
Use of com.hazelcast.core.HazelcastException in project hazelcast by hazelcast.
From the class QueueContainer, method mapDrainIterator:
public void mapDrainIterator(int maxSize, Map map) {
    Iterator<QueueItem> iter = getItemQueue().iterator();
    for (int i = 0; i < maxSize; i++) {
        QueueItem item = iter.next();
        if (store.isEnabled() && item.getData() == null) {
            try {
                load(item);
            } catch (Exception e) {
                throw new HazelcastException(e);
            }
        }
        map.put(item.getItemId(), item.getData());
    }
}
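The load(item) call delegates to the queue's configured store, so any runtime failure raised by that user-supplied store surfaces to the drain caller as a HazelcastException. A minimal sketch of such a store, assuming the com.hazelcast.core.QueueStore interface; the in-memory backing map and the missing-value policy are illustrative only.

import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import com.hazelcast.core.QueueStore;

// Illustrative QueueStore backed by a plain map; runtime failures from load()
// in a store like this are what mapDrainIterator wraps into a HazelcastException.
public class InMemoryQueueStore implements QueueStore<String> {

    private final Map<Long, String> backing = new HashMap<Long, String>();

    @Override
    public void store(Long key, String value) {
        backing.put(key, value);
    }

    @Override
    public void storeAll(Map<Long, String> map) {
        backing.putAll(map);
    }

    @Override
    public void delete(Long key) {
        backing.remove(key);
    }

    @Override
    public void deleteAll(Collection<Long> keys) {
        for (Long key : keys) {
            backing.remove(key);
        }
    }

    @Override
    public String load(Long key) {
        String value = backing.get(key);
        if (value == null) {
            // this runtime failure is what ends up wrapped by mapDrainIterator
            throw new IllegalStateException("No persisted value for item id " + key);
        }
        return value;
    }

    @Override
    public Map<Long, String> loadAll(Collection<Long> keys) {
        Map<Long, String> result = new HashMap<Long, String>();
        for (Long key : keys) {
            result.put(key, load(key));
        }
        return result;
    }

    @Override
    public Set<Long> loadAllKeys() {
        return new HashSet<Long>(backing.keySet());
    }
}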