Usage of com.hazelcast.map.impl.MapServiceContext in the hazelcast project:
the writeWriteBehindState method of the MapChunk class.
/**
 * Serializes the write-behind (map-store) state of the given record store into {@code out}.
 *
 * <p>Wire format: a leading boolean flag (whether any state follows), then three
 * length-prefixed sections — delayed entries, flush sequences, and per-transaction
 * reserved-capacity counters. The write order must match the corresponding reader.
 *
 * @param out         destination for the serialized state
 * @param recordStore record store whose write-behind state is replicated
 * @throws IOException if writing to {@code out} fails
 */
private void writeWriteBehindState(ObjectDataOutput out, RecordStore recordStore) throws IOException {
    MapContainer container = recordStore.getMapContainer();
    MapConfig config = container.getMapConfig();
    // State is only present when this replica index is covered by the configured
    // backup count AND the map actually uses a write-behind map store.
    boolean hasState = getReplicaIndex() <= config.getTotalBackupCount()
            && container.getMapStoreContext().isWriteBehindMapStoreEnabled();
    out.writeBoolean(hasState);
    if (!hasState) {
        return;
    }

    MapServiceContext serviceContext = container.getMapServiceContext();
    WriteBehindStore store = (WriteBehindStore) recordStore.getMapDataStore();

    // Section 1: delayed entries still waiting in the write-behind queue.
    List<DelayedEntry> pending = store.getWriteBehindQueue().asList();
    out.writeInt(pending.size());
    for (DelayedEntry pendingEntry : pending) {
        // Keys/values may still be in object form; normalize to Data before writing.
        IOUtil.writeData(out, serviceContext.toData(pendingEntry.getKey()));
        IOUtil.writeData(out, serviceContext.toData(pendingEntry.getValue()));
        out.writeLong(pendingEntry.getExpirationTime());
        out.writeLong(pendingEntry.getStoreTime());
        out.writeInt(pendingEntry.getPartitionId());
        out.writeLong(pendingEntry.getSequence());
        UUIDSerializationUtil.writeUUID(out, pendingEntry.getTxnId());
    }

    // Section 2: flush sequences (snapshot the queue to get a stable size + iteration).
    Deque<WriteBehindStore.Sequence> flushSnapshot = new ArrayDeque<>(store.getFlushSequences());
    out.writeInt(flushSnapshot.size());
    for (WriteBehindStore.Sequence flushSequence : flushSnapshot) {
        out.writeLong(flushSequence.getSequence());
        out.writeBoolean(flushSequence.isFullFlush());
    }

    // Section 3: reserved capacity counters keyed by transaction id.
    Map<UUID, Long> reservations = store.getTxnReservedCapacityCounter().getReservedCapacityCountPerTxnId();
    out.writeInt(reservations.size());
    for (Map.Entry<UUID, Long> reservation : reservations.entrySet()) {
        writeUUID(out, reservation.getKey());
        out.writeLong(reservation.getValue());
    }
}
Usage of com.hazelcast.map.impl.MapServiceContext in the hazelcast project:
the createQueryCaches method of the PostJoinMapOperation class.
/**
 * Recreates publisher-side query cache registries for every accumulator carried by
 * this post-join operation, and registers a no-op marker listener per map so the
 * map is tracked as having a local listener.
 */
private void createQueryCaches() {
    MapService service = getService();
    MapServiceContext serviceContext = service.getMapServiceContext();
    QueryCacheContext cacheContext = serviceContext.getQueryCacheContext();
    MapPublisherRegistry publisherRegistryByMap =
            cacheContext.getPublisherContext().getMapPublisherRegistry();

    for (AccumulatorInfo accumulatorInfo : infoList) {
        addAccumulatorInfo(cacheContext, accumulatorInfo);

        PublisherRegistry registry = publisherRegistryByMap.getOrCreate(accumulatorInfo.getMapName());
        registry.getOrCreate(accumulatorInfo.getCacheId());

        // Marker listener: intentionally does nothing, its presence alone matters.
        ListenerAdapter<IMapEvent> markerListener = event -> {
        };
        serviceContext.addLocalListenerAdapter(markerListener, accumulatorInfo.getMapName());
    }
}
Usage of com.hazelcast.map.impl.MapServiceContext in the hazelcast project:
the writeData method of the WriteBehindStateHolder class.
/**
 * Serializes this holder's write-behind state: per-map delayed entries, per-map
 * flush sequences, and per-map transaction reserved-capacity counters, in that
 * order. The wire format must stay in sync with the corresponding {@code readData}.
 *
 * @param out destination for the serialized state
 * @throws IOException if writing to {@code out} fails
 */
@Override
public void writeData(ObjectDataOutput out) throws IOException {
    MapService mapService = mapReplicationOperation.getService();
    MapServiceContext mapServiceContext = mapService.getMapServiceContext();

    writeDelayedEntries(out, mapServiceContext);
    writeFlushSequences(out);
    writeTxnReservations(out);
}

/** Writes the map-name -> delayed-entry-list section. */
private void writeDelayedEntries(ObjectDataOutput out, MapServiceContext mapServiceContext)
        throws IOException {
    out.writeInt(delayedEntries.size());
    for (Map.Entry<String, List<DelayedEntry>> entry : delayedEntries.entrySet()) {
        out.writeString(entry.getKey());
        List<DelayedEntry> delayedEntryList = entry.getValue();
        out.writeInt(delayedEntryList.size());
        for (DelayedEntry e : delayedEntryList) {
            // Keys/values may still be in object form; normalize to Data before writing.
            Data key = mapServiceContext.toData(e.getKey());
            Data value = mapServiceContext.toData(e.getValue());
            IOUtil.writeData(out, key);
            IOUtil.writeData(out, value);
            out.writeLong(e.getExpirationTime());
            out.writeLong(e.getStoreTime());
            out.writeInt(e.getPartitionId());
            out.writeLong(e.getSequence());
            UUIDSerializationUtil.writeUUID(out, e.getTxnId());
        }
    }
}

/** Writes the map-name -> flush-sequence-queue section. */
private void writeFlushSequences(ObjectDataOutput out) throws IOException {
    out.writeInt(flushSequences.size());
    for (Map.Entry<String, Queue<WriteBehindStore.Sequence>> entry : flushSequences.entrySet()) {
        out.writeString(entry.getKey());
        Queue<WriteBehindStore.Sequence> queue = entry.getValue();
        out.writeInt(queue.size());
        for (WriteBehindStore.Sequence sequence : queue) {
            out.writeLong(sequence.getSequence());
            out.writeBoolean(sequence.isFullFlush());
        }
    }
}

/** Writes the map-name -> (txn-id -> reserved capacity) section. */
private void writeTxnReservations(ObjectDataOutput out) throws IOException {
    out.writeInt(reservationsByTxnIdPerMap.size());
    for (Map.Entry<String, Map<UUID, Long>> entry : reservationsByTxnIdPerMap.entrySet()) {
        out.writeString(entry.getKey());
        Map<UUID, Long> reservationsByTxnId = entry.getValue();
        out.writeInt(reservationsByTxnId.size());
        for (Map.Entry<UUID, Long> counterByTxnId : reservationsByTxnId.entrySet()) {
            // Qualified call for consistency with writeDelayedEntries above.
            UUIDSerializationUtil.writeUUID(out, counterByTxnId.getKey());
            out.writeLong(counterByTxnId.getValue());
        }
    }
}
Usage of com.hazelcast.map.impl.MapServiceContext in the hazelcast project:
the getContext method of the MadePublishableOperation class.
/** Returns the query cache context owned by this node's map service. */
private QueryCacheContext getContext() {
    MapService mapService = getService();
    return mapService.getMapServiceContext().getQueryCacheContext();
}
Usage of com.hazelcast.map.impl.MapServiceContext in the hazelcast project:
the distortRandomPartitionUuid method of the ClientMapInvalidationMetadataDistortionTest class.
/**
 * Overwrites the Near Cache invalidation meta-data UUID of one randomly chosen
 * partition on {@code member}, simulating distorted invalidation metadata.
 *
 * @param member the cluster member whose partition UUID is distorted
 */
private void distortRandomPartitionUuid(HazelcastInstance member) {
    NodeEngineImpl nodeEngine = getNodeEngineImpl(member);
    MapService mapService = nodeEngine.getService(SERVICE_NAME);
    Invalidator invalidator = mapService.getMapServiceContext()
            .getMapNearCacheManager()
            .getInvalidator();

    // Pick a random partition id in [0, partitionCount).
    int partitionCount = nodeEngine.getPartitionService().getPartitionCount();
    int randomPartitionId = getInt(partitionCount);

    MetaDataGenerator metaDataGenerator = invalidator.getMetaDataGenerator();
    metaDataGenerator.setUuid(randomPartitionId, UuidUtil.newUnsecureUUID());
}
Aggregations