Usage of com.hazelcast.map.impl.record.Record in the Hazelcast project.
From class TxnDeleteOperation, method run():
@Override
public void run() {
    // Release the transactional lock held on this key before applying the delete.
    recordStore.unlock(dataKey, ownerUuid, getThreadId(), getCallId());

    // Optimistic check: proceed only if the key is absent or its version is
    // still the one observed inside the transaction.
    Record existing = recordStore.getRecord(dataKey);
    boolean versionUnchanged = existing == null || existing.getVersion() == version;
    if (!versionUnchanged) {
        return;
    }

    dataOldValue = getNodeEngine().toData(recordStore.remove(dataKey));
    // The delete is considered successful only when there was a value to remove.
    successful = dataOldValue != null;
}
Usage of com.hazelcast.map.impl.record.Record in the Hazelcast project.
From class EntryOperator, method onAddedOrUpdated():
private void onAddedOrUpdated() {
    // Pick the representation to store, depending on the map's in-memory format.
    Object storedValue;
    if (inMemoryFormat == OBJECT) {
        storedValue = entry.getValue();
    } else {
        storedValue = entry.getByPrioritizingDataValue();
    }

    if (backup) {
        // Backup replicas only mirror the write; no interceptors or stats.
        recordStore.putBackup(dataKey, storedValue, entry.getNewTtl(), UNSET, UNSET, NOT_WAN);
        return;
    }

    recordStore.setWithUncountedAccess(dataKey, storedValue, entry.getNewTtl(), UNSET);
    if (mapOperation.isPostProcessing(recordStore)) {
        // Post-processing may have replaced the stored value; re-read it so the
        // entry and the interceptors see what was actually persisted.
        Record stored = recordStore.getRecord(dataKey);
        storedValue = stored == null ? null : stored.getValue();
        entry.setValueByInMemoryFormat(inMemoryFormat, storedValue);
    }
    mapServiceContext.interceptAfterPut(mapContainer.getInterceptorRegistry(), storedValue);
    stats.incrementPutLatencyNanos(Timer.nanosElapsed(startTimeNanos));
}
Usage of com.hazelcast.map.impl.record.Record in the Hazelcast project.
From class MapReplicationStateHolder, method readRecordStoreData():
/**
 * Deserializes one record store's content for {@code mapName} from {@code in}.
 * <p>
 * Entries are read as flat triplets per record — key, record, expiry
 * metadata — into a single list (hence the {@code numOfRecords * 3}
 * capacity), followed by the store's local stats.
 *
 * @param mapName name of the map whose data is being read
 * @param in      input stream positioned at this record store's data
 * @throws IOException if reading from the stream fails
 */
protected void readRecordStoreData(String mapName, ObjectDataInput in) throws IOException {
    int numOfRecords = in.readInt();
    // Typed as List<Object> instead of a raw List: the list intentionally mixes
    // Data keys, Records and ExpiryMetadata as consecutive triplets.
    List<Object> keyRecord = new ArrayList<>(numOfRecords * 3);
    for (int j = 0; j < numOfRecords; j++) {
        Data dataKey = IOUtil.readData(in);
        Record record = Records.readRecord(in);
        ExpiryMetadata expiryMetadata = Records.readExpiry(in);
        keyRecord.add(dataKey);
        keyRecord.add(record);
        keyRecord.add(expiryMetadata);
    }
    // Stats are written immediately after the entries by the serializing side.
    LocalRecordStoreStatsImpl stats = new LocalRecordStoreStatsImpl();
    stats.readData(in);
    recordStoreStatsPerMapName.put(mapName, stats);
    data.put(mapName, keyRecord);
}
Usage of com.hazelcast.map.impl.record.Record in the Hazelcast project.
From class MapReplicationStateHolder, method writeData():
@Override
public void writeData(ObjectDataOutput out) throws IOException {
    // 1) Record stores: name, store contents, then the store's local stats.
    out.writeInt(storesByMapName.size());
    for (Map.Entry<String, RecordStore<Record>> storeEntry : storesByMapName.entrySet()) {
        String name = storeEntry.getKey();
        RecordStore<Record> store = storeEntry.getValue();
        out.writeString(name);
        writeRecordStore(name, store, out);
        store.getStats().writeData(out);
    }

    // 2) Per-map "loaded" flags.
    out.writeInt(loaded.size());
    for (Map.Entry<String, Boolean> flag : loaded.entrySet()) {
        out.writeString(flag.getKey());
        out.writeBoolean(flag.getValue());
    }

    // 3) Index definitions.
    out.writeInt(mapIndexInfos.size());
    for (MapIndexInfo indexInfo : mapIndexInfos) {
        out.writeObject(indexInfo);
    }
}
Usage of com.hazelcast.map.impl.record.Record in the Hazelcast project.
From class MapChunk, method writeChunk():
/**
 * Streams this map's main data — its key-value pairs — to {@code out}.
 * <p>
 * Writing stops as soon as the chunk size limit is reached; any remaining
 * entries are emitted in subsequent chunks. A {@code null} key is written
 * last to mark the end of this chunk.
 */
protected final void writeChunk(ObjectDataOutput out, MapChunkContext context) throws IOException {
    SerializationService ss = context.getSerializationService();
    out.writeString(context.getMapName());

    long written = 0;
    Iterator<Map.Entry<Data, Record>> iterator = context.getIterator();
    while (iterator.hasNext()) {
        Map.Entry<Data, Record> next = iterator.next();
        Data key = next.getKey();
        Record value = next.getValue();

        IOUtil.writeData(out, key);
        Records.writeRecord(out, value, ss.toData(value.getValue()));
        Records.writeExpiry(out, context.getExpiryMetadata(key));
        written++;

        if (isEndOfChunk.getAsBoolean()) {
            // Chunk limit reached; the rest goes into the next chunk.
            break;
        }
    }

    incrementReplicationRecordCount(written);
    if (!iterator.hasNext()) {
        // The whole record store has been streamed out.
        incrementReplicationCount();
    }
    // A null key signals the end of this chunk to the reader.
    IOUtil.writeData(out, null);
}
Aggregations