Use of com.hazelcast.internal.nio.BufferObjectDataOutput in project hazelcast by hazelcast.
From the class RabinFingerprintTest, the method testRabinFingerprintIsConsistentWithWrittenData:
@Test
public void testRabinFingerprintIsConsistentWithWrittenData() throws IOException {
    SchemaWriter writer = new SchemaWriter("typeName");
    writer.addField(new FieldDescriptor("a", FieldKind.BOOLEAN));
    writer.addField(new FieldDescriptor("b", FieldKind.ARRAY_OF_BOOLEAN));
    writer.addField(new FieldDescriptor("c", FieldKind.TIMESTAMP_WITH_TIMEZONE));
    Schema schema = writer.build();
    InternalSerializationService internalSerializationService = new DefaultSerializationServiceBuilder()
            .setByteOrder(ByteOrder.LITTLE_ENDIAN)
            .build();
    BufferObjectDataOutput output = internalSerializationService.createObjectDataOutput();
    schema.writeData(output);
    long fingerprint64 = RabinFingerprint.fingerprint64(output.toByteArray());
    assertEquals(fingerprint64, schema.getSchemaId());
}
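The test above fingerprints whatever bytes the output buffer holds at that point. As a minimal, self-contained sketch of the same pattern, assuming nothing beyond the calls already shown here plus ObjectDataOutput's standard primitive writers (writeInt, writeString):

InternalSerializationService ss = new DefaultSerializationServiceBuilder().build();
BufferObjectDataOutput out = ss.createObjectDataOutput();
// any sequence of writes works; the test above delegates them to schema.writeData(output)
out.writeInt(42);
out.writeString("typeName");
// toByteArray() snapshots everything written so far, which is what gets fingerprinted
byte[] bytes = out.toByteArray();
long fingerprint = RabinFingerprint.fingerprint64(bytes);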
Use of com.hazelcast.internal.nio.BufferObjectDataOutput in project hazelcast by hazelcast.
From the class GenericRecordBuilderTest, the method testWriteReadGenericRecordToObjectDataInput:
@Test
public void testWriteReadGenericRecordToObjectDataInput() throws IOException {
    ClassDefinitionBuilder classDefinitionBuilder = new ClassDefinitionBuilder(1, 1);
    classDefinitionBuilder.addIntField("age");
    classDefinitionBuilder.addStringField("name");
    ClassDefinition classDefinition = classDefinitionBuilder.build();
    InternalSerializationService serializationService = new DefaultSerializationServiceBuilder().build();
    BufferObjectDataOutput objectDataOutput = serializationService.createObjectDataOutput();
    List<GenericRecord> list = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        GenericRecord record = GenericRecordBuilder.portable(classDefinition)
                .setInt32("age", i)
                .setString("name", " " + i)
                .build();
        objectDataOutput.writeObject(record);
        list.add(record);
    }
    byte[] bytes = objectDataOutput.toByteArray();
    BufferObjectDataInput objectDataInput = serializationService.createObjectDataInput(bytes);
    for (int i = 0; i < 10; i++) {
        GenericRecord record = objectDataInput.readObject();
        assertEquals(list.get(i), record);
    }
}
Use of com.hazelcast.internal.nio.BufferObjectDataOutput in project hazelcast by hazelcast.
From the class CollectionTxnUtilTest, the method testWriteRead:
@Test
public void testWriteRead() throws IOException {
    InternalSerializationService ss = new DefaultSerializationServiceBuilder().build();
    BufferObjectDataOutput out = ss.createObjectDataOutput();
    // operationList is a test fixture field populated elsewhere in the test class
    CollectionTxnUtil.write(out, operationList);
    BufferObjectDataInput in = ss.createObjectDataInput(out.toByteArray());
    List<Operation> resultList = CollectionTxnUtil.read(in);
    assertEquals(operationList.size(), resultList.size());
    for (int i = 0; i < operationList.size(); i++) {
        assertEquals(operationList.get(i), resultList.get(i));
    }
}
Use of com.hazelcast.internal.nio.BufferObjectDataOutput in project hazelcast by hazelcast.
From the class AbstractSerializationService, the method toBytes:
private byte[] toBytes(Object obj, int leftPadding, boolean writeHash, PartitioningStrategy strategy,
                       ByteOrder serializerTypeIdByteOrder, boolean includeSchema) {
    checkNotNull(obj);
    checkNotNull(serializerTypeIdByteOrder);
    BufferPool pool = bufferPoolThreadLocal.get();
    BufferObjectDataOutput out = pool.takeOutputBuffer();
    try {
        out.position(leftPadding);
        SerializerAdapter serializer = serializerFor(obj, includeSchema);
        if (writeHash) {
            int partitionHash = calculatePartitionHash(obj, strategy);
            out.writeInt(partitionHash, BIG_ENDIAN);
        }
        out.writeInt(serializer.getTypeId(), serializerTypeIdByteOrder);
        serializer.write(out, obj);
        return out.toByteArray();
    } catch (Throwable e) {
        throw handleSerializeException(obj, e);
    } finally {
        pool.returnOutputBuffer(out);
    }
}
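For orientation, the layout of the byte array this method returns follows directly from the writes above; this is a sketch of the flag-dependent framing, not a documented wire format, and the payload size depends entirely on the serializer:

// [0, leftPadding)   left padding, skipped via out.position(leftPadding)
// 4 bytes            partition hash, BIG_ENDIAN (present only when writeHash is true)
// 4 bytes            serializer type id, written in serializerTypeIdByteOrder
// remaining bytes    payload produced by serializer.write(out, obj)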
Use of com.hazelcast.internal.nio.BufferObjectDataOutput in project hazelcast by hazelcast.
From the class PortableSerializer, the method writePortableGenericRecordInternal:
@SuppressWarnings({"checkstyle:MethodLength", "checkstyle:CyclomaticComplexity"})
void writePortableGenericRecordInternal(ObjectDataOutput out, PortableGenericRecord record) throws IOException {
    ClassDefinition cd = record.getClassDefinition();
    // Class definition compatibility will be checked implicitly on the
    // register call below.
    context.registerClassDefinition(cd, context.shouldCheckClassDefinitionErrors());
    out.writeInt(cd.getVersion());
    BufferObjectDataOutput output = (BufferObjectDataOutput) out;
    DefaultPortableWriter writer = new DefaultPortableWriter(this, output, cd);
    Set<String> fieldNames = cd.getFieldNames();
    for (String fieldName : fieldNames) {
        switch (cd.getFieldType(fieldName)) {
            case PORTABLE:
                writer.writeGenericRecord(fieldName, record.getGenericRecord(fieldName));
                break;
            case BYTE:
                writer.writeByte(fieldName, record.getInt8(fieldName));
                break;
            case BOOLEAN:
                writer.writeBoolean(fieldName, record.getBoolean(fieldName));
                break;
            case CHAR:
                writer.writeChar(fieldName, record.getChar(fieldName));
                break;
            case SHORT:
                writer.writeShort(fieldName, record.getInt16(fieldName));
                break;
            case INT:
                writer.writeInt(fieldName, record.getInt32(fieldName));
                break;
            case LONG:
                writer.writeLong(fieldName, record.getInt64(fieldName));
                break;
            case FLOAT:
                writer.writeFloat(fieldName, record.getFloat32(fieldName));
                break;
            case DOUBLE:
                writer.writeDouble(fieldName, record.getFloat64(fieldName));
                break;
            case UTF:
                writer.writeString(fieldName, record.getString(fieldName));
                break;
            case DECIMAL:
                writer.writeDecimal(fieldName, record.getDecimal(fieldName));
                break;
            case TIME:
                writer.writeTime(fieldName, record.getTime(fieldName));
                break;
            case DATE:
                writer.writeDate(fieldName, record.getDate(fieldName));
                break;
            case TIMESTAMP:
                writer.writeTimestamp(fieldName, record.getTimestamp(fieldName));
                break;
            case TIMESTAMP_WITH_TIMEZONE:
                writer.writeTimestampWithTimezone(fieldName, record.getTimestampWithTimezone(fieldName));
                break;
            case PORTABLE_ARRAY:
                writer.writeGenericRecordArray(fieldName, record.getArrayOfGenericRecord(fieldName));
                break;
            case BYTE_ARRAY:
                writer.writeByteArray(fieldName, record.getArrayOfInt8(fieldName));
                break;
            case BOOLEAN_ARRAY:
                writer.writeBooleanArray(fieldName, record.getArrayOfBoolean(fieldName));
                break;
            case CHAR_ARRAY:
                writer.writeCharArray(fieldName, record.getArrayOfChar(fieldName));
                break;
            case SHORT_ARRAY:
                writer.writeShortArray(fieldName, record.getArrayOfInt16(fieldName));
                break;
            case INT_ARRAY:
                writer.writeIntArray(fieldName, record.getArrayOfInt32(fieldName));
                break;
            case LONG_ARRAY:
                writer.writeLongArray(fieldName, record.getArrayOfInt64(fieldName));
                break;
            case FLOAT_ARRAY:
                writer.writeFloatArray(fieldName, record.getArrayOfFloat32(fieldName));
                break;
            case DOUBLE_ARRAY:
                writer.writeDoubleArray(fieldName, record.getArrayOfFloat64(fieldName));
                break;
            case UTF_ARRAY:
                writer.writeStringArray(fieldName, record.getArrayOfString(fieldName));
                break;
            case DECIMAL_ARRAY:
                writer.writeDecimalArray(fieldName, record.getArrayOfDecimal(fieldName));
                break;
            case TIME_ARRAY:
                writer.writeTimeArray(fieldName, record.getArrayOfTime(fieldName));
                break;
            case DATE_ARRAY:
                writer.writeDateArray(fieldName, record.getArrayOfDate(fieldName));
                break;
            case TIMESTAMP_ARRAY:
                writer.writeTimestampArray(fieldName, record.getArrayOfTimestamp(fieldName));
                break;
            case TIMESTAMP_WITH_TIMEZONE_ARRAY:
                writer.writeTimestampWithTimezoneArray(fieldName, record.getArrayOfTimestampWithTimezone(fieldName));
                break;
            default:
                throw new IllegalStateException("Unexpected field type: " + cd.getFieldType(fieldName));
        }
    }
    writer.end();
}
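To see how a record reaches this writer, here is a rough caller-side sketch reusing only the builder API from the GenericRecordBuilderTest example above; the assumption (not shown in this excerpt) is that toData routes a portable GenericRecord through PortableSerializer and hence through writePortableGenericRecordInternal:

ClassDefinition cd = new ClassDefinitionBuilder(1, 1)
        .addIntField("age")
        .addStringField("name")
        .build();
GenericRecord record = GenericRecordBuilder.portable(cd)
        .setInt32("age", 7)
        .setString("name", "joe")
        .build();
InternalSerializationService ss = new DefaultSerializationServiceBuilder().build();
// serializing the record walks the ClassDefinition fields, as in the switch above
Data data = ss.toData(record);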