Use of com.hazelcast.config.CompactSerializationConfig in project hazelcast by hazelcast.
The class GenericRecordTest, method createSerializationService:
private SerializationService createSerializationService() {
    CompactSerializationConfig compactSerializationConfig = new CompactSerializationConfig();
    compactSerializationConfig.setEnabled(true);
    return new DefaultSerializationServiceBuilder()
            .setSchemaService(schemaService)
            .setConfig(new SerializationConfig().setCompactSerializationConfig(compactSerializationConfig))
            .build();
}
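The service built here is what GenericRecordTest uses to round-trip compact GenericRecord instances. A minimal sketch of such a round trip follows; the "person" type name and its fields are illustrative only, and the GenericRecordBuilder import path may differ between Hazelcast versions.

// Illustrative only: build a compact GenericRecord and round-trip it through the service.
GenericRecord record = GenericRecordBuilder.compact("person")   // "person" is a hypothetical type name
        .setString("name", "Joe")
        .setInt32("age", 42)
        .build();

SerializationService service = createSerializationService();
Data data = service.toData(record);                              // schema is published via schemaService
GenericRecord deserialized = service.toObject(data);
assertEquals(42, deserialized.getInt32("age"));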
Use of com.hazelcast.config.CompactSerializationConfig in project hazelcast by hazelcast.
The class CompactFormatSplitBrainTest, method testSchemaAccessibleAfterMergingClusters:
@Test
public void testSchemaAccessibleAfterMergingClusters() {
    Config config = smallInstanceConfig();
    config.getMapConfig("map1").getMergePolicyConfig().setPolicy(PutIfAbsentMergePolicy.class.getName());
    config.getMapConfig("map3").getMergePolicyConfig().setPolicy(PutIfAbsentMergePolicy.class.getName());
    config.getSerializationConfig().setCompactSerializationConfig(new CompactSerializationConfig().setEnabled(true));
    config.setProperty(ClusterProperty.MERGE_FIRST_RUN_DELAY_SECONDS.getName(), "1");
    config.setProperty(ClusterProperty.MERGE_NEXT_RUN_DELAY_SECONDS.getName(), "1");
    HazelcastInstance instance1 = factory.newHazelcastInstance(config);
    HazelcastInstance instance2 = factory.newHazelcastInstance(config);
    HazelcastInstance instance3 = factory.newHazelcastInstance(config);
    SplitBrainTestSupport.blockCommunicationBetween(instance1, instance3);
    closeConnectionBetween(instance1, instance3);
    SplitBrainTestSupport.blockCommunicationBetween(instance2, instance3);
    closeConnectionBetween(instance2, instance3);
    // make sure that the cluster is split as [1, 2], [3]
    assertClusterSizeEventually(2, instance1, instance2);
    assertClusterSizeEventually(1, instance3);
    IMap<Integer, EmployeeDTO> map1 = instance1.getMap("map1");
    for (int i = 0; i < 100; i++) {
        EmployeeDTO employeeDTO = new EmployeeDTO(i, 102310312);
        map1.put(i, employeeDTO);
    }
    IMap<Integer, NodeDTO> map3 = instance3.getMap("map3");
    for (int i = 0; i < 100; i++) {
        NodeDTO node = new NodeDTO(new NodeDTO(null, i), i);
        map3.put(i, node);
    }
    assertEquals(100, map1.size());
    assertEquals(100, map3.size());
    SplitBrainTestSupport.unblockCommunicationBetween(instance1, instance3);
    SplitBrainTestSupport.unblockCommunicationBetween(instance2, instance3);
    assertClusterSizeEventually(3, instance1, instance2, instance3);
    assertEquals(100, map1.size());
    assertTrueEventually(() -> assertEquals(100, map3.size()));
    int size1 = map1.keySet(Predicates.sql("age > 19")).size();
    assertEquals(80, size1);
    int size3 = map3.keySet(Predicates.sql("child.id > 19")).size();
    assertEquals(80, size3);
}
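Note that the test only enables compact serialization; it never registers EmployeeDTO or NodeDTO, so both are handled by the zero-config reflective compact serializer. Judging from the constructors and predicates used above, the DTOs are presumably shaped roughly like the sketch below; the field names are assumptions inferred from the test, not copied from the project.

// Hypothetical shapes, reconstructed from the usage above.
public class EmployeeDTO {
    private int age;     // queried as "age > 19"
    private long id;

    public EmployeeDTO(int age, long id) {
        this.age = age;
        this.id = id;
    }
    // getters, equals and hashCode omitted
}

public class NodeDTO {
    private NodeDTO child;   // nested compact field, queried as "child.id > 19"
    private int id;

    public NodeDTO(NodeDTO child, int id) {
        this.child = child;
        this.id = id;
    }
    // getters, equals and hashCode omitted
}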
Use of com.hazelcast.config.CompactSerializationConfig in project hazelcast by hazelcast.
The class CompactStreamSerializerTest, method testWithExplicitSerializer_nested:
@Test
public void testWithExplicitSerializer_nested() {
    SerializationConfig serializationConfig = new SerializationConfig();
    CompactSerializationConfig compactSerializationConfig = serializationConfig.getCompactSerializationConfig();
    compactSerializationConfig.setEnabled(true);
    compactSerializationConfig.register(EmployeeDTO.class, "employee", new CompactSerializer<EmployeeDTO>() {
        @Nonnull
        @Override
        public EmployeeDTO read(@Nonnull CompactReader in) {
            return new EmployeeDTO(in.readInt32("a"), in.readInt64("i"));
        }

        @Override
        public void write(@Nonnull CompactWriter out, @Nonnull EmployeeDTO object) {
            out.writeInt32("a", object.getAge());
            out.writeInt64("i", object.getId());
        }
    });
    compactSerializationConfig.register(EmployerDTO.class, "employer", new CompactSerializer<EmployerDTO>() {
        @Nonnull
        @Override
        public EmployerDTO read(@Nonnull CompactReader in) {
            String name = in.readString("n");
            String status = in.readString("hs");
            int age = in.readInt32("a");
            long[] ids = in.readArrayOfInt64("ids");
            EmployeeDTO s = in.readCompact("s");
            EmployeeDTO[] ss = in.readArrayOfCompact("ss", EmployeeDTO.class);
            return new EmployerDTO(name, age, status == null ? null : HiringStatus.valueOf(status), ids, s, ss);
        }

        @Override
        public void write(@Nonnull CompactWriter out, @Nonnull EmployerDTO object) {
            out.writeString("n", object.getName());
            out.writeString("hs", object.getHiringStatus() == null ? null : object.getHiringStatus().name());
            out.writeInt32("a", object.getZcode());
            out.writeArrayOfInt64("ids", object.getIds());
            out.writeCompact("s", object.getSingleEmployee());
            out.writeArrayOfCompact("ss", object.getOtherEmployees());
        }
    });
    SerializationService serializationService = new DefaultSerializationServiceBuilder()
            .setConfig(serializationConfig)
            .setSchemaService(schemaService)
            .build();
    EmployeeDTO employeeDTO = new EmployeeDTO(30, 102310312);
    long[] ids = new long[2];
    ids[0] = 22;
    ids[1] = 44;
    EmployeeDTO[] employeeDTOS = new EmployeeDTO[5];
    for (int j = 0; j < employeeDTOS.length; j++) {
        employeeDTOS[j] = new EmployeeDTO(20 + j, j * 100);
    }
    EmployerDTO employerDTO = new EmployerDTO("nbss", 40, HIRING, ids, employeeDTO, employeeDTOS);
    Data data = serializationService.toData(employerDTO);
    Object object = serializationService.toObject(data);
    EmployerDTO o = (EmployerDTO) object;
    assertEquals(employerDTO, o);
}
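The explicit serializers above decouple the compact field names ("n", "hs", "a", and so on) from the Java field names. Based on the accessors and the constructor used in the test, EmployerDTO is presumably shaped along these lines; the Java field names in this sketch are assumptions.

// Hypothetical sketch of EmployerDTO, inferred from the getters and constructor above.
public class EmployerDTO {
    private String name;                  // written as "n"
    private int zcode;                    // written as "a"
    private HiringStatus hiringStatus;    // written as "hs", stored as the enum name
    private long[] ids;                   // written as "ids"
    private EmployeeDTO singleEmployee;   // written as nested compact "s"
    private EmployeeDTO[] otherEmployees; // written as nested compact array "ss"

    public EmployerDTO(String name, int zcode, HiringStatus hiringStatus,
                       long[] ids, EmployeeDTO singleEmployee, EmployeeDTO[] otherEmployees) {
        this.name = name;
        this.zcode = zcode;
        this.hiringStatus = hiringStatus;
        this.ids = ids;
        this.singleEmployee = singleEmployee;
        this.otherEmployees = otherEmployees;
    }
    // getters, equals and hashCode omitted
}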
Use of com.hazelcast.config.CompactSerializationConfig in project hazelcast by hazelcast.
The class CompactReflectiveSerializerUnsupportedFieldsTest, method createSerializationService:
@Before
public void createSerializationService() {
    CompactSerializationConfig compactSerializationConfig = new CompactSerializationConfig();
    compactSerializationConfig.setEnabled(true);
    service = new DefaultSerializationServiceBuilder()
            .setSchemaService(schemaService)
            .setConfig(new SerializationConfig().setCompactSerializationConfig(compactSerializationConfig))
            .build();
}
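With the service assigned in this @Before method, the tests in CompactReflectiveSerializerUnsupportedFieldsTest presumably follow a pattern like the sketch below: serializing a class whose field type the reflective compact serializer cannot handle and expecting the serialization to fail. The class name and the exact exception type here are assumptions for illustration, not taken from the project.

// Illustrative only: ClassWithUnsupportedField is a hypothetical type whose field
// the reflective compact serializer is expected to reject.
@Test
public void testUnsupportedFieldThrows() {
    assertThrows(HazelcastSerializationException.class,
            () -> service.toData(new ClassWithUnsupportedField()));
}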
Use of com.hazelcast.config.CompactSerializationConfig in project hazelcast by hazelcast.
The class ClientConfigXmlGeneratorTest, method testCompactSerialization:
@Test
public void testCompactSerialization() {
    CompactSerializationConfig expected = new CompactSerializationConfig();
    expected.setEnabled(true);
    expected.register(EmployerDTO.class);
    expected.register(EmployeeDTO.class, "employee", new EmployeeDTOSerializer());
    clientConfig.getSerializationConfig().setCompactSerializationConfig(expected);
    CompactSerializationConfig actual = newConfigViaGenerator().getSerializationConfig().getCompactSerializationConfig();
    assertEquals(expected.isEnabled(), actual.isEnabled());
    // Since we don't have APIs of the form register(String) or register(String, String, String) in the
    // compact serialization config, when we read the config from XML/YAML, we store registered classes
    // in a different map.
    Map<String, TriTuple<String, String, String>> namedRegistrations
            = CompactSerializationConfigAccessor.getNamedRegistrations(actual);
    Map<String, TriTuple<Class, String, CompactSerializer>> registrations
            = CompactSerializationConfigAccessor.getRegistrations(expected);
    for (Map.Entry<String, TriTuple<Class, String, CompactSerializer>> entry : registrations.entrySet()) {
        String key = entry.getKey();
        TriTuple<Class, String, CompactSerializer> expectedRegistration = entry.getValue();
        TriTuple<String, String, String> actualRegistration = namedRegistrations.get(key);
        assertEquals(expectedRegistration.element1.getName(), actualRegistration.element1);
        assertEquals(expectedRegistration.element2, actualRegistration.element2);
        CompactSerializer serializer = expectedRegistration.element3;
        if (serializer != null) {
            assertEquals(serializer.getClass().getName(), actualRegistration.element3);
        } else {
            assertNull(actualRegistration.element3);
        }
    }
}
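The EmployeeDTOSerializer registered above is presumably a standalone CompactSerializer<EmployeeDTO>, along the lines of the anonymous serializer shown earlier in CompactStreamSerializerTest. A sketch under that assumption; the compact field names below are illustrative.

// Hypothetical sketch of EmployeeDTOSerializer.
public class EmployeeDTOSerializer implements CompactSerializer<EmployeeDTO> {
    @Nonnull
    @Override
    public EmployeeDTO read(@Nonnull CompactReader in) {
        return new EmployeeDTO(in.readInt32("age"), in.readInt64("id"));
    }

    @Override
    public void write(@Nonnull CompactWriter out, @Nonnull EmployeeDTO object) {
        out.writeInt32("age", object.getAge());
        out.writeInt64("id", object.getId());
    }
}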