Usage of example.serialization.EmployeeDTO in the hazelcast/hazelcast project:
method testQueryOnPrimitive_selectValue of class SqlClientCompactQueryTest.
@Test
public void testQueryOnPrimitive_selectValue() {
    HazelcastInstance client = factory.newHazelcastClient(clientConfig());
    IMap<Integer, Object> map = client.getMap("test");

    // Populate the map with compact-serialized employees keyed 0..9.
    for (int key = 0; key < 10; key++) {
        map.put(key, new EmployeeDTO(key, key));
    }

    // Explicit SQL mapping over the IMap, declaring the compact value type.
    // "rank" must stay quoted because it is a reserved word in SQL.
    String createMapping = "CREATE MAPPING " + "test"
            + '(' + "__key INTEGER"
            + ", age INTEGER"
            + ", \"rank\" INTEGER"
            + ", id BIGINT"
            + ", isHired BOOLEAN"
            + ", isFired BOOLEAN"
            + ") TYPE " + IMapSqlConnector.TYPE_NAME + ' '
            + "OPTIONS ("
            + '\'' + OPTION_KEY_FORMAT + "'='int'"
            + ", '" + OPTION_VALUE_FORMAT + "'='" + COMPACT_FORMAT + '\''
            + ", '" + OPTION_VALUE_COMPACT_TYPE_NAME + "'='" + EmployeeDTO.class.getName() + '\''
            + ")";
    client.getSql().execute(createMapping);

    // Exactly the employees with age 5..9 should match.
    SqlResult result = client.getSql().execute("SELECT this FROM test WHERE age >= 5");
    assertThat(result).hasSize(5);
}
Usage of example.serialization.EmployeeDTO in the hazelcast/hazelcast project:
method testClusterRestart of class CompactFormatIntegrationTest.
@Test
public void testClusterRestart() {
    EmployeeDTO employee = new EmployeeDTO(30, 102310312);
    IMap<Integer, EmployeeDTO> map = instance1.getMap("test");

    // Write once before the restart, then again after it so the compact
    // schema is re-registered on the freshly started cluster.
    map.put(1, employee);
    restartCluster();
    map.put(1, employee);

    assertEquals(employee, map.get(1));
    // Perform a query to make sure that the schema is available on the cluster
    assertEquals(1, map.values(Predicates.sql("age == 30")).size());
}
Usage of example.serialization.EmployeeDTO in the hazelcast/hazelcast project:
method testBasic of class CompactFormatIntegrationTest.
@Test
public void testBasic() {
    // Put through one member, read through the other: the compact-serialized
    // value must round-trip across the cluster.
    EmployeeDTO employee = new EmployeeDTO(30, 102310312);
    instance1.<Integer, EmployeeDTO>getMap("test").put(1, employee);

    IMap<Integer, EmployeeDTO> mapOnOtherMember = instance2.getMap("test");
    assertEquals(employee, mapOnOtherMember.get(1));
}
Usage of example.serialization.EmployeeDTO in the hazelcast/hazelcast project:
method testSchemaEvolution_fieldAdded of class CompactStreamSerializerTest.
@Test
public void testSchemaEvolution_fieldAdded() {
    // Register a custom serializer that writes an extra "surname" field to
    // mimic schema evolution. This kind of registration is not advised in
    // production code; it exists only to produce an evolved schema here.
    CompactSerializer<EmployeeDTO> evolvedSerializer = new CompactSerializer<EmployeeDTO>() {
        @Nonnull
        @Override
        public EmployeeDTO read(@Nonnull CompactReader in) {
            throw new UnsupportedOperationException("We will not read from here on this test");
        }

        @Override
        public void write(@Nonnull CompactWriter out, @Nonnull EmployeeDTO object) {
            out.writeInt32("age", object.getAge());
            out.writeInt64("id", object.getId());
            out.writeString("surname", "sir");
        }
    };

    SerializationConfig writerConfig = new SerializationConfig();
    writerConfig.getCompactSerializationConfig()
            .setEnabled(true)
            .register(EmployeeDTO.class, EmployeeDTO.class.getName(), evolvedSerializer);
    SerializationService writerService = new DefaultSerializationServiceBuilder()
            .setConfig(writerConfig)
            .setSchemaService(schemaService)
            .build();

    // Serialize with the evolved (field-added) schema.
    EmployeeDTO expected = new EmployeeDTO(20, 102310312);
    Data data = writerService.toData(expected);

    // A second service with the default (reflective) serializer must still be
    // able to read the known fields and ignore the unknown "surname" field.
    SerializationConfig readerConfig = new SerializationConfig();
    readerConfig.getCompactSerializationConfig().setEnabled(true);
    SerializationService readerService = new DefaultSerializationServiceBuilder()
            .setSchemaService(schemaService)
            .setConfig(readerConfig)
            .build();

    EmployeeDTO actual = readerService.toObject(data);
    assertEquals(expected.getAge(), actual.getAge());
    assertEquals(expected.getId(), actual.getId());
}
Usage of example.serialization.EmployeeDTO in the hazelcast/hazelcast project:
method testFieldOrder of class CompactStreamSerializerTest.
@Test
public void testFieldOrder() throws IOException {
    // Build an EmployerDTO with one nested employee and an array of them.
    EmployeeDTO singleEmployee = new EmployeeDTO(30, 102310312);
    long[] ids = {22, 44};
    EmployeeDTO[] otherEmployees = new EmployeeDTO[5];
    for (int i = 0; i < otherEmployees.length; i++) {
        otherEmployees[i] = new EmployeeDTO(20 + i, i * 100);
    }
    EmployerDTO employer = new EmployerDTO("nbss", 40, HIRING, ids, singleEmployee, otherEmployees);

    // Write the object through the reflective serializer and capture the schema.
    SchemaWriter schemaWriter = new SchemaWriter("typeName");
    new ReflectiveCompactSerializer().write(schemaWriter, employer);
    Schema schema = schemaWriter.build();

    // Fixed-size fields get offsets and no index: "zcode" is the only one.
    assertEquals(0, schema.getField("zcode").getOffset());
    assertEquals(-1, schema.getField("zcode").getIndex());

    // Variable-size fields get indexes (assigned in alphabetical order) and
    // no offset.
    String[] varSizeFieldsInOrder = {"hiringStatus", "ids", "name", "otherEmployees", "singleEmployee"};
    for (int index = 0; index < varSizeFieldsInOrder.length; index++) {
        String fieldName = varSizeFieldsInOrder[index];
        assertEquals(-1, schema.getField(fieldName).getOffset());
        assertEquals(index, schema.getField(fieldName).getIndex());
    }
}
Aggregations