Use of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast by hazelcast.
The class LogicalStreamAggTest, method test:
@Test
public void test() {
    HazelcastTable table = partitionedTable("map", asList(field(KEY, INT), field(VALUE, INT)), 1);
    List<QueryDataType> parameterTypes = asList(QueryDataType.INT, QueryDataType.INT);

    final String sql = "SELECT window_start, window_end, SUM(__key) FROM "
            + "TABLE(HOP("
            + " (SELECT * FROM TABLE(IMPOSE_ORDER((SELECT __key, this FROM map), DESCRIPTOR(__key), 1))), "
            + "DESCRIPTOR(__key), 2, 1)) "
            + "GROUP BY window_start, window_end, __key, this";

    // Expect a two-stage (accumulate + combine) by-key aggregation on top of a sliding window over the scan.
    assertPlan(
            optimizePhysical(sql, parameterTypes, table).getPhysical(),
            plan(
                    planRow(0, ProjectPhysicalRel.class),
                    planRow(1, AggregateCombineByKeyPhysicalRel.class),
                    planRow(2, AggregateAccumulateByKeyPhysicalRel.class),
                    planRow(3, ProjectPhysicalRel.class),
                    planRow(4, SlidingWindowPhysicalRel.class),
                    planRow(5, FullScanPhysicalRel.class)
            )
    );
}
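As a side note, the QueryDataType values above describe the planner-side types of positional parameters. A minimal sketch of binding such parameters through the public SQL client API is shown below; the instance variable, map name, query text, and parameter values are illustrative assumptions, not part of the test.

// Sketch only (not from the test): bind two positional parameters on the client.
// Assumes a running HazelcastInstance named "instance" and an IMap "map" with INT
// keys and values; uses com.hazelcast.sql.SqlStatement, SqlResult and SqlRow.
SqlStatement statement = new SqlStatement("SELECT __key, this FROM map WHERE __key > ? AND this < ?")
        .addParameter(1)
        .addParameter(100);
try (SqlResult result = instance.getSql().execute(statement)) {
    for (SqlRow row : result) {
        System.out.println(row.getObject(0) + " -> " + row.getObject(1));
    }
}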
Use of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast by hazelcast.
The class SqlNoSerializationTest, method checkIndexUsage:
private void checkIndexUsage(SqlStatement statement, boolean expectedIndexUsage) {
    List<QueryDataType> parameterTypes = asList(QueryDataType.INT, QueryDataType.OBJECT, QueryDataType.INT);

    List<TableField> mapTableFields = asList(
            new MapTableField("__key", QueryDataType.INT, false, QueryPath.KEY_PATH),
            new MapTableField("this", QueryDataType.OBJECT, false, QueryPath.VALUE_PATH),
            new MapTableField("val", QueryDataType.INT, false, new QueryPath("val", false))
    );

    HazelcastTable table = partitionedTable(
            MAP_NAME,
            mapTableFields,
            getPartitionedMapIndexes(mapContainer(instance().getMap(MAP_NAME)), mapTableFields),
            KEY_COUNT
    );

    OptimizerTestSupport.Result optimizationResult = optimizePhysical(statement.getSql(), parameterTypes, table);

    // The logical plan is always a plain scan; only the physical plan may switch to an index scan.
    assertPlan(optimizationResult.getLogical(), plan(planRow(0, FullScanLogicalRel.class)));

    if (expectedIndexUsage) {
        assertPlan(optimizationResult.getPhysical(), plan(planRow(0, IndexScanMapPhysicalRel.class)));
    } else {
        assertPlan(optimizationResult.getPhysical(), plan(planRow(0, FullScanPhysicalRel.class)));
    }
}
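For the index-scan branch to be reachable, the map needs an index covering the filtered attribute; the test picks these up through getPartitionedMapIndexes. A minimal sketch of declaring such an index on the "val" field is shown below; the index type and name are assumptions for illustration, not taken from the test class.

// Sketch only: register a sorted index on "val" (com.hazelcast.config.IndexConfig,
// com.hazelcast.config.IndexType); the name "val_idx" is an arbitrary choice.
IndexConfig valIndex = new IndexConfig(IndexType.SORTED, "val").setName("val_idx");
instance().getMap(MAP_NAME).addIndex(valIndex);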
Use of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast by hazelcast.
The class ComparisonPredicateIntegrationTest, method checkNumeric:
private void checkNumeric(Object value1, Object value2) {
    int res = compareNumeric(value1, value2);
    if (res == RES_EQ && (value1 instanceof Float || value1 instanceof Double
            || value2 instanceof Float || value2 instanceof Double)) {
        return;
    }

    ExpressionType<?> valueType1 = ExpressionTypes.resolve(value1);
    ExpressionType<?> valueType2 = ExpressionTypes.resolve(value2);

    Class<? extends ExpressionValue> class1 = ExpressionValue.createClass(valueType1);
    Class<? extends ExpressionValue> class2 = ExpressionValue.createClass(valueType2);
    Class<? extends ExpressionBiValue> biClass = ExpressionBiValue.createBiClass(valueType1, valueType2);

    String literal1 = value1.toString();
    String literal2 = value2.toString();

    QueryDataType type1 = QueryDataTypeUtils.resolveTypeForClass(value1.getClass());
    QueryDataType type2 = QueryDataTypeUtils.resolveTypeForClass(value2.getClass());

    SqlColumnType publicType1 = type1.getTypeFamily().getPublicType();
    SqlColumnType publicType2 = type2.getTypeFamily().getPublicType();

    int precedence1 = type1.getTypeFamily().getPrecedence();
    int precedence2 = type2.getTypeFamily().getPrecedence();

    // Column/column
    putCheckCommute(ExpressionBiValue.createBiValue(biClass, value1, value2), "field1", "field2", res);
    putCheckCommute(ExpressionBiValue.createBiValue(biClass, value1, null), "field1", "field2", RES_NULL);
    putCheckCommute(ExpressionBiValue.createBiValue(biClass, null, value2), "field1", "field2", RES_NULL);

    // Column/literal
    putCheckCommute(ExpressionValue.create(class1, value1), "field1", literal2, res);
    putCheckCommute(ExpressionValue.create(class2, value2), literal1, "field1", res);

    // Column/parameter
    if (precedence1 >= precedence2) {
        putCheckCommute(ExpressionValue.create(class1, value1), "field1", "?", res, value2);
    } else if (precedence1 >= QueryDataType.BIGINT.getTypeFamily().getPrecedence()) {
        putAndCheckFailure(ExpressionValue.create(class1, value1), sql(mode.token(), "field1", "?"),
                SqlErrorCode.DATA_EXCEPTION, parameterError(0, publicType1, publicType2), value2);
    }

    if (precedence2 >= precedence1) {
        putCheckCommute(ExpressionValue.create(class2, value2), "?", "field1", res, value1);
    } else if (precedence2 >= QueryDataType.BIGINT.getTypeFamily().getPrecedence()) {
        putAndCheckFailure(ExpressionValue.create(class2, value2), sql(mode.token(), "?", "field1"),
                SqlErrorCode.DATA_EXCEPTION, parameterError(0, publicType2, publicType1), value1);
    }

    // Literal/literal
    checkCommute(literal1, literal2, res);
}
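The QueryDataType calls above can also be read in isolation: QueryDataTypeUtils.resolveTypeForClass maps a Java class to the internal SQL type, whose type family exposes the public column type and a precedence value used to decide which operand is widened. A minimal standalone sketch, using Integer and Double as assumed example classes:

// Sketch only: resolve internal types and compare type-family precedence.
QueryDataType intType = QueryDataTypeUtils.resolveTypeForClass(Integer.class);
QueryDataType doubleType = QueryDataTypeUtils.resolveTypeForClass(Double.class);

SqlColumnType publicType = intType.getTypeFamily().getPublicType();    // client-visible column type
boolean widenToDouble = doubleType.getTypeFamily().getPrecedence()
        > intType.getTypeFamily().getPrecedence();                      // floating point is expected to rank higher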
Use of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast by hazelcast.
The class OptimizerTestSupport, method context:
private static OptimizerContext context(HazelcastSchema schema, QueryDataType... parameterTypes) {
    OptimizerContext context = OptimizerContext.create(
            HazelcastSchemaUtils.createCatalog(schema),
            QueryUtils.prepareSearchPaths(null, null),
            emptyList(),
            1,
            name -> null
    );

    ParameterConverter[] parameterConverters = IntStream.range(0, parameterTypes.length)
            .mapToObj(i -> new StrictParameterConverter(i, SqlParserPos.ZERO, parameterTypes[i]))
            .toArray(ParameterConverter[]::new);
    QueryParameterMetadata parameterMetadata = new QueryParameterMetadata(parameterConverters);
    context.setParameterMetadata(parameterMetadata);

    return context;
}
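For readability, the IntStream pipeline that builds the converters is equivalent to the following plain loop (a sketch using the same constructors as above, shown only to spell out the indexing):

ParameterConverter[] converters = new ParameterConverter[parameterTypes.length];
for (int i = 0; i < parameterTypes.length; i++) {
    // One strict converter per positional parameter, indexed from 0.
    converters[i] = new StrictParameterConverter(i, SqlParserPos.ZERO, parameterTypes[i]);
}
QueryParameterMetadata parameterMetadata = new QueryParameterMetadata(converters);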
Use of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast by hazelcast.
The class OptimizerTestSupport, method optimizePhysical:
protected Result optimizePhysical(String sql, List<QueryDataType> parameterTypes, HazelcastTable... tables) {
    HazelcastSchema schema = schema(tables);
    OptimizerContext context = context(schema, parameterTypes.toArray(new QueryDataType[0]));
    return optimizePhysicalInternal(sql, context);
}
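A typical call of this helper, mirroring the tests above; the query text and the expected single-node plan are illustrative assumptions rather than an actual test:

// Sketch only: optimize a simple filter query against one partitioned table and
// assert on the physical plan; with no index defined, a full scan is assumed here.
HazelcastTable table = partitionedTable("map", asList(field(KEY, INT), field(VALUE, INT)), 1);
Result result = optimizePhysical("SELECT this FROM map WHERE __key = ?",
        asList(QueryDataType.INT), table);
assertPlan(result.getPhysical(), plan(planRow(0, FullScanPhysicalRel.class)));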