Usage of org.apache.calcite.sql.dialect.SparkSqlDialect in the hopsworks project by logicalclocks.
From class TestFilterController, method testGenerateFilterLogicNodeSingle:
@Test
public void testGenerateFilterLogicNodeSingle() throws Exception {
// A SINGLE logic node wraps exactly one filter: equality on fg1's first feature.
FilterLogic filterLogic = new FilterLogic(SqlFilterLogic.SINGLE);
filterLogic.setLeftFilter(new Filter(Arrays.asList(fg1Features.get(0)), SqlCondition.EQUALS, "abc"));

// Render the node with the Spark dialect and compare the generated SQL fragment.
SqlDialect dialect = new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT);
String actual = filterController.generateFilterLogicNode(filterLogic, false).toSqlString(dialect).getSql();

Assert.assertEquals("`fg1`.`fg1_pk` = 'abc'", actual);
}
Usage of org.apache.calcite.sql.dialect.SparkSqlDialect in the hopsworks project by logicalclocks.
From class TestFilterController, method testGenerateFilterLogicNodeCase1:
@Test
public void testGenerateFilterLogicNodeCase1() throws Exception {
// Left side: equality on fg1's primary key. Right side: <= on fg2's feature,
// which the controller renders with a CASE WHEN default-value substitution.
Filter left = new Filter(Arrays.asList(fg1Features.get(0)), SqlCondition.EQUALS, "abc");
Filter right = new Filter(Arrays.asList(fg2Features.get(1)), SqlCondition.LESS_THAN_OR_EQUAL, "10");

FilterLogic filterLogic = new FilterLogic(SqlFilterLogic.AND);
filterLogic.setLeftFilter(left);
filterLogic.setRightFilter(right);

String actual = filterController.generateFilterLogicNode(filterLogic, false)
    .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();

Assert.assertEquals(
    "`fg1`.`fg1_pk` = 'abc' AND CASE WHEN `fg2`.`fg2_ft` IS NULL THEN 10.0 ELSE `fg2`.`fg2_ft` END <= 10",
    actual);
}
Usage of org.apache.calcite.sql.dialect.SparkSqlDialect in the hopsworks project by logicalclocks.
From class TestJoinController, method testThreeConditionsOn:
@Test
public void testThreeConditionsOn() {
// Both sides of the join expose the same three features; the join is declared
// with three EQUALS conditions, so the ON clause must AND three equality tests.
List<Feature> availableLeft = new ArrayList<>();
List<Feature> availableRight = new ArrayList<>();
for (String featureName : Arrays.asList("ft1", "ft2", "ft3")) {
availableLeft.add(new Feature(featureName, true));
availableRight.add(new Feature(featureName, true));
}

Query leftQuery = new Query("fs1", "project_fs1", fg1, "fg1", availableLeft, availableLeft);
Query rightQuery = new Query("fs1", "project_fs1", fg2, "fg2", availableRight, availableRight);
Join join = new Join(leftQuery, rightQuery, availableLeft, availableLeft, JoinType.INNER, null,
    Arrays.asList(SqlCondition.EQUALS, SqlCondition.EQUALS, SqlCondition.EQUALS));
leftQuery.setJoins(Arrays.asList(join));

SqlNode sqlNode = joinController.getLeftRightCondition(join, false);
// Use getSql() (not SqlString.toString()) for consistency with the other dialect tests;
// SqlString.toString() delegates to getSql(), so the rendered text is unchanged.
String sqlConditionStr = sqlNode.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();

Assert.assertEquals("`fg1`.`ft1` = `fg2`.`ft1` AND `fg1`.`ft2` = `fg2`.`ft2` AND `fg1`.`ft3` = `fg2`.`ft3`",
    sqlConditionStr);
}
Aggregations