Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.
From the class TestFilterController, method testGetSQLNodeDate.
@Test
public void testGetSQLNodeDate() throws Exception {
  SqlNode node = filterController.getSQLNode("date", "2021-11-12");
  String result = node.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
  String expected = "DATE '2021-11-12'";
  Assert.assertEquals(expected, result);
  // The Hive dialect renders the same ANSI DATE literal.
  result = node.toSqlString(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
  Assert.assertEquals(expected, result);
}
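For context, here is a minimal sketch of how such a DATE node can be built directly against Calcite's literal API. This is an assumption about what getSQLNode("date", ...) does internally, not the actual hopsworks implementation:

import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.util.DateString;

public class DateLiteralSketch {
  public static void main(String[] args) {
    // Hypothetical stand-in for getSQLNode("date", ...): an ANSI DATE literal.
    SqlNode date = SqlLiteral.createDate(new DateString("2021-11-12"), SqlParserPos.ZERO);
    System.out.println(date.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql());
    // Prints: DATE '2021-11-12'
  }
}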
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.
From the class TestFilterController, method testBuildFilterNodeSingleJoin.
@Test
public void testBuildFilterNodeSingleJoin() throws Exception {
  Query leftQuery = new Query("fs1", "project_fs1", fg1, "fg0", fg1Features, fg1Features);
  Query secondQuery = new Query("fs1", "project_fs1", fg2, "fg1", fg2Features, fg2Features);
  // Left query: an AND of a single condition and a nested OR.
  FilterLogic firstFilter = new FilterLogic(SqlFilterLogic.AND);
  firstFilter.setLeftFilter(new Filter(Arrays.asList(fg1Features.get(1)), SqlCondition.EQUALS, "10"));
  FilterLogic rightLogic = new FilterLogic(SqlFilterLogic.OR);
  rightLogic.setLeftFilter(new Filter(Arrays.asList(fg3Features.get(1)), SqlCondition.EQUALS, "10"));
  rightLogic.setRightFilter(new Filter(Arrays.asList(fg3Features.get(2)), SqlCondition.EQUALS, "10"));
  firstFilter.setRightLogic(rightLogic);
  leftQuery.setFilter(firstFilter);
  // Right query: a single NOT_EQUALS condition.
  FilterLogic secondFilter = new FilterLogic(SqlFilterLogic.SINGLE);
  secondFilter.setLeftFilter(new Filter(Arrays.asList(fg2Features.get(1)), SqlCondition.NOT_EQUALS, "10"));
  secondQuery.setFilter(secondFilter);
  Join join = new Join(leftQuery, secondQuery, joinFeatures, joinFeatures, JoinType.INNER, null,
      Arrays.asList(SqlCondition.EQUALS));
  leftQuery.setJoins(Arrays.asList(join));
  String result = filterController.buildFilterNode(leftQuery, leftQuery, leftQuery.getJoins().size() - 1, false)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
  // Features with default values are wrapped in CASE WHEN ... IS NULL expressions,
  // and the filters of both sides of the join are chained with AND.
  String expected = "`fg1`.`fg1_ft` = 10 "
      + "AND (CASE WHEN `fg3`.`fg3_ft` IS NULL THEN 'default' ELSE `fg3`.`fg3_ft` END = '10' OR `fg3`.`join` = '10') "
      + "AND CASE WHEN `fg2`.`fg2_ft` IS NULL THEN 10.0 ELSE `fg2`.`fg2_ft` END <> 10";
  Assert.assertEquals(expected, result);
}
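The assertion above shows buildFilterNode chaining the filters of both sides of the join with AND. A hedged sketch of that composition using Calcite's standard operator table; the identifiers and literals are illustrative stand-ins, not the hopsworks code:

import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import java.util.Arrays;

public class AndChainSketch {
  public static void main(String[] args) {
    // Illustrative conditions standing in for the per-query filter nodes.
    SqlNode left = SqlStdOperatorTable.EQUALS.createCall(SqlParserPos.ZERO,
        new SqlIdentifier(Arrays.asList("fg1", "fg1_ft"), SqlParserPos.ZERO),
        SqlLiteral.createExactNumeric("10", SqlParserPos.ZERO));
    SqlNode right = SqlStdOperatorTable.NOT_EQUALS.createCall(SqlParserPos.ZERO,
        new SqlIdentifier(Arrays.asList("fg2", "fg2_ft"), SqlParserPos.ZERO),
        SqlLiteral.createExactNumeric("10", SqlParserPos.ZERO));
    // Chain the two conditions with AND, as the expected string above suggests.
    SqlNode combined = SqlStdOperatorTable.AND.createCall(SqlParserPos.ZERO, left, right);
    System.out.println(combined.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql());
    // Prints: `fg1`.`fg1_ft` = 10 AND `fg2`.`fg2_ft` <> 10
  }
}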
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.
From the class TestFilterController, method testGetSQLNodeString.
@Test
public void testGetSQLNodeString() throws Exception {
  SqlNode node = filterController.getSQLNode("string", "value_string");
  String result = node.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
  String expected = "'value_string'";
  Assert.assertEquals(expected, result);
  result = node.toSqlString(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
  Assert.assertEquals(expected, result);
}
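A matching sketch for the string case, again an assumption about the underlying mechanics rather than the real getSQLNode body (imports as in the date sketch above):

// Hypothetical stand-in for getSQLNode("string", ...): a character literal.
SqlNode str = SqlLiteral.createCharString("value_string", SqlParserPos.ZERO);
System.out.println(str.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql());
// Prints: 'value_string' (HiveSqlDialect quotes it the same way)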
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.
From the class TestFilterController, method testGenerateFilterLogicNodeCase3.
@Test
public void testGenerateFilterLogicNodeCase3() throws Exception {
  FilterLogic filterLogic = new FilterLogic(SqlFilterLogic.OR);
  Filter leftFilter = new Filter(Arrays.asList(fg1Features.get(0)), SqlCondition.EQUALS, "abc");
  Filter rightFilter = new Filter(Arrays.asList(fg2Features.get(1)), SqlCondition.LESS_THAN_OR_EQUAL, "10");
  FilterLogic leftLogic = new FilterLogic(SqlFilterLogic.AND);
  Filter middleFilter = new Filter(Arrays.asList(fg3Features.get(0)), SqlCondition.NOT_EQUALS, "abc");
  leftLogic.setLeftFilter(leftFilter);
  leftLogic.setRightFilter(middleFilter);
  filterLogic.setLeftLogic(leftLogic);
  filterLogic.setRightFilter(rightFilter);
  String result = filterController.generateFilterLogicNode(filterLogic, true)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
  // The AND branch needs no parentheses because AND binds tighter than OR.
  String expected = "`fg1`.`fg1_pk` = 'abc' AND `fg3`.`fg3_pk` <> 'abc' OR `fg2`.`fg2_ft` <= 10";
  Assert.assertEquals(expected, result);
}
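The expected string carries no parentheses because AND binds tighter than OR, so Calcite's unparser can render the left AND branch bare. A runnable sketch of that behavior; the column and literal values mirror the test but are built by hand:

import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import java.util.Arrays;

public class PrecedenceSketch {
  static SqlNode col(String table, String name) {
    return new SqlIdentifier(Arrays.asList(table, name), SqlParserPos.ZERO);
  }

  public static void main(String[] args) {
    SqlNode eq = SqlStdOperatorTable.EQUALS.createCall(SqlParserPos.ZERO,
        col("fg1", "fg1_pk"), SqlLiteral.createCharString("abc", SqlParserPos.ZERO));
    SqlNode neq = SqlStdOperatorTable.NOT_EQUALS.createCall(SqlParserPos.ZERO,
        col("fg3", "fg3_pk"), SqlLiteral.createCharString("abc", SqlParserPos.ZERO));
    SqlNode lte = SqlStdOperatorTable.LESS_THAN_OR_EQUAL.createCall(SqlParserPos.ZERO,
        col("fg2", "fg2_ft"), SqlLiteral.createExactNumeric("10", SqlParserPos.ZERO));
    // (eq AND neq) OR lte: the AND branch is unparsed without parentheses.
    SqlNode tree = SqlStdOperatorTable.OR.createCall(SqlParserPos.ZERO,
        SqlStdOperatorTable.AND.createCall(SqlParserPos.ZERO, eq, neq), lte);
    System.out.println(tree.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql());
    // Prints: `fg1`.`fg1_pk` = 'abc' AND `fg3`.`fg3_pk` <> 'abc' OR `fg2`.`fg2_ft` <= 10
  }
}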
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.
From the class TestFilterController, method testGenerateFilterLogicNodeCase2.
@Test
public void testGenerateFilterLogicNodeCase2() throws Exception {
  FilterLogic filterLogic = new FilterLogic(SqlFilterLogic.AND);
  Filter leftFilter = new Filter(Arrays.asList(fg1Features.get(0)), SqlCondition.EQUALS, "abc");
  Filter rightFilter = new Filter(Arrays.asList(fg2Features.get(1)), SqlCondition.LESS_THAN_OR_EQUAL, "10");
  FilterLogic rightLogic = new FilterLogic(SqlFilterLogic.OR);
  Filter middleFilter = new Filter(Arrays.asList(fg3Features.get(0)), SqlCondition.NOT_EQUALS, "abc");
  rightLogic.setLeftFilter(middleFilter);
  rightLogic.setRightFilter(rightFilter);
  filterLogic.setLeftFilter(leftFilter);
  filterLogic.setRightLogic(rightLogic);
  String result = filterController.generateFilterLogicNode(filterLogic, true)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
  // The nested OR has lower precedence than the surrounding AND, so it is parenthesized.
  String expected = "`fg1`.`fg1_pk` = 'abc' AND (`fg3`.`fg3_pk` <> 'abc' OR `fg2`.`fg2_ft` <= 10)";
  Assert.assertEquals(expected, result);
}
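This is the mirror of case 3: the OR now sits under the AND, so the unparser must parenthesize it to preserve the tree. Reusing eq, neq, and lte from the PrecedenceSketch above:

// eq AND (neq OR lte): the lower-precedence OR is kept in parentheses.
SqlNode tree = SqlStdOperatorTable.AND.createCall(SqlParserPos.ZERO, eq,
    SqlStdOperatorTable.OR.createCall(SqlParserPos.ZERO, neq, lte));
System.out.println(tree.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql());
// Prints: `fg1`.`fg1_pk` = 'abc' AND (`fg3`.`fg3_pk` <> 'abc' OR `fg2`.`fg2_ft` <= 10)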