Search in sources :

Example 36 with SparkSqlDialect

use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.

The class TestFilterController defines the method testGetSQLNodeDate.

@Test
public void testGetSQLNodeDate() throws Exception {
    // A "date" typed filter value must be rendered as a SQL DATE literal,
    // and the rendering must be identical under the Spark and Hive dialects.
    SqlNode dateNode = filterController.getSQLNode("date", "2021-11-12");
    String expectedLiteral = "DATE '2021-11-12'";

    String sparkSql = dateNode.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
    Assert.assertEquals(expectedLiteral, sparkSql);

    String hiveSql = dateNode.toSqlString(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
    Assert.assertEquals(expectedLiteral, hiveSql);
}
Also used : SparkSqlDialect(org.apache.calcite.sql.dialect.SparkSqlDialect) HiveSqlDialect(org.apache.calcite.sql.dialect.HiveSqlDialect) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)

Example 37 with SparkSqlDialect

use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.

The class TestFilterController defines the method testBuildFilterNodeSingleJoin.

@Test
public void testBuildFilterNodeSingleJoin() throws Exception {
    // Two queries joined once: the left query carries a nested filter
    // (EQUALS AND (EQUALS OR EQUALS)), the right query a single NOT_EQUALS.
    Query fg1Query = new Query("fs1", "project_fs1", fg1, "fg0", fg1Features, fg1Features);
    Query fg2Query = new Query("fs1", "project_fs1", fg2, "fg1", fg2Features, fg2Features);

    // Build the left query's filter tree: leftFilter AND (orLeft OR orRight).
    FilterLogic orBranch = new FilterLogic(SqlFilterLogic.OR);
    orBranch.setLeftFilter(new Filter(Arrays.asList(fg3Features.get(1)), SqlCondition.EQUALS, "10"));
    orBranch.setRightFilter(new Filter(Arrays.asList(fg3Features.get(2)), SqlCondition.EQUALS, "10"));

    FilterLogic fg1Filter = new FilterLogic(SqlFilterLogic.AND);
    fg1Filter.setLeftFilter(new Filter(Arrays.asList(fg1Features.get(1)), SqlCondition.EQUALS, "10"));
    fg1Filter.setRightLogic(orBranch);
    fg1Query.setFilter(fg1Filter);

    // The joined query contributes one standalone NOT_EQUALS filter.
    FilterLogic fg2Filter = new FilterLogic(SqlFilterLogic.SINGLE);
    fg2Filter.setLeftFilter(new Filter(Arrays.asList(fg2Features.get(1)), SqlCondition.NOT_EQUALS, "10"));
    fg2Query.setFilter(fg2Filter);

    Join join = new Join(fg1Query, fg2Query, joinFeatures, joinFeatures, JoinType.INNER, null, Arrays.asList(SqlCondition.EQUALS));
    fg1Query.setJoins(Arrays.asList(join));

    // Render the combined filter for the last (only) join in the Spark dialect.
    String result = filterController.buildFilterNode(fg1Query, fg1Query, fg1Query.getJoins().size() - 1, false).toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
    String expected = "`fg1`.`fg1_ft` = 10 " + "AND (CASE WHEN `fg3`.`fg3_ft` IS NULL THEN 'default' ELSE `fg3`.`fg3_ft` END = '10' OR `fg3`.`join` = '10') " + "AND CASE WHEN `fg2`.`fg2_ft` IS NULL THEN 10.0 ELSE `fg2`.`fg2_ft` END <> 10";
    Assert.assertEquals(expected, result);
}
Also used : SparkSqlDialect(org.apache.calcite.sql.dialect.SparkSqlDialect) Query(io.hops.hopsworks.common.featurestore.query.Query) SqlFilterLogic(io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.SqlFilterLogic) Join(io.hops.hopsworks.common.featurestore.query.join.Join) Test(org.junit.Test)

Example 38 with SparkSqlDialect

use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.

The class TestFilterController defines the method testGetSQLNodeString.

@Test
public void testGetSQLNodeString() throws Exception {
    // A "string" typed filter value must be rendered as a quoted SQL string
    // literal, identically under the Spark and Hive dialects.
    SqlNode stringNode = filterController.getSQLNode("string", "value_string");
    String expectedLiteral = "'value_string'";

    String sparkSql = stringNode.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
    Assert.assertEquals(expectedLiteral, sparkSql);

    String hiveSql = stringNode.toSqlString(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
    Assert.assertEquals(expectedLiteral, hiveSql);
}
Also used : SparkSqlDialect(org.apache.calcite.sql.dialect.SparkSqlDialect) HiveSqlDialect(org.apache.calcite.sql.dialect.HiveSqlDialect) SqlNode(org.apache.calcite.sql.SqlNode) Test(org.junit.Test)

Example 39 with SparkSqlDialect

use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.

The class TestFilterController defines the method testGenerateFilterLogicNodeCase3.

@Test
public void testGenerateFilterLogicNodeCase3() throws Exception {
    // Shape: (A AND B) OR C. With the left branch being a nested logic node,
    // no parentheses are expected in the output because AND binds tighter
    // than OR and the grouping matches natural precedence.
    Filter filterA = new Filter(Arrays.asList(fg1Features.get(0)), SqlCondition.EQUALS, "abc");
    Filter filterB = new Filter(Arrays.asList(fg3Features.get(0)), SqlCondition.NOT_EQUALS, "abc");
    Filter filterC = new Filter(Arrays.asList(fg2Features.get(1)), SqlCondition.LESS_THAN_OR_EQUAL, "10");

    FilterLogic andBranch = new FilterLogic(SqlFilterLogic.AND);
    andBranch.setLeftFilter(filterA);
    andBranch.setRightFilter(filterB);

    FilterLogic root = new FilterLogic(SqlFilterLogic.OR);
    root.setLeftLogic(andBranch);
    root.setRightFilter(filterC);

    String result = filterController.generateFilterLogicNode(root, true).toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
    String expected = "`fg1`.`fg1_pk` = 'abc' AND `fg3`.`fg3_pk` <> 'abc' OR `fg2`.`fg2_ft` <= 10";
    Assert.assertEquals(expected, result);
}
Also used : SparkSqlDialect(org.apache.calcite.sql.dialect.SparkSqlDialect) SqlFilterLogic(io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.SqlFilterLogic) Test(org.junit.Test)

Example 40 with SparkSqlDialect

use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.

The class TestFilterController defines the method testGenerateFilterLogicNodeCase2.

@Test
public void testGenerateFilterLogicNodeCase2() throws Exception {
    // Shape: A AND (B OR C). The nested OR branch on the right must be
    // parenthesized in the generated SQL to preserve the grouping against
    // AND's higher precedence.
    Filter filterA = new Filter(Arrays.asList(fg1Features.get(0)), SqlCondition.EQUALS, "abc");
    Filter filterB = new Filter(Arrays.asList(fg3Features.get(0)), SqlCondition.NOT_EQUALS, "abc");
    Filter filterC = new Filter(Arrays.asList(fg2Features.get(1)), SqlCondition.LESS_THAN_OR_EQUAL, "10");

    FilterLogic orBranch = new FilterLogic(SqlFilterLogic.OR);
    orBranch.setLeftFilter(filterB);
    orBranch.setRightFilter(filterC);

    FilterLogic root = new FilterLogic(SqlFilterLogic.AND);
    root.setLeftFilter(filterA);
    root.setRightLogic(orBranch);

    String result = filterController.generateFilterLogicNode(root, true).toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
    String expected = "`fg1`.`fg1_pk` = 'abc' AND (`fg3`.`fg3_pk` <> 'abc' OR `fg2`.`fg2_ft` <= 10)";
    Assert.assertEquals(expected, result);
}
Also used : SparkSqlDialect(org.apache.calcite.sql.dialect.SparkSqlDialect) SqlFilterLogic(io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.SqlFilterLogic) Test(org.junit.Test)

Aggregations

SparkSqlDialect (org.apache.calcite.sql.dialect.SparkSqlDialect)43 Test (org.junit.Test)41 ArrayList (java.util.ArrayList)27 Join (io.hops.hopsworks.common.featurestore.query.join.Join)21 Query (io.hops.hopsworks.common.featurestore.query.Query)14 Feature (io.hops.hopsworks.common.featurestore.query.Feature)13 SqlCondition (io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.SqlCondition)10 SqlFilterLogic (io.hops.hopsworks.persistence.entity.featurestore.trainingdataset.SqlFilterLogic)10 SqlNode (org.apache.calcite.sql.SqlNode)7 HiveSqlDialect (org.apache.calcite.sql.dialect.HiveSqlDialect)5 Collection (java.util.Collection)3 Filter (io.hops.hopsworks.common.featurestore.query.filter.Filter)2 FilterLogic (io.hops.hopsworks.common.featurestore.query.filter.FilterLogic)2 SQLException (java.sql.SQLException)1 SqlCall (org.apache.calcite.sql.SqlCall)1 SqlSelect (org.apache.calcite.sql.SqlSelect)1 AccessSqlDialect (org.apache.calcite.sql.dialect.AccessSqlDialect)1 AnsiSqlDialect (org.apache.calcite.sql.dialect.AnsiSqlDialect)1 BigQuerySqlDialect (org.apache.calcite.sql.dialect.BigQuerySqlDialect)1 CalciteSqlDialect (org.apache.calcite.sql.dialect.CalciteSqlDialect)1