Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks: class TestConstructorController, method testSingleJoinSQLQueryOnline.
@Test
public void testSingleJoinSQLQueryOnline() throws Exception {
  List<Feature> availableLeft = new ArrayList<>();
  availableLeft.add(new Feature("ft1", "fg1", "Float", null, null));
  List<Feature> availableRight = new ArrayList<>();
  availableRight.add(new Feature("ft1", "fg2", "Float", null, null));
  Query leftQuery = new Query("fs1", "project_fs2", fg1, "fg1", availableLeft, availableLeft);
  Query rightQuery = new Query("fs1", "project_fs1", fg2, "fg2", availableRight, availableRight);
  Join join = new Join(leftQuery, rightQuery, availableLeft, availableLeft, JoinType.INNER, null,
      singleEqualsJoinOperator);
  leftQuery.setJoins(Arrays.asList(join));
  String query = target.generateSQL(leftQuery, true)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
      .getSql()
      .replace("\n", " ");
  Assert.assertEquals("SELECT `fg1`.`ft1`, `fg2`.`ft1` FROM `project_fs2`.`fg1_1` `fg1` INNER JOIN "
      + "`project_fs1`.`fg2_1` `fg2` ON `fg1`.`ft1` = `fg2`.`ft1`", query);
}
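All of the tests in this listing share the same final step: the constructed query is turned into a Calcite SqlNode and serialized with SparkSqlDialect, which (as the assertions show) quotes identifiers with backticks. The fragment below is a minimal standalone sketch of just that rendering step, assuming only Calcite on the classpath; the parsed query string and the class name are illustrative and not part of the hopsworks code.

import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.parser.SqlParser;

public class SparkDialectRenderingSketch {
  public static void main(String[] args) throws Exception {
    // Parse an arbitrary query into a Calcite SqlNode (double quotes are the parser's
    // default identifier quoting, so identifier casing is preserved).
    SqlNode node = SqlParser.create("SELECT \"fg1\".\"ft1\" FROM \"fg1\"").parseQuery();
    // Render it with the Spark dialect, mirroring the tests above:
    // toSqlString(...).getSql() followed by newline stripping.
    String sparkSql = node.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
        .getSql()
        .replace("\n", " ");
    System.out.println(sparkSql);
  }
}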
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks: class TestConstructorController, method testPrefixFeatureJoins.
@Test
public void testPrefixFeatureJoins() throws Exception {
  List<Feature> availableFirst = new ArrayList<>();
  availableFirst.add(new Feature("ft1", "fg0", "Float", null, null));
  List<Feature> availableSecond = new ArrayList<>();
  availableSecond.add(new Feature("ft2", "fg1", "Float", null, "prefix2_"));
  List<Feature> availableThird = new ArrayList<>();
  availableThird.add(new Feature("fg4_ft4_1", "fg2", "Float", null, "prefix4_"));
  availableThird.add(new Feature("fg4_ft4_2", "fg2", "Float", null, "prefix4_"));
  Mockito.when(cachedFeaturegroupController.dropHudiSpecFeatures(Mockito.any()))
      .thenReturn(availableSecond, availableThird,
          Stream.of(availableFirst, availableSecond, availableThird)
              .flatMap(Collection::stream)
              .collect(Collectors.toList()));
  fg1.getCachedFeaturegroup().setTimeTravelFormat(TimeTravelFormat.HUDI);
  fg2.getCachedFeaturegroup().setTimeTravelFormat(TimeTravelFormat.HUDI);
  fg4.getCachedFeaturegroup().setTimeTravelFormat(TimeTravelFormat.HUDI);
  Query leftQuery = new Query("fs1", "project_fs1", fg1, "fg0", availableFirst, availableFirst);
  Query secondQuery = new Query("fs1", "project_fs1", fg2, "fg1", availableSecond, availableSecond);
  Query thirdQuery = new Query("fs1", "project_fs1", fg4, "fg2", fg4Features, fg4Features);
  Join join = new Join(leftQuery, secondQuery, availableFirst, availableSecond, JoinType.INNER,
      "prefix2_", singleEqualsJoinOperator);
  Join secondJoin = new Join(leftQuery, thirdQuery, availableFirst, availableFirst, JoinType.INNER,
      "prefix4_", singleEqualsJoinOperator);
  leftQuery.setJoins(Arrays.asList(join, secondJoin));
  String query = target.generateSQL(leftQuery, true)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
      .getSql()
      .replace("\n", " ");
  Assert.assertEquals("SELECT `fg0`.`ft1`, `fg1`.`ft2` `prefix2_ft2`, "
      + "`fg2`.`fg4_ft4_1` `prefix4_fg4_ft4_1`, `fg2`.`fg4_ft4_2` `prefix4_fg4_ft4_2` "
      + "FROM `project_fs1`.`fg1_1` `fg0` INNER JOIN `project_fs1`.`fg2_1` `fg1` ON `fg0`.`ft1` = `fg1`.`ft2` "
      + "INNER JOIN `project_fs1`.`fg4_1` `fg2` ON `fg0`.`ft1` = `fg2`.`ft1`", query);
}
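The "prefix2_"/"prefix4_" arguments surface in the assertion as column aliases (`fg1`.`ft2` `prefix2_ft2`). As a rough illustration of how such an aliased select item can be expressed as a Calcite node, here is a small sketch built with SqlStdOperatorTable.AS; the identifiers are copied from the test above, but this is only an assumption about the shape of the output, not the actual ConstructorController implementation, and whether the AS keyword is printed depends on the dialect.

import java.util.Arrays;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;

public class PrefixAliasSketch {
  public static void main(String[] args) {
    // Fully qualified feature reference fg1.ft2, as in the test above.
    SqlIdentifier feature = new SqlIdentifier(Arrays.asList("fg1", "ft2"), SqlParserPos.ZERO);
    // Alias carrying the join prefix: prefix2_ + feature name.
    SqlIdentifier alias = new SqlIdentifier("prefix2_ft2", SqlParserPos.ZERO);
    // Wrap the reference in an AS call: fg1.ft2 aliased as prefix2_ft2.
    SqlNode aliased = SqlStdOperatorTable.AS.createCall(SqlParserPos.ZERO, feature, alias);
    System.out.println(
        aliased.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql());
  }
}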
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks: class TestConstructorController, method testTreeWayJoinSQLNode.
@Test
public void testTreeWayJoinSQLNode() throws Exception {
  List<Feature> availableFirst = new ArrayList<>();
  availableFirst.add(new Feature("ft1", "fg0", "Float", null, null));
  List<Feature> availableSecond = new ArrayList<>();
  availableSecond.add(new Feature("ft2", "fg1", "Float", null, null));
  List<Feature> availableThird = new ArrayList<>();
  availableThird.add(new Feature("ft1", "fg2", "Float", null, null));
  Query leftQuery = new Query("fs1", "project_fs1", fg1, "fg0", availableFirst, availableFirst);
  Query secondQuery = new Query("fs1", "project_fs1", fg2, "fg1", availableSecond, availableSecond);
  Query thirdQuery = new Query("fs1", "project_fs1", fg3, "fg2", availableThird, availableThird);
  Join join = new Join(leftQuery, secondQuery, availableFirst, availableSecond, JoinType.INNER, null,
      singleEqualsJoinOperator);
  Join secondJoin = new Join(leftQuery, thirdQuery, availableFirst, availableFirst, JoinType.INNER, null,
      singleEqualsJoinOperator);
  leftQuery.setJoins(Arrays.asList(join, secondJoin));
  String query = target.generateSQL(leftQuery, false)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
      .getSql()
      .replace("\n", " ");
  Assert.assertEquals("SELECT `fg0`.`ft1`, `fg1`.`ft2`, `fg2`.`ft1` "
      + "FROM `fs1`.`fg1_1` `fg0` "
      + "INNER JOIN `fs1`.`fg2_1` `fg1` ON `fg0`.`ft1` = `fg1`.`ft2` "
      + "INNER JOIN `fs1`.`fg3_1` `fg2` ON `fg0`.`ft1` = `fg2`.`ft1`", query);
}
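Judging from the assertions, the only visible difference from the online tests is the second argument to generateSQL: with false the tables are qualified by the feature store name (`fs1`.`fg1_1`), with true by the project's online database (`project_fs1`.`fg1_1`). The sketch below only shows how such a two-part, backtick-quoted table reference renders under the Spark dialect; the names are copied from the assertions, and the reading of the flag is inferred from them rather than from the controller's source.

import java.util.Arrays;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.dialect.SparkSqlDialect;
import org.apache.calcite.sql.parser.SqlParserPos;

public class QualifiedTableNameSketch {
  public static void main(String[] args) {
    SparkSqlDialect dialect = new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT);
    // Offline form seen in the assertion above: <feature store>.<feature group>_<version>.
    SqlIdentifier offline = new SqlIdentifier(Arrays.asList("fs1", "fg1_1"), SqlParserPos.ZERO);
    // Online form seen in the other tests: <project database>.<feature group>_<version>.
    SqlIdentifier online = new SqlIdentifier(Arrays.asList("project_fs1", "fg1_1"), SqlParserPos.ZERO);
    System.out.println(offline.toSqlString(dialect).getSql());
    System.out.println(online.toSqlString(dialect).getSql());
  }
}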
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks: class TestConstructorController, method testSingleOrderBySideSQLQueryOnline.
@Test
public void testSingleOrderBySideSQLQueryOnline() throws Exception {
  ConstructorController constructorController = new ConstructorController();
  List<Feature> availableLeft = new ArrayList<>();
  availableLeft.add(new Feature("ft1", "fg0", "Float", null, null));
  Query singleSideQuery = new Query("fs1", "project_fs1", fg1, "fg0", availableLeft, availableLeft);
  singleSideQuery.setOrderByFeatures(availableLeft);
  String query = constructorController.generateSQL(singleSideQuery, true)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
      .getSql()
      .replace("\n", " ");
  Assert.assertEquals("SELECT `fg0`.`ft1` FROM `project_fs1`.`fg1_1` `fg0` ORDER BY `fg0`.`ft1`", query);
}
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks: class TestConstructorController, method testSingleSideSQLQueryOnline.
@Test
public void testSingleSideSQLQueryOnline() throws Exception {
  ConstructorController constructorController = new ConstructorController();
  List<Feature> availableLeft = new ArrayList<>();
  availableLeft.add(new Feature("ft1", "fg0", "Float", null, null));
  Query singleSideQuery = new Query("fs1", "project_fs1", fg1, "fg0", availableLeft, availableLeft);
  String query = constructorController.generateSQL(singleSideQuery, true)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
      .getSql()
      .replace("\n", " ");
  Assert.assertEquals("SELECT `fg0`.`ft1` FROM `project_fs1`.`fg1_1` `fg0`", query);
}