Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.
In the class TestConstructorController, the method testSingleJoinOrderBySQLQueryOnline:
@Test
public void testSingleJoinOrderBySQLQueryOnline() throws Exception {
  List<Feature> availableLeft = new ArrayList<>();
  availableLeft.add(new Feature("ft1", "fg1", "Float", null, null));

  List<Feature> availableRight = new ArrayList<>();
  availableRight.add(new Feature("ft1", "fg2", "Float", null, null));

  Query leftQuery = new Query("fs1", "project_fs2", fg1, "fg1", availableLeft, availableLeft);
  Query rightQuery = new Query("fs1", "project_fs1", fg2, "fg2", availableRight, availableRight);

  Join join = new Join(leftQuery, rightQuery, availableLeft, availableLeft, JoinType.INNER, null,
      singleEqualsJoinOperator);
  leftQuery.setJoins(Arrays.asList(join));

  List<Feature> orderByList = new ArrayList<>();
  orderByList.addAll(availableLeft);
  orderByList.addAll(availableRight);
  leftQuery.setOrderByFeatures(orderByList);

  String query = target.generateSQL(leftQuery, true)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
      .getSql()
      .replace("\n", " ");

  Assert.assertEquals("SELECT `fg1`.`ft1`, `fg2`.`ft1` FROM `project_fs2`.`fg1_1` `fg1` INNER JOIN "
      + "`project_fs1`.`fg2_1` `fg2` ON `fg1`.`ft1` = `fg2`.`ft1` ORDER BY `fg1`.`ft1`, `fg2`.`ft1`", query);
}
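The hopsworks tests in this list all follow the same pattern: build a Query, let ConstructorController turn it into a Calcite SqlNode tree via generateSQL, then unparse that tree with SparkSqlDialect. A minimal, self-contained sketch of the unparsing step, using only Calcite's public API (the query string and class name below are illustrative, not part of hopsworks):

    import org.apache.calcite.sql.SqlDialect;
    import org.apache.calcite.sql.SqlNode;
    import org.apache.calcite.sql.dialect.SparkSqlDialect;
    import org.apache.calcite.sql.parser.SqlParser;

    public class SparkDialectUnparseSketch {
      public static void main(String[] args) throws Exception {
        // Obtain a SqlNode tree; here we simply parse a literal query string,
        // whereas the hopsworks controller builds the tree programmatically.
        SqlNode node = SqlParser.create("SELECT ft1 FROM fg1 ORDER BY ft1").parseQuery();

        // Unparse the tree with the Spark dialect, as in the tests above.
        String sql = node
            .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
            .getSql()
            .replace("\n", " ");
        System.out.println(sql);
      }
    }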
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project calcite by apache.
In the class SqlDialectFactoryImpl, the method create:
@Override
public SqlDialect create(DatabaseMetaData databaseMetaData) {
  SqlDialect.Context c = SqlDialects.createContext(databaseMetaData);
  String databaseProductName = c.databaseProductName();
  try {
    if (databaseProductName == null) {
      databaseProductName = databaseMetaData.getDatabaseProductName();
    }
  } catch (SQLException e) {
    throw new RuntimeException("while detecting database product", e);
  }
  final String upperProductName =
      databaseProductName.toUpperCase(Locale.ROOT).trim();
  switch (upperProductName) {
  case "ACCESS":
    return new AccessSqlDialect(c);
  case "APACHE DERBY":
    return new DerbySqlDialect(c);
  case "CLICKHOUSE":
    return new ClickHouseSqlDialect(c);
  case "DBMS:CLOUDSCAPE":
    return new DerbySqlDialect(c);
  case "EXASOL":
    return new ExasolSqlDialect(c);
  case "HIVE":
    return new HiveSqlDialect(c);
  case "INGRES":
    return new IngresSqlDialect(c);
  case "INTERBASE":
    return new InterbaseSqlDialect(c);
  case "JETHRODATA":
    return new JethroDataSqlDialect(
        c.withJethroInfo(jethroCache.get(databaseMetaData)));
  case "LUCIDDB":
    return new LucidDbSqlDialect(c);
  case "ORACLE":
    return new OracleSqlDialect(c);
  case "PHOENIX":
    return new PhoenixSqlDialect(c);
  case "MYSQL (INFOBRIGHT)":
    return new InfobrightSqlDialect(c);
  case "MYSQL":
    return new MysqlSqlDialect(
        c.withDataTypeSystem(MysqlSqlDialect.MYSQL_TYPE_SYSTEM));
  case "REDSHIFT":
    return new RedshiftSqlDialect(
        c.withDataTypeSystem(RedshiftSqlDialect.TYPE_SYSTEM));
  case "SNOWFLAKE":
    return new SnowflakeSqlDialect(c);
  case "SPARK":
    return new SparkSqlDialect(c);
  default:
    break;
  }

  // Now the fuzzy matches.
  if (databaseProductName.startsWith("DB2")) {
    return new Db2SqlDialect(c);
  } else if (upperProductName.contains("FIREBIRD")) {
    return new FirebirdSqlDialect(c);
  } else if (databaseProductName.startsWith("Informix")) {
    return new InformixSqlDialect(c);
  } else if (upperProductName.contains("NETEZZA")) {
    return new NetezzaSqlDialect(c);
  } else if (upperProductName.contains("PARACCEL")) {
    return new ParaccelSqlDialect(c);
  } else if (databaseProductName.startsWith("HP Neoview")) {
    return new NeoviewSqlDialect(c);
  } else if (upperProductName.contains("POSTGRE")) {
    return new PostgresqlSqlDialect(
        c.withDataTypeSystem(PostgresqlSqlDialect.POSTGRESQL_TYPE_SYSTEM));
  } else if (upperProductName.contains("SQL SERVER")) {
    return new MssqlSqlDialect(c);
  } else if (upperProductName.contains("SYBASE")) {
    return new SybaseSqlDialect(c);
  } else if (upperProductName.contains("TERADATA")) {
    return new TeradataSqlDialect(c);
  } else if (upperProductName.contains("HSQL")) {
    return new HsqldbSqlDialect(c);
  } else if (upperProductName.contains("H2")) {
    return new H2SqlDialect(c);
  } else if (upperProductName.contains("VERTICA")) {
    return new VerticaSqlDialect(c);
  } else if (upperProductName.contains("SNOWFLAKE")) {
    return new SnowflakeSqlDialect(c);
  } else if (upperProductName.contains("SPARK")) {
    return new SparkSqlDialect(c);
  } else {
    return new AnsiSqlDialect(c);
  }
}
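A sketch of how this factory might be driven from plain JDBC. The in-memory HSQLDB URL and class name are assumptions for illustration (any JDBC source whose driver is on the classpath works); a connection whose reported product name contains "SPARK" would land on SparkSqlDialect through the fuzzy match above:

    import java.sql.Connection;
    import java.sql.DriverManager;

    import org.apache.calcite.sql.SqlDialect;
    import org.apache.calcite.sql.SqlDialectFactoryImpl;

    public class DialectLookupSketch {
      public static void main(String[] args) throws Exception {
        // Assumes the HSQLDB driver is on the classpath; the URL is illustrative.
        try (Connection conn = DriverManager.getConnection("jdbc:hsqldb:mem:demo", "SA", "")) {
          SqlDialect dialect = new SqlDialectFactoryImpl().create(conn.getMetaData());
          // HSQLDB's product name contains "HSQL", so the fuzzy match returns HsqldbSqlDialect;
          // a product name containing "SPARK" would return SparkSqlDialect instead.
          System.out.println(dialect.getClass().getSimpleName());
        }
      }
    }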
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.
In the class ConstructorController, the method construct:
public FsQueryDTO construct(Query query, boolean pitEnabled, boolean isTrainingDataset,
    Project project, Users user) throws FeaturestoreException, ServiceException {
  FsQueryDTO fsQueryDTO = new FsQueryDTO();
  fsQueryDTO.setQuery(makeOfflineQuery(query));
  fsQueryDTO.setHudiCachedFeatureGroups(getHudiAliases(query, new ArrayList<>(), project, user));
  fsQueryDTO.setOnDemandFeatureGroups(getOnDemandAliases(user, project, query, new ArrayList<>()));

  // if on-demand feature groups are involved in the query, we don't support online queries
  if (fsQueryDTO.getOnDemandFeatureGroups().isEmpty()) {
    fsQueryDTO.setQueryOnline(generateSQL(query, true)
        .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql());
  }

  if (pitEnabled) {
    fsQueryDTO.setPitQuery(makePitQuery(query, isTrainingDataset));
  }

  return fsQueryDTO;
}
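Here the same Query is rendered more than once: makeOfflineQuery produces the offline statement, while the online statement is unparsed with SparkSqlDialect. A rough sketch of that dual-rendering idea follows; the use of HiveSqlDialect for the offline side is purely an assumption for illustration (the snippet above does not show which dialect makeOfflineQuery uses), and the class name is hypothetical:

    import org.apache.calcite.sql.SqlDialect;
    import org.apache.calcite.sql.SqlNode;
    import org.apache.calcite.sql.dialect.HiveSqlDialect;
    import org.apache.calcite.sql.dialect.SparkSqlDialect;
    import org.apache.calcite.sql.parser.SqlParser;

    public class DualDialectSketch {
      public static void main(String[] args) throws Exception {
        // One logical query...
        SqlNode node = SqlParser.create("SELECT ft1 FROM fg1").parseQuery();

        // ...rendered once per target engine (the offline dialect is an assumption).
        String offline = node.toSqlString(new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();
        String online = node.toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql();

        System.out.println("offline: " + offline.replace("\n", " "));
        System.out.println("online:  " + online.replace("\n", " "));
      }
    }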
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.
In the class TestConstructorController, the method testSingleSideSQLQuery:
@Test
public void testSingleSideSQLQuery() throws Exception {
  ConstructorController constructorController = new ConstructorController();

  List<Feature> availableLeft = new ArrayList<>();
  availableLeft.add(new Feature("ft1", "fg0", true));

  Query singleSideQuery = new Query("fs1", "project_fs1", fg1, "fg0", availableLeft, availableLeft);

  String query = constructorController.generateSQL(singleSideQuery, false)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
      .getSql()
      .replace("\n", " ");

  Assert.assertEquals("SELECT `fg0`.`ft1` FROM `fs1`.`fg1_1` `fg0`", query);
}
Use of org.apache.calcite.sql.dialect.SparkSqlDialect in project hopsworks by logicalclocks.
In the class TestConstructorController, the method testSingleOrderBySideSQLQuery:
@Test
public void testSingleOrderBySideSQLQuery() throws Exception {
  ConstructorController constructorController = new ConstructorController();

  List<Feature> availableLeft = new ArrayList<>();
  availableLeft.add(new Feature("ft1", "fg0", true));

  Query singleSideQuery = new Query("fs1", "project_fs1", fg1, "fg0", availableLeft, availableLeft);
  singleSideQuery.setOrderByFeatures(availableLeft);

  String query = constructorController.generateSQL(singleSideQuery, false)
      .toSqlString(new SparkSqlDialect(SqlDialect.EMPTY_CONTEXT))
      .getSql()
      .replace("\n", " ");

  Assert.assertEquals("SELECT `fg0`.`ft1` FROM `fs1`.`fg1_1` `fg0` ORDER BY `fg0`.`ft1`", query);
}