Use of org.apache.calcite.sql.SqlWriter in project hive by apache.
The class HiveBetween, method unparse:
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
  final SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.create("BETWEEN"), "", "");
  call.operand(VALUE_OPERAND).unparse(writer, getLeftPrec(), 0);
  writer.sep(super.getName());
  // If the expression for the lower bound contains a call to an AND
  // operator, we need to wrap the expression in parentheses to prevent
  // the AND from associating with BETWEEN. For example, we should
  // unparse
  //   a BETWEEN b OR (c AND d) OR e AND f
  // as
  //   a BETWEEN (b OR (c AND d) OR e) AND f
  // If it were unparsed as
  //   a BETWEEN b OR c AND d OR e AND f
  // then it would be interpreted as
  //   (a BETWEEN (b OR c) AND d) OR (e AND f)
  // which would be wrong.
  final SqlNode lower = call.operand(LOWER_OPERAND);
  final SqlNode upper = call.operand(UPPER_OPERAND);
  int lowerPrec = new AndFinder().containsAnd(lower) ? 100 : 0;
  lower.unparse(writer, lowerPrec, lowerPrec);
  writer.sep("AND");
  upper.unparse(writer, 0, getRightPrec());
  writer.endList(frame);
}
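The method above delegates the AND detection to an AndFinder helper that is not shown in this snippet. As a rough illustration of how such a check can be written with Calcite's visitor API, the sketch below walks an expression tree and reports whether it contains an AND call; the class name and structure are assumptions, and the project's real AndFinder may be implemented differently.

import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.util.SqlBasicVisitor;

// Hypothetical sketch only; not the project's actual AndFinder.
class AndFinderSketch extends SqlBasicVisitor<Void> {
  private boolean found;

  boolean containsAnd(SqlNode node) {
    found = false;
    node.accept(this);
    return found;
  }

  @Override
  public Void visit(SqlCall call) {
    if (call.getKind() == SqlKind.AND) {
      found = true;
    }
    // SqlBasicVisitor.visit(SqlCall) recurses into the call's operands.
    return super.visit(call);
  }
}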
Use of org.apache.calcite.sql.SqlWriter in project hive by apache.
The class HiveConcat, method unparse:
@Override
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
  List<SqlNode> opList = call.getOperandList();
  assert (opList.size() >= 1);
  final SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.SIMPLE);
  SqlNode sqlNode = opList.get(0);
  sqlNode.unparse(writer, leftPrec, getLeftPrec());
  for (SqlNode op : opList.subList(1, opList.size() - 1)) {
    writer.setNeedWhitespace(true);
    writer.sep("||");
    writer.setNeedWhitespace(true);
    op.unparse(writer, 0, 0);
  }
  sqlNode = opList.get(opList.size() - 1);
  writer.setNeedWhitespace(true);
  writer.sep("||");
  writer.setNeedWhitespace(true);
  sqlNode.unparse(writer, getRightPrec(), rightPrec);
  writer.endList(frame);
}
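The loop above splits the operand list into a first operand (unparsed with the caller's left precedence), any middle operands, and a last operand (unparsed with the caller's right precedence), separating every pair with the standard || concatenation operator; note that a separator is always emitted before the last operand, so the method effectively expects at least two operands. A stand-alone sketch of the same joining pattern on plain strings, for illustration only (not Calcite API):

import java.util.List;

class ConcatJoinSketch {
  // For operands [a, b, c] this produces "a || b || c".
  static String joinWithConcat(List<String> operands) {
    StringBuilder sql = new StringBuilder(operands.get(0));
    for (String op : operands.subList(1, operands.size())) {
      sql.append(" || ").append(op);
    }
    return sql.toString();
  }
}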
Use of org.apache.calcite.sql.SqlWriter in project beam by apache.
The class BeamDDLTest, method unparseAggregateFunction:
@Test
public void unparseAggregateFunction() {
  SqlIdentifier name = new SqlIdentifier("foo", SqlParserPos.ZERO);
  SqlNode jarPath = SqlLiteral.createCharString("path/to/udf.jar", SqlParserPos.ZERO);
  SqlCreateFunction createFunction = new SqlCreateFunction(SqlParserPos.ZERO, false, name, jarPath, true);
  SqlWriter sqlWriter = new SqlPrettyWriter(BeamBigQuerySqlDialect.DEFAULT);
  createFunction.unparse(sqlWriter, 0, 0);
  assertEquals("CREATE AGGREGATE FUNCTION foo USING JAR 'path/to/udf.jar'", sqlWriter.toSqlString().getSql());
}
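The test follows a pattern that works for any SqlNode: unparse the node into a SqlPrettyWriter configured for the target dialect, then compare toSqlString().getSql() against the expected text. A small helper in that style could look like the sketch below; the method name is an assumption and it is not part of the Beam test class shown here.

// Hypothetical helper, shown for illustration only.
static void assertUnparsesTo(SqlNode node, SqlDialect dialect, String expectedSql) {
  SqlPrettyWriter writer = new SqlPrettyWriter(dialect);
  node.unparse(writer, 0, 0);
  assertEquals(expectedSql, writer.toSqlString().getSql());
}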
Use of org.apache.calcite.sql.SqlWriter in project beam by apache.
The class BeamDDLTest, method unparseScalarFunction:
@Test
public void unparseScalarFunction() {
  SqlIdentifier name = new SqlIdentifier("foo", SqlParserPos.ZERO);
  SqlNode jarPath = SqlLiteral.createCharString("path/to/udf.jar", SqlParserPos.ZERO);
  SqlCreateFunction createFunction = new SqlCreateFunction(SqlParserPos.ZERO, false, name, jarPath, false);
  SqlWriter sqlWriter = new SqlPrettyWriter(BeamBigQuerySqlDialect.DEFAULT);
  createFunction.unparse(sqlWriter, 0, 0);
  assertEquals("CREATE FUNCTION foo USING JAR 'path/to/udf.jar'", sqlWriter.toSqlString().getSql());
}
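The two BeamDDLTest methods differ only in the final boolean argument passed to the SqlCreateFunction constructor, which toggles the AGGREGATE keyword in the generated DDL. Purely as an illustration of an unparse method that would produce the two strings asserted above, a plausible shape is sketched below; the field names (isAggregate, functionName, jarPath) are assumptions, and Beam's actual implementation is not shown here and may differ.

// Illustrative sketch only; not Beam's actual SqlCreateFunction.unparse.
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
  writer.keyword(isAggregate ? "CREATE AGGREGATE FUNCTION" : "CREATE FUNCTION");
  functionName.unparse(writer, leftPrec, rightPrec);
  writer.keyword("USING JAR");
  jarPath.unparse(writer, leftPrec, rightPrec);
}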
Use of org.apache.calcite.sql.SqlWriter in project hazelcast by hazelcast.
The class SqlCreateJob, method unparse:
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
  writer.keyword("CREATE JOB");
  if (ifNotExists) {
    writer.keyword("IF NOT EXISTS");
  }
  name.unparse(writer, leftPrec, rightPrec);
  if (options.size() > 0) {
    writer.newlineAndIndent();
    writer.keyword("OPTIONS");
    SqlWriter.Frame withFrame = writer.startList("(", ")");
    for (SqlNode property : options) {
      printIndent(writer);
      property.unparse(writer, leftPrec, rightPrec);
    }
    writer.newlineAndIndent();
    writer.endList(withFrame);
  }
  writer.newlineAndIndent();
  writer.keyword("AS");
  sqlInsert.unparse(writer, leftPrec, rightPrec);
}
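The loop over the options relies on a printIndent helper that is not included in this snippet. A guess at its shape, purely illustrative (Hazelcast's actual helper may differ): emit a comma separator between entries, then start a new indented line before the next option.

// Hypothetical helper, not the project's actual code.
private static void printIndent(SqlWriter writer) {
  writer.sep(",", false); // printFirst=false: no separator before the first option
  writer.newlineAndIndent();
  writer.print("  ");
}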