Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeName in the Apache Hive project.
From the class ASTBuilder, method literal:
/**
 * Converts a Calcite {@link RexLiteral} into an equivalent Hive parser {@link ASTNode}.
 *
 * <p>SQL NULL values are mapped to a TOK_NULL node up front; otherwise the literal's
 * value is rendered as the token text Hive's parser expects for that type (quoted
 * strings, quoted date/time/interval literals, optional integral type suffixes).
 *
 * @param literal the Calcite literal to convert
 * @param useTypeQualInLiteral whether integral literals carry a Hive type suffix
 *        ("Y" tinyint, "S" smallint, "L" bigint)
 * @return the Hive AST node representing the literal
 * @throws RuntimeException if the literal's SQL type is unsupported (e.g. BINARY)
 */
public static ASTNode literal(RexLiteral literal, boolean useTypeQualInLiteral) {
Object val = null;
int type = 0;
SqlTypeName sqlType = literal.getType().getSqlTypeName();
// Null check, phase 1: date/time and interval types are probed via getValue(),
// while primitive/character types below are probed via getValue3().
// NOTE(review): presumably getValue3() is used where Calcite stores a normalized
// representation for these types — confirm against RexLiteral docs.
switch(sqlType) {
case BINARY:
case DATE:
case TIME:
case TIMESTAMP:
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
case INTERVAL_DAY:
case INTERVAL_DAY_HOUR:
case INTERVAL_DAY_MINUTE:
case INTERVAL_DAY_SECOND:
case INTERVAL_HOUR:
case INTERVAL_HOUR_MINUTE:
case INTERVAL_HOUR_SECOND:
case INTERVAL_MINUTE:
case INTERVAL_MINUTE_SECOND:
case INTERVAL_MONTH:
case INTERVAL_SECOND:
case INTERVAL_YEAR:
case INTERVAL_YEAR_MONTH:
if (literal.getValue() == null) {
return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
}
break;
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
case DOUBLE:
case DECIMAL:
case FLOAT:
case REAL:
case VARCHAR:
case CHAR:
case BOOLEAN:
if (literal.getValue3() == null) {
return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
}
}
// Phase 2: render the non-null value as (token type, token text).
switch(sqlType) {
case TINYINT:
// "Y" suffix marks a Hive tinyint literal when qualification is requested.
if (useTypeQualInLiteral) {
val = literal.getValue3() + "Y";
} else {
val = literal.getValue3();
}
type = HiveParser.IntegralLiteral;
break;
case SMALLINT:
// "S" suffix marks a Hive smallint literal.
if (useTypeQualInLiteral) {
val = literal.getValue3() + "S";
} else {
val = literal.getValue3();
}
type = HiveParser.IntegralLiteral;
break;
case INTEGER:
// Plain int needs no suffix regardless of useTypeQualInLiteral.
val = literal.getValue3();
type = HiveParser.IntegralLiteral;
break;
case BIGINT:
// "L" suffix marks a Hive bigint literal.
if (useTypeQualInLiteral) {
val = literal.getValue3() + "L";
} else {
val = literal.getValue3();
}
type = HiveParser.IntegralLiteral;
break;
case DOUBLE:
// "D" suffix is always emitted for double, independent of useTypeQualInLiteral.
val = literal.getValue3() + "D";
type = HiveParser.NumberLiteral;
break;
case DECIMAL:
// "BD" suffix marks a Hive decimal (big decimal) literal.
val = literal.getValue3() + "BD";
type = HiveParser.NumberLiteral;
break;
case FLOAT:
case REAL:
// NOTE(review): FLOAT/REAL use the HiveParser.Number token rather than
// NumberLiteral as DOUBLE/DECIMAL do — looks intentional, but confirm.
val = literal.getValue3();
type = HiveParser.Number;
break;
case VARCHAR:
case CHAR:
// Escape embedded quotes/special characters, then single-quote the result.
val = literal.getValue3();
String escapedVal = BaseSemanticAnalyzer.escapeSQLString(String.valueOf(val));
type = HiveParser.StringLiteral;
val = "'" + escapedVal + "'";
break;
case BOOLEAN:
// Booleans become the KW_TRUE/KW_FALSE token itself; val is only token text.
val = literal.getValue3();
type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE : HiveParser.KW_FALSE;
break;
case DATE:
val = "'" + literal.getValueAs(DateString.class).toString() + "'";
type = HiveParser.TOK_DATELITERAL;
break;
case TIME:
// NOTE(review): TIME is emitted as a TOK_TIMESTAMPLITERAL (no dedicated
// TIME literal token) — confirm this is the intended Hive mapping.
val = "'" + literal.getValueAs(TimeString.class).toString() + "'";
type = HiveParser.TOK_TIMESTAMPLITERAL;
break;
case TIMESTAMP:
val = "'" + literal.getValueAs(TimestampString.class).toString() + "'";
type = HiveParser.TOK_TIMESTAMPLITERAL;
break;
case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
// Calcite stores timestamp with local time-zone in UTC internally, thus
// when we bring it back, we need to add the UTC suffix.
val = "'" + literal.getValueAs(TimestampString.class).toString() + " UTC'";
type = HiveParser.TOK_TIMESTAMPLOCALTZLITERAL;
break;
case INTERVAL_YEAR:
case INTERVAL_MONTH:
case INTERVAL_YEAR_MONTH:
{
// Calcite represents year-month intervals as a total month count.
type = HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL;
BigDecimal monthsBd = (BigDecimal) literal.getValue();
HiveIntervalYearMonth intervalYearMonth = new HiveIntervalYearMonth(monthsBd.intValue());
val = "'" + intervalYearMonth.toString() + "'";
}
break;
case INTERVAL_DAY:
case INTERVAL_DAY_HOUR:
case INTERVAL_DAY_MINUTE:
case INTERVAL_DAY_SECOND:
case INTERVAL_HOUR:
case INTERVAL_HOUR_MINUTE:
case INTERVAL_HOUR_SECOND:
case INTERVAL_MINUTE:
case INTERVAL_MINUTE_SECOND:
case INTERVAL_SECOND:
{
type = HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL;
BigDecimal millisBd = (BigDecimal) literal.getValue();
// Calcite literal is in millis, convert to seconds
BigDecimal secsBd = millisBd.divide(BigDecimal.valueOf(1000));
HiveIntervalDayTime intervalDayTime = new HiveIntervalDayTime(secsBd);
val = "'" + intervalDayTime.toString() + "'";
}
break;
case NULL:
// Typed NULL literal; val stays null and is stringified as "null" below.
type = HiveParser.TOK_NULL;
break;
// BINARY literals are not expected to reach this point; any other type is
// likewise unsupported.
case BINARY:
default:
throw new RuntimeException("Unsupported Type: " + sqlType);
}
return (ASTNode) ParseDriver.adaptor.create(type, String.valueOf(val));
}
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeName in the Apache Calcite project.
From the class CassandraTable, method query:
/**
 * Executes a CQL query on the underlying table.
 *
 * @param session Cassandra session used to execute the query
 * @param fields fields of the full row type (name/class pairs); used to build the
 *        result row type when no explicit projection is given
 * @param selectFields fields to project, as (expression, alias) pairs; empty means
 *        {@code SELECT *}
 * @param predicates CQL predicate strings, combined conjunctively into the WHERE clause
 * @param order CQL ORDER BY expressions; empty means no ordering
 * @param offset number of leading rows to skip (applied client-side by advancing the
 *        enumerator); NOTE(review): unboxed below, so callers are assumed to pass a
 *        non-null value (e.g. 0 for "no offset") — confirm at call sites
 * @param fetch maximum number of rows to fetch, or a negative value for "no limit";
 *        NOTE(review): also assumed non-null for the same reason
 * @return Enumerator of results
 */
public Enumerable<Object> query(final Session session, List<Map.Entry<String, Class>> fields, final List<Map.Entry<String, String>> selectFields, List<String> predicates, List<String> order, final Integer offset, final Integer fetch) {
// Build the type of the resulting row based on the provided fields
final RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
final RelDataTypeFactory.Builder fieldInfo = typeFactory.builder();
final RelDataType rowType = getRowType(typeFactory);
// Adds one named field (looked up from the table's row type) to fieldInfo.
Function1<String, Void> addField = new Function1<String, Void>() {
public Void apply(String fieldName) {
SqlTypeName typeName = rowType.getField(fieldName, true, false).getType().getSqlTypeName();
fieldInfo.add(fieldName, typeFactory.createSqlType(typeName)).nullable(true);
return null;
}
};
// No projection: result row type covers all fields; otherwise only the projected ones.
if (selectFields.isEmpty()) {
for (Map.Entry<String, Class> field : fields) {
addField.apply(field.getKey());
}
} else {
for (Map.Entry<String, String> field : selectFields) {
addField.apply(field.getKey());
}
}
final RelProtoDataType resultRowType = RelDataTypeImpl.proto(fieldInfo.build());
// Construct the list of fields to project
final String selectString;
if (selectFields.isEmpty()) {
selectString = "*";
} else {
// Render each projected field as "expr AS alias", comma-separated.
selectString = Util.toString(new Iterable<String>() {
public Iterator<String> iterator() {
final Iterator<Map.Entry<String, String>> selectIterator = selectFields.iterator();
return new Iterator<String>() {
@Override
public boolean hasNext() {
return selectIterator.hasNext();
}
@Override
public String next() {
Map.Entry<String, String> entry = selectIterator.next();
return entry.getKey() + " AS " + entry.getValue();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
}, "", ", ", "");
}
// Combine all predicates conjunctively
String whereClause = "";
if (!predicates.isEmpty()) {
whereClause = " WHERE ";
whereClause += Util.toString(predicates, "", " AND ", "");
}
// Build and issue the query and return an Enumerator over the results
StringBuilder queryBuilder = new StringBuilder("SELECT ");
queryBuilder.append(selectString);
queryBuilder.append(" FROM \"" + columnFamily + "\"");
queryBuilder.append(whereClause);
if (!order.isEmpty()) {
queryBuilder.append(Util.toString(order, " ORDER BY ", ", ", ""));
}
// CQL has no OFFSET, so the LIMIT must cover offset + fetch; the offset rows
// are then skipped client-side in the enumerator below.
int limit = offset;
if (fetch >= 0) {
limit += fetch;
}
if (limit > 0) {
queryBuilder.append(" LIMIT " + limit);
}
queryBuilder.append(" ALLOW FILTERING");
final String query = queryBuilder.toString();
return new AbstractEnumerable<Object>() {
public Enumerator<Object> enumerator() {
final ResultSet results = session.execute(query);
// Skip results until we get to the right offset
int skip = 0;
Enumerator<Object> enumerator = new CassandraEnumerator(results, resultRowType);
while (skip < offset && enumerator.moveNext()) {
skip++;
}
return enumerator;
}
};
}
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeName in the Apache Calcite project.
From the class CassandraSchema, method getRelDataType:
/**
 * Builds a proto row type for a Cassandra table or materialized view by mapping
 * each column's driver type to an approximate SQL type (all fields nullable).
 *
 * @param columnFamily name of the table or materialized view
 * @param view true to read a materialized view's columns, false for a table's
 * @return proto data type describing the column family's row
 */
RelProtoDataType getRelDataType(String columnFamily, boolean view) {
  final List<ColumnMetadata> columnList = view
      ? getKeyspace().getMaterializedView(columnFamily).getColumns()
      : getKeyspace().getTable(columnFamily).getColumns();
  // Temporary type factory, just for the duration of this method. Allowable
  // because we're creating a proto-type, not a type; before being used, the
  // proto-type will be copied into a real type factory.
  final RelDataTypeFactory factory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
  final RelDataTypeFactory.Builder builder = factory.builder();
  for (ColumnMetadata col : columnList) {
    builder.add(col.getName(), factory.createSqlType(toSqlTypeName(col.getType()))).nullable(true);
  }
  return RelDataTypeImpl.proto(builder.build());
}

/**
 * Maps a Cassandra driver {@link DataType} to an approximate {@link SqlTypeName},
 * defaulting to ANY for unrecognized types.
 * TODO: This mapping of types can be done much better.
 */
private static SqlTypeName toSqlTypeName(DataType type) {
  if (type == DataType.uuid() || type == DataType.timeuuid()) {
    // We currently rely on this in CassandraFilter to detect UUID columns.
    // That is, these fixed length literals should be unquoted in CQL.
    return SqlTypeName.CHAR;
  }
  if (type == DataType.ascii() || type == DataType.text() || type == DataType.varchar()) {
    return SqlTypeName.VARCHAR;
  }
  if (type == DataType.cint() || type == DataType.varint()) {
    return SqlTypeName.INTEGER;
  }
  if (type == DataType.bigint()) {
    return SqlTypeName.BIGINT;
  }
  if (type == DataType.cdouble() || type == DataType.cfloat() || type == DataType.decimal()) {
    return SqlTypeName.DOUBLE;
  }
  return SqlTypeName.ANY;
}
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeName in the Apache Calcite project.
From the class JdbcSchema, method parseTypeString:
/**
 * Parses a SQL type string into a {@link RelDataType}.
 *
 * <p>Given "INTEGER", returns BasicSqlType(INTEGER).
 * Given "VARCHAR(10)", returns BasicSqlType(VARCHAR, 10).
 * Given "NUMERIC(10, 2)", returns BasicSqlType(NUMERIC, 10, 2).
 *
 * @param typeFactory factory used to construct the resulting type
 * @param typeString type name, optionally followed by "(precision[, scale])"
 * @return the parsed type; falls back to nullable ANY if the type name is unknown
 *         or the precision/scale digits cannot be parsed
 */
private RelDataType parseTypeString(RelDataTypeFactory typeFactory, String typeString) {
  int precision = -1;
  int scale = -1;
  final int open = typeString.indexOf("(");
  if (open >= 0) {
    final int close = typeString.indexOf(")", open);
    if (close >= 0) {
      final String rest = typeString.substring(open + 1, close);
      // Strip "(...)" so only the bare type name is looked up below.
      typeString = typeString.substring(0, open);
      final int comma = rest.indexOf(",");
      try {
        if (comma >= 0) {
          precision = Integer.parseInt(rest.substring(0, comma).trim());
          // BUG FIX: previously this parsed rest.substring(comma), which still
          // contained the comma (and any following space), so the documented
          // "NUMERIC(10, 2)" case threw NumberFormatException instead of
          // producing scale 2. Skip past the comma and trim before parsing.
          scale = Integer.parseInt(rest.substring(comma + 1).trim());
        } else {
          precision = Integer.parseInt(rest.trim());
        }
      } catch (NumberFormatException e) {
        // Unparseable precision/scale digits: fall back to nullable ANY,
        // consistent with the unknown-type-name fallback below (previously the
        // exception escaped this method).
        return typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.ANY), true);
      }
    }
  }
  try {
    final SqlTypeName typeName = SqlTypeName.valueOf(typeString);
    if (typeName.allowsPrecScale(true, true)) {
      return typeFactory.createSqlType(typeName, precision, scale);
    } else if (typeName.allowsPrecScale(true, false)) {
      return typeFactory.createSqlType(typeName, precision);
    } else {
      return typeFactory.createSqlType(typeName);
    }
  } catch (IllegalArgumentException e) {
    // Unknown type name (SqlTypeName.valueOf failed): fall back to nullable ANY.
    return typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.ANY), true);
  }
}
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeName in the Apache Calcite project.
From the class JdbcSchema, method sqlType:
/**
 * Derives the Calcite type of a JDBC column from its {@code java.sql.Types} code,
 * precision, scale and the driver-reported type string.
 *
 * @param typeFactory factory used to construct the resulting type
 * @param dataType JDBC type code from the driver's metadata
 * @param precision column precision, or negative if not applicable
 * @param scale column scale, or negative if not applicable
 * @param typeString driver-reported type name (may be null); used to recover the
 *        component type of ARRAY columns
 * @return the corresponding Calcite type
 */
private RelDataType sqlType(RelDataTypeFactory typeFactory, int dataType, int precision, int scale, String typeString) {
  // Fall back to ANY if type is unknown
  final SqlTypeName sqlTypeName = Util.first(SqlTypeName.getNameForJdbcType(dataType), SqlTypeName.ANY);
  if (sqlTypeName == SqlTypeName.ARRAY) {
    RelDataType componentType = null;
    final String arraySuffix = " ARRAY";
    if (typeString != null && typeString.endsWith(arraySuffix)) {
      // E.g. hsqldb gives "INTEGER ARRAY", so we deduce the component type
      // "INTEGER".
      final String componentString = typeString.substring(0, typeString.length() - arraySuffix.length());
      componentType = parseTypeString(typeFactory, componentString);
    }
    if (componentType == null) {
      componentType = typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.ANY), true);
    }
    return typeFactory.createArrayType(componentType, -1);
  }
  if (precision >= 0 && scale >= 0 && sqlTypeName.allowsPrecScale(true, true)) {
    return typeFactory.createSqlType(sqlTypeName, precision, scale);
  }
  if (precision >= 0 && sqlTypeName.allowsPrecNoScale()) {
    return typeFactory.createSqlType(sqlTypeName, precision);
  }
  assert sqlTypeName.allowsNoPrecNoScale();
  return typeFactory.createSqlType(sqlTypeName);
}
Aggregations