Use of io.prestosql.spi.type.VarbinaryType.VARBINARY in project hetu-core by openlookeng.
The class RcFileTester, method preprocessWriteValueOld.
private static Object preprocessWriteValueOld(Format format, Type type, Object value)
{
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP)) {
        long millis = ((SqlTimestamp) value).getMillis();
        if (format == Format.BINARY) {
            // the BINARY format stores timestamps in UTC, so shift out of the Hive storage time zone
            millis = HIVE_STORAGE_TIME_ZONE.convertLocalToUTC(millis, false);
        }
        return Timestamp.ofEpochMilli(millis);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type.getTypeSignature().getBase().equals(ARRAY)) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueOld(format, elementType, element))
                .collect(toList());
    }
    if (type.getTypeSignature().getBase().equals(MAP)) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(format, keyType, entry.getKey()), preprocessWriteValueOld(format, valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type.getTypeSignature().getBase().equals(ROW)) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(format, fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
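Because the method recurses through the type parameters of ARRAY, MAP, and ROW, arbitrarily nested values get normalized with the same leaf rules. A minimal self-contained sketch of that pattern, with a hypothetical Kind enum and normalize helper standing in for the Presto Type hierarchy:

import java.util.ArrayList;
import java.util.List;

public class NormalizeSketch
{
    // Hypothetical stand-in for the Presto Type hierarchy: a leaf kind
    // plus the element kind for containers.
    enum Kind { LONG, STRING, ARRAY }

    static Object normalize(Kind kind, Kind elementKind, Object value)
    {
        if (value == null) {
            return null;    // nulls pass through at every nesting level
        }
        switch (kind) {
            case LONG:
                return ((Number) value).longValue();    // widen any Number to long
            case STRING:
                return value.toString();
            case ARRAY:
                List<Object> normalized = new ArrayList<>();
                for (Object element : (List<?>) value) {
                    normalized.add(normalize(elementKind, null, element));  // recurse per element
                }
                return normalized;
            default:
                throw new IllegalArgumentException("unsupported kind: " + kind);
        }
    }

    public static void main(String[] args)
    {
        // An ARRAY(LONG) holding mixed Number subtypes, including a null
        List<Object> input = new ArrayList<>();
        input.add(1);
        input.add((short) 2);
        input.add(null);
        System.out.println(normalize(Kind.ARRAY, Kind.LONG, input));    // [1, 2, null]
    }
}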
Use of io.prestosql.spi.type.VarbinaryType.VARBINARY in project hetu-core by openlookeng.
The class ElasticsearchMetadata, method toPrestoType.
private Type toPrestoType(IndexMetadata.Field metaDataField, boolean isArray)
{
    IndexMetadata.Type type = metaDataField.getType();
    if (isArray) {
        Type elementType = toPrestoType(metaDataField, false);
        return new ArrayType(elementType);
    }
    if (type instanceof PrimitiveType) {
        switch (((PrimitiveType) type).getName()) {
            case "float":
                return REAL;
            case "double":
                return DOUBLE;
            case "byte":
                return TINYINT;
            case "short":
                return SMALLINT;
            case "integer":
                return INTEGER;
            case "long":
                return BIGINT;
            case "string":
            case "text":
            case "keyword":
                return VARCHAR;
            case "ip":
                return ipAddressType;
            case "boolean":
                return BOOLEAN;
            case "binary":
                return VARBINARY;
            default:
                break;
        }
    }
    else if (type instanceof DateTimeType) {
        if (((DateTimeType) type).getFormats().isEmpty()) {
            return TIMESTAMP;
        }
        // otherwise, skip -- we don't support custom formats, yet
    }
    else if (type instanceof ObjectType) {
        ObjectType objectType = (ObjectType) type;
        List<RowType.Field> fields = objectType.getFields().stream()
                .map(field -> RowType.field(field.getName(), toPrestoType(field)))
                .collect(toImmutableList());
        return RowType.from(fields);
    }
    // unsupported mapping: callers skip fields with no Presto type
    return null;
}
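The switch covers the Elasticsearch primitive names one by one, object mappings recurse into RowType fields, and null signals a field the connector cannot map. A rough self-contained sketch of the name mapping, with type names as plain strings and a hypothetical esToPresto helper in place of the real method:

public class EsTypeMappingSketch
{
    // Hypothetical helper mirroring the switch in toPrestoType; returns
    // null for mapping names the connector does not recognize.
    static String esToPresto(String esType)
    {
        switch (esType) {
            case "float": return "real";
            case "double": return "double";
            case "byte": return "tinyint";
            case "short": return "smallint";
            case "integer": return "integer";
            case "long": return "bigint";
            case "string":
            case "text":
            case "keyword": return "varchar";
            case "boolean": return "boolean";
            case "binary": return "varbinary";
            default: return null;   // unsupported: caller skips the field
        }
    }

    public static void main(String[] args)
    {
        // geo_point is an example of a mapping the switch falls through on
        for (String esType : new String[] {"keyword", "long", "binary", "geo_point"}) {
            System.out.println(esType + " -> " + esToPresto(esType));
        }
    }
}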
Use of io.prestosql.spi.type.VarbinaryType.VARBINARY in project hetu-core by openlookeng.
The class QueryPlanner, method plan.
public UpdateNode plan(Update node)
{
    Table table = node.getTable();
    TableHandle handle = analysis.getTableHandle(table);
    TableMetadata tableMetadata = metadata.getTableMetadata(session, handle);
    List<ColumnMetadata> dataColumns = tableMetadata.getMetadata().getColumns().stream()
            .filter(column -> !column.isHidden())
            .collect(toImmutableList());
    List<String> targetColumnNames = node.getAssignmentItems().stream()
            .map(assignment -> assignment.getName().toString())
            .collect(toImmutableList());
    // Create lists of column names and SET expressions, in table column order
    ImmutableList.Builder<String> updatedColumnNamesBuilder = ImmutableList.builder();
    ImmutableList.Builder<Type> updatedColumnTypesBuilder = ImmutableList.builder();
    ImmutableList.Builder<Expression> orderedColumnValuesBuilder = ImmutableList.builder();
    ImmutableMap.Builder<String, Expression> setExpressions = new ImmutableMap.Builder<>();
    for (ColumnMetadata columnMetadata : dataColumns) {
        String name = columnMetadata.getName();
        Type type = columnMetadata.getType();
        int index = targetColumnNames.indexOf(name);
        if (index >= 0) {
            updatedColumnNamesBuilder.add(name);
            updatedColumnTypesBuilder.add(type);
            orderedColumnValuesBuilder.add(node.getAssignmentItems().get(index).getValue());
            setExpressions.put(name, node.getAssignmentItems().get(index).getValue());
        }
    }
    List<String> updatedColumnNames = updatedColumnNamesBuilder.build();
    List<Type> updatedColumnTypes = updatedColumnTypesBuilder.build();
    List<Expression> orderedColumnValues = orderedColumnValuesBuilder.build();
    // create table scan
    RelationPlan relationPlan = new RelationPlanner(analysis, planSymbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session, namedSubPlan, uniqueIdAllocator).process(table, null);
    PlanBuilder builder = planBuilderFor(relationPlan);
    if (node.getWhere().isPresent()) {
        builder = filter(builder, node.getWhere().get(), node);
    }
    builder = builder.appendProjections(orderedColumnValues, planSymbolAllocator, idAllocator);
    PlanAndMappings planAndMappings = coerce(builder, orderedColumnValues, analysis, idAllocator, planSymbolAllocator, typeCoercion);
    builder = planAndMappings.getSubPlan();
    ImmutableList.Builder<Symbol> updatedColumnValuesBuilder = ImmutableList.builder();
    orderedColumnValues.forEach(columnValue -> updatedColumnValuesBuilder.add(planAndMappings.get(columnValue)));
    Symbol rowId = builder.translate(analysis.getRowIdField(table));
    updatedColumnValuesBuilder.add(rowId);
    List<Symbol> outputs = ImmutableList.of(
            planSymbolAllocator.newSymbol("partialrows", BIGINT),
            planSymbolAllocator.newSymbol("fragment", VARBINARY));
    Optional<PlanNodeId> tableScanId = getIdForLeftTableScan(relationPlan.getRoot());
    checkArgument(tableScanId.isPresent(), "tableScanId not present");
    // create update node
    return new UpdateNode(
            idAllocator.getNextId(),
            builder.getRoot(),
            new TableWriterNode.UpdateTarget(handle, metadata.getTableMetadata(session, handle).getTable(), updatedColumnNames, updatedColumnTypes),
            rowId,
            updatedColumnValuesBuilder.build(),
            outputs,
            setExpressions.build());
}
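Note how the loop iterates over the table's columns rather than the SET clauses, so the updated columns come out in table column order regardless of how the UPDATE statement lists them. A toy self-contained illustration of just that reordering step (the schema and assignments are made up):

import java.util.ArrayList;
import java.util.List;

public class UpdateColumnOrderSketch
{
    public static void main(String[] args)
    {
        // Table columns in declaration order (hypothetical schema)
        List<String> dataColumns = List.of("id", "name", "price", "updated_at");
        // SET clauses as written by the user, in an arbitrary order
        List<String> targetColumnNames = List.of("price", "name");
        List<String> assignedValues = List.of("price * 1.1", "upper(name)");

        List<String> orderedNames = new ArrayList<>();
        List<String> orderedValues = new ArrayList<>();
        for (String column : dataColumns) {
            int index = targetColumnNames.indexOf(column);
            if (index >= 0) {
                // Keep table column order, not SET clause order
                orderedNames.add(column);
                orderedValues.add(assignedValues.get(index));
            }
        }
        System.out.println(orderedNames);   // [name, price]
        System.out.println(orderedValues);  // [upper(name), price * 1.1]
    }
}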
Use of io.prestosql.spi.type.VarbinaryType.VARBINARY in project hetu-core by openlookeng.
The class OrcTester, method preprocessWriteValueHive.
private static Object preprocessWriteValueHive(Type type, Object value)
{
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP)) {
        return Timestamp.ofEpochMilli(((SqlTimestamp) value).getMillis());
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type.getTypeSignature().getBase().equals(StandardTypes.ARRAY)) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueHive(elementType, element))
                .collect(toList());
    }
    if (type.getTypeSignature().getBase().equals(StandardTypes.MAP)) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type.getTypeSignature().getBase().equals(StandardTypes.ROW)) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
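Compared with the RCFile variant above, this version additionally wraps CharType values in HiveChar, which normalizes them to the declared fixed length, and it writes timestamps without the BINARY-format time-zone shift. A minimal sketch of the fixed-length behavior, with a hypothetical padTo helper rather than the real HiveChar:

public class CharPaddingSketch
{
    // Hypothetical stand-in for fixed-length CHAR normalization:
    // pad with spaces up to the declared length, truncate if too long.
    static String padTo(String value, int length)
    {
        if (value.length() >= length) {
            return value.substring(0, length);
        }
        StringBuilder padded = new StringBuilder(value);
        while (padded.length() < length) {
            padded.append(' ');
        }
        return padded.toString();
    }

    public static void main(String[] args)
    {
        System.out.println("[" + padTo("abc", 5) + "]");        // [abc  ]
        System.out.println("[" + padTo("abcdefg", 5) + "]");    // [abcde]
    }
}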