Search in sources:

Example 1 with VARBINARY

Use of io.prestosql.spi.type.VarbinaryType.VARBINARY in the project hetu-core by openlookeng.

From the class RcFileTester, the method preprocessWriteValueOld:

private static Object preprocessWriteValueOld(Format format, Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP)) {
        long millis = ((SqlTimestamp) value).getMillis();
        if (format == Format.BINARY) {
            millis = HIVE_STORAGE_TIME_ZONE.convertLocalToUTC(millis, false);
        }
        return Timestamp.ofEpochMilli(millis);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type.getTypeSignature().getBase().equals(ARRAY)) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueOld(format, elementType, element)).collect(toList());
    }
    if (type.getTypeSignature().getBase().equals(MAP)) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(format, keyType, entry.getKey()), preprocessWriteValueOld(format, valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type.getTypeSignature().getBase().equals(ROW)) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(format, fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
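
For the VARBINARY case specifically, the conversion is just an unwrap: the Presto-side SqlVarbinary test value is reduced to the raw byte[] that the Hive RCFile serializer expects. A minimal, self-contained sketch of that branch (the SqlVarbinary(byte[]) constructor is assumed here; getBytes() is the same accessor used above):

import io.prestosql.spi.type.SqlVarbinary;

import java.util.Arrays;

// Illustrative only: mirrors the VARBINARY branch of preprocessWriteValueOld.
public class VarbinaryPreprocessSketch {
    public static void main(String[] args) {
        byte[] raw = new byte[] {0x01, 0x02, 0x03};
        // Presto-side test value (the byte[] constructor is an assumption of this sketch)
        SqlVarbinary prestoValue = new SqlVarbinary(raw);
        // What the branch hands to the Hive writer: the raw bytes, with no wrapper type
        byte[] forHiveWriter = prestoValue.getBytes();
        System.out.println(Arrays.equals(forHiveWriter, raw)); // true
    }
}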

Example 2 with VARBINARY

Use of io.prestosql.spi.type.VarbinaryType.VARBINARY in the project hetu-core by openlookeng.

From the class ElasticsearchMetadata, the method toPrestoType:

private Type toPrestoType(IndexMetadata.Field metaDataField, boolean isArray) {
    IndexMetadata.Type type = metaDataField.getType();
    if (isArray) {
        Type elementType = toPrestoType(metaDataField, false);
        return new ArrayType(elementType);
    }
    if (type instanceof PrimitiveType) {
        switch(((PrimitiveType) type).getName()) {
            case "float":
                return REAL;
            case "double":
                return DOUBLE;
            case "byte":
                return TINYINT;
            case "short":
                return SMALLINT;
            case "integer":
                return INTEGER;
            case "long":
                return BIGINT;
            case "string":
            case "text":
            case "keyword":
                return VARCHAR;
            case "ip":
                return ipAddressType;
            case "boolean":
                return BOOLEAN;
            case "binary":
                return VARBINARY;
            default:
                break;
        }
    } else if (type instanceof DateTimeType) {
        if (((DateTimeType) type).getFormats().isEmpty()) {
            return TIMESTAMP;
        }
    // otherwise, skip -- we don't support custom formats, yet
    } else if (type instanceof ObjectType) {
        ObjectType objectType = (ObjectType) type;
        List<RowType.Field> fields = objectType.getFields().stream().map(field -> RowType.field(field.getName(), toPrestoType(field))).collect(toImmutableList());
        return RowType.from(fields);
    }
    return null;
}
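
The binary case is where VARBINARY enters: Elasticsearch fields mapped as "binary" surface in Presto as VARBINARY columns. As a standalone sketch, not the hetu-core API, the primitive part of this switch reduces to a name-to-type lookup over the io.prestosql.spi.type constants (the "ip" case is omitted because ipAddressType is an instance field of the metadata class):

import io.prestosql.spi.type.Type;

import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.BooleanType.BOOLEAN;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.IntegerType.INTEGER;
import static io.prestosql.spi.type.RealType.REAL;
import static io.prestosql.spi.type.SmallintType.SMALLINT;
import static io.prestosql.spi.type.TinyintType.TINYINT;
import static io.prestosql.spi.type.VarbinaryType.VARBINARY;
import static io.prestosql.spi.type.VarcharType.VARCHAR;

// Standalone sketch of the primitive-name mapping above; not the hetu-core API.
final class EsPrimitiveTypeSketch {
    static Type toPrestoType(String esTypeName) {
        switch (esTypeName) {
            case "float":   return REAL;
            case "double":  return DOUBLE;
            case "byte":    return TINYINT;
            case "short":   return SMALLINT;
            case "integer": return INTEGER;
            case "long":    return BIGINT;
            case "string":
            case "text":
            case "keyword": return VARCHAR;
            case "boolean": return BOOLEAN;
            case "binary":  return VARBINARY;  // Elasticsearch binary fields become VARBINARY columns
            default:        return null;       // unsupported names are skipped, as in the method above
        }
    }
}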

Example 3 with VARBINARY

Use of io.prestosql.spi.type.VarbinaryType.VARBINARY in the project hetu-core by openlookeng.

From the class QueryPlanner, the method plan:

public UpdateNode plan(Update node) {
    Table table = node.getTable();
    TableHandle handle = analysis.getTableHandle(table);
    TableMetadata tableMetadata = metadata.getTableMetadata(session, handle);
    List<ColumnMetadata> dataColumns = tableMetadata.getMetadata().getColumns().stream().filter(column -> !column.isHidden()).collect(toImmutableList());
    List<String> targetColumnNames = node.getAssignmentItems().stream().map(assignment -> assignment.getName().toString()).collect(toImmutableList());
    // Create lists of column names and SET expressions, in table column order
    ImmutableList.Builder<String> updatedColumnNamesBuilder = ImmutableList.builder();
    ImmutableList.Builder<Type> updatedColumnTypesBuilder = ImmutableList.builder();
    ImmutableList.Builder<Expression> orderedColumnValuesBuilder = ImmutableList.builder();
    ImmutableMap.Builder<String, Expression> setExpressions = new ImmutableMap.Builder<>();
    for (ColumnMetadata columnMetadata : dataColumns) {
        String name = columnMetadata.getName();
        Type type = columnMetadata.getType();
        int index = targetColumnNames.indexOf(name);
        if (index >= 0) {
            updatedColumnNamesBuilder.add(name);
            updatedColumnTypesBuilder.add(type);
            orderedColumnValuesBuilder.add(node.getAssignmentItems().get(index).getValue());
            setExpressions.put(name, node.getAssignmentItems().get(index).getValue());
        }
    }
    List<String> updatedColumnNames = updatedColumnNamesBuilder.build();
    List<Type> updatedColumnTypes = updatedColumnTypesBuilder.build();
    List<Expression> orderedColumnValues = orderedColumnValuesBuilder.build();
    // create table scan
    RelationPlan relationPlan = new RelationPlanner(analysis, planSymbolAllocator, idAllocator, lambdaDeclarationToSymbolMap, metadata, session, namedSubPlan, uniqueIdAllocator).process(table, null);
    PlanBuilder builder = planBuilderFor(relationPlan);
    if (node.getWhere().isPresent()) {
        builder = filter(builder, node.getWhere().get(), node);
    }
    builder = builder.appendProjections(orderedColumnValues, planSymbolAllocator, idAllocator);
    PlanAndMappings planAndMappings = coerce(builder, orderedColumnValues, analysis, idAllocator, planSymbolAllocator, typeCoercion);
    builder = planAndMappings.getSubPlan();
    ImmutableList.Builder<Symbol> updatedColumnValuesBuilder = ImmutableList.builder();
    orderedColumnValues.forEach(columnValue -> updatedColumnValuesBuilder.add(planAndMappings.get(columnValue)));
    Symbol rowId = builder.translate(analysis.getRowIdField(table));
    updatedColumnValuesBuilder.add(rowId);
    List<Symbol> outputs = ImmutableList.of(planSymbolAllocator.newSymbol("partialrows", BIGINT), planSymbolAllocator.newSymbol("fragment", VARBINARY));
    Optional<PlanNodeId> tableScanId = getIdForLeftTableScan(relationPlan.getRoot());
    checkArgument(tableScanId.isPresent(), "tableScanId not present");
    // create update node
    return new UpdateNode(idAllocator.getNextId(), builder.getRoot(), new TableWriterNode.UpdateTarget(handle, metadata.getTableMetadata(session, handle).getTable(), updatedColumnNames, updatedColumnTypes), rowId, updatedColumnValuesBuilder.build(), outputs, setExpressions.build());
}

Example 4 with VARBINARY

Use of io.prestosql.spi.type.VarbinaryType.VARBINARY in the project hetu-core by openlookeng.

From the class OrcTester, the method preprocessWriteValueHive:

private static Object preprocessWriteValueHive(Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP)) {
        return Timestamp.ofEpochMilli(((SqlTimestamp) value).getMillis());
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type.getTypeSignature().getBase().equals(StandardTypes.ARRAY)) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueHive(elementType, element)).collect(toList());
    }
    if (type.getTypeSignature().getBase().equals(StandardTypes.MAP)) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type.getTypeSignature().getBase().equals(StandardTypes.ROW)) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
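
The VARBINARY handling here is identical to Example 1: the SqlVarbinary value is unwrapped to its raw bytes. What this Hive-side variant adds is a CharType branch (wrapping the string in a HiveChar of the declared length) and a TIMESTAMP branch without the binary-format timezone adjustment. A small sketch of the char conversion, with the CharType.createCharType factory name assumed:

import io.prestosql.spi.type.CharType;

import org.apache.hadoop.hive.common.type.HiveChar;

// Sketch of the CharType branch: the Presto string value is rewrapped as a
// HiveChar carrying the column's declared length (factory method name assumed).
public class CharPreprocessSketch {
    public static void main(String[] args) {
        CharType charType = CharType.createCharType(3);
        HiveChar hiveValue = new HiveChar("ab", charType.getLength());
        System.out.println(hiveValue.getValue() + " (char(" + charType.getLength() + "))");
    }
}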

Aggregations

ImmutableList (com.google.common.collect.ImmutableList): 4
ImmutableMap (com.google.common.collect.ImmutableMap): 4
BIGINT (io.prestosql.spi.type.BigintType.BIGINT): 4
Type (io.prestosql.spi.type.Type): 4
VARBINARY (io.prestosql.spi.type.VarbinaryType.VARBINARY): 4
List (java.util.List): 4
Map (java.util.Map): 4
Optional (java.util.Optional): 4
ImmutableMap.toImmutableMap (com.google.common.collect.ImmutableMap.toImmutableMap): 3
ImmutableSet (com.google.common.collect.ImmutableSet): 3
Iterables (com.google.common.collect.Iterables): 3
AbstractIterator (com.google.common.collect.AbstractIterator): 2
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 2
Iterators.advance (com.google.common.collect.Iterators.advance): 2
Lists (com.google.common.collect.Lists): 2
Slice (io.airlift.slice.Slice): 2
Slices (io.airlift.slice.Slices): 2
DataSize (io.airlift.units.DataSize): 2
MEGABYTE (io.airlift.units.DataSize.Unit.MEGABYTE): 2
Metadata (io.prestosql.metadata.Metadata): 2