Example 1 with SqlDate

Use of io.trino.spi.type.SqlDate in project trino by trinodb.

The class LiteralEncoder, method toExpression.

public Expression toExpression(Session session, Object object, Type type) {
    requireNonNull(type, "type is null");
    if (object instanceof Expression) {
        return (Expression) object;
    }
    if (object == null) {
        if (type.equals(UNKNOWN)) {
            return new NullLiteral();
        }
        return new Cast(new NullLiteral(), toSqlType(type), false, true);
    }
    checkArgument(Primitives.wrap(type.getJavaType()).isInstance(object), "object.getClass (%s) and type.getJavaType (%s) do not agree", object.getClass(), type.getJavaType());
    if (type.equals(TINYINT)) {
        return new GenericLiteral("TINYINT", object.toString());
    }
    if (type.equals(SMALLINT)) {
        return new GenericLiteral("SMALLINT", object.toString());
    }
    if (type.equals(INTEGER)) {
        return new LongLiteral(object.toString());
    }
    if (type.equals(BIGINT)) {
        LongLiteral expression = new LongLiteral(object.toString());
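        // Values in the 32-bit range would otherwise be re-parsed as INTEGER, so they are wrapped
        // in an explicitly typed BIGINT literal; larger values already parse back as BIGINT.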
        if (expression.getValue() >= Integer.MIN_VALUE && expression.getValue() <= Integer.MAX_VALUE) {
            return new GenericLiteral("BIGINT", object.toString());
        }
        return new LongLiteral(object.toString());
    }
    if (type.equals(DOUBLE)) {
        Double value = (Double) object;
        if (value.isNaN()) {
            return FunctionCallBuilder.resolve(session, plannerContext.getMetadata()).setName(QualifiedName.of("nan")).build();
        }
        if (value.equals(Double.NEGATIVE_INFINITY)) {
            return ArithmeticUnaryExpression.negative(FunctionCallBuilder.resolve(session, plannerContext.getMetadata()).setName(QualifiedName.of("infinity")).build());
        }
        if (value.equals(Double.POSITIVE_INFINITY)) {
            return FunctionCallBuilder.resolve(session, plannerContext.getMetadata()).setName(QualifiedName.of("infinity")).build();
        }
        return new DoubleLiteral(object.toString());
    }
    if (type.equals(REAL)) {
        Float value = intBitsToFloat(((Long) object).intValue());
        if (value.isNaN()) {
            return new Cast(FunctionCallBuilder.resolve(session, plannerContext.getMetadata()).setName(QualifiedName.of("nan")).build(), toSqlType(REAL));
        }
        if (value.equals(Float.NEGATIVE_INFINITY)) {
            return ArithmeticUnaryExpression.negative(new Cast(FunctionCallBuilder.resolve(session, plannerContext.getMetadata()).setName(QualifiedName.of("infinity")).build(), toSqlType(REAL)));
        }
        if (value.equals(Float.POSITIVE_INFINITY)) {
            return new Cast(FunctionCallBuilder.resolve(session, plannerContext.getMetadata()).setName(QualifiedName.of("infinity")).build(), toSqlType(REAL));
        }
        return new GenericLiteral("REAL", value.toString());
    }
    if (type instanceof DecimalType) {
        String string;
        if (isShortDecimal(type)) {
            string = Decimals.toString((long) object, ((DecimalType) type).getScale());
        } else {
            string = Decimals.toString((Int128) object, ((DecimalType) type).getScale());
        }
        return new Cast(new DecimalLiteral(string), toSqlType(type));
    }
    if (type instanceof VarcharType) {
        VarcharType varcharType = (VarcharType) type;
        Slice value = (Slice) object;
        if (varcharType.isUnbounded()) {
            return new GenericLiteral("VARCHAR", value.toStringUtf8());
        }
        StringLiteral stringLiteral = new StringLiteral(value.toStringUtf8());
        int boundedLength = varcharType.getBoundedLength();
        int valueLength = SliceUtf8.countCodePoints(value);
        if (boundedLength == valueLength) {
            return stringLiteral;
        }
        if (boundedLength > valueLength) {
            return new Cast(stringLiteral, toSqlType(type), false, true);
        }
        throw new IllegalArgumentException(format("Value [%s] does not fit in type %s", value.toStringUtf8(), varcharType));
    }
    if (type instanceof CharType) {
        StringLiteral stringLiteral = new StringLiteral(((Slice) object).toStringUtf8());
        return new Cast(stringLiteral, toSqlType(type), false, true);
    }
    if (type.equals(BOOLEAN)) {
        return new BooleanLiteral(object.toString());
    }
    if (type.equals(DATE)) {
        return new GenericLiteral("DATE", new SqlDate(toIntExact((Long) object)).toString());
    }
    if (type instanceof TimestampType) {
        TimestampType timestampType = (TimestampType) type;
        String representation;
        if (timestampType.isShort()) {
            representation = TimestampToVarcharCast.cast(timestampType.getPrecision(), (Long) object).toStringUtf8();
        } else {
            representation = TimestampToVarcharCast.cast(timestampType.getPrecision(), (LongTimestamp) object).toStringUtf8();
        }
        return new TimestampLiteral(representation);
    }
    if (type instanceof TimestampWithTimeZoneType) {
        TimestampWithTimeZoneType timestampWithTimeZoneType = (TimestampWithTimeZoneType) type;
        String representation;
        if (timestampWithTimeZoneType.isShort()) {
            representation = TimestampWithTimeZoneToVarcharCast.cast(timestampWithTimeZoneType.getPrecision(), (long) object).toStringUtf8();
        } else {
            representation = TimestampWithTimeZoneToVarcharCast.cast(timestampWithTimeZoneType.getPrecision(), (LongTimestampWithTimeZone) object).toStringUtf8();
        }
        if (!object.equals(parseTimestampWithTimeZone(timestampWithTimeZoneType.getPrecision(), representation))) {
            // Certain (point in time, time zone) pairs cannot be represented as a TIMESTAMP literal, as the literal uses local date/time in given time zone.
            // Thus, during DST backwards change by e.g. 1 hour, the local time is "repeated" twice and thus one local date/time logically corresponds to two
            // points in time, leaving one of them non-referencable.
            // TODO (https://github.com/trinodb/trino/issues/5781) consider treating such values as illegal
        } else {
            return new TimestampLiteral(representation);
        }
    }
    // If the stack value is not a simple type, encode the stack value in a block
    if (!type.getJavaType().isPrimitive() && type.getJavaType() != Slice.class && type.getJavaType() != Block.class) {
        object = nativeValueToBlock(type, object);
    }
    if (object instanceof Block) {
        SliceOutput output = new DynamicSliceOutput(toIntExact(((Block) object).getSizeInBytes()));
        BlockSerdeUtil.writeBlock(plannerContext.getBlockEncodingSerde(), output, (Block) object);
        object = output.slice();
        // At this point, object instanceof Slice && !type.equals(VARCHAR), so the Slice branch below applies
    }
    Type argumentType = typeForMagicLiteral(type);
    Expression argument;
    if (object instanceof Slice) {
        // HACK: we need to serialize VARBINARY in a format that can be embedded in an expression to be
        // able to encode it in the plan that gets sent to workers.
        // We do this by transforming the in-memory varbinary into a call to from_base64(<base64-encoded value>)
        Slice encoded = VarbinaryFunctions.toBase64((Slice) object);
        argument = FunctionCallBuilder.resolve(session, plannerContext.getMetadata()).setName(QualifiedName.of("from_base64")).addArgument(VARCHAR, new StringLiteral(encoded.toStringUtf8())).build();
    } else {
        argument = toExpression(session, object, argumentType);
    }
    ResolvedFunction resolvedFunction = plannerContext.getMetadata().getCoercion(session, QualifiedName.of(LITERAL_FUNCTION_NAME), argumentType, type);
    return FunctionCallBuilder.resolve(session, plannerContext.getMetadata()).setName(resolvedFunction.toQualifiedName()).addArgument(argumentType, argument).build();
}
Also used : TimestampWithTimeZoneToVarcharCast(io.trino.operator.scalar.timestamptz.TimestampWithTimeZoneToVarcharCast) TimestampToVarcharCast(io.trino.operator.scalar.timestamp.TimestampToVarcharCast) Cast(io.trino.sql.tree.Cast) SliceOutput(io.airlift.slice.SliceOutput) DynamicSliceOutput(io.airlift.slice.DynamicSliceOutput) VarcharType(io.trino.spi.type.VarcharType) BooleanLiteral(io.trino.sql.tree.BooleanLiteral) GenericLiteral(io.trino.sql.tree.GenericLiteral) DecimalLiteral(io.trino.sql.tree.DecimalLiteral) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) TimestampType(io.trino.spi.type.TimestampType) DynamicSliceOutput(io.airlift.slice.DynamicSliceOutput) Int128(io.trino.spi.type.Int128) TimestampLiteral(io.trino.sql.tree.TimestampLiteral) LongLiteral(io.trino.sql.tree.LongLiteral) ResolvedFunction(io.trino.metadata.ResolvedFunction) Float.intBitsToFloat(java.lang.Float.intBitsToFloat) TimestampWithTimeZoneType(io.trino.spi.type.TimestampWithTimeZoneType) TypeSignatureTranslator.toSqlType(io.trino.sql.analyzer.TypeSignatureTranslator.toSqlType) DecimalType(io.trino.spi.type.DecimalType) Type(io.trino.spi.type.Type) TimestampType(io.trino.spi.type.TimestampType) VarcharType(io.trino.spi.type.VarcharType) CharType(io.trino.spi.type.CharType) StringLiteral(io.trino.sql.tree.StringLiteral) ArithmeticUnaryExpression(io.trino.sql.tree.ArithmeticUnaryExpression) Expression(io.trino.sql.tree.Expression) Slice(io.airlift.slice.Slice) SqlDate(io.trino.spi.type.SqlDate) DecimalType(io.trino.spi.type.DecimalType) Utils.nativeValueToBlock(io.trino.spi.predicate.Utils.nativeValueToBlock) Block(io.trino.spi.block.Block) DoubleLiteral(io.trino.sql.tree.DoubleLiteral) CharType(io.trino.spi.type.CharType) NullLiteral(io.trino.sql.tree.NullLiteral)
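
In the DATE branch above, SqlDate carries the value as a day count since 1970-01-01, and its toString() renders the ISO-8601 date that ends up inside the GenericLiteral. A minimal sketch of that mapping, assuming SqlDate.toString() formats as LocalDate.ofEpochDay(days).toString(), which is what the DATE literal text above relies on:

import io.trino.spi.type.SqlDate;

import java.time.LocalDate;

import static java.lang.Math.toIntExact;

public class SqlDateLiteralSketch {
    public static void main(String[] args) {
        // DATE values cross the engine as an int day count since the epoch
        int days = toIntExact(LocalDate.of(2013, 8, 9).toEpochDay());
        SqlDate date = new SqlDate(days);
        // Renders the same text the GenericLiteral("DATE", ...) above would carry
        System.out.println("DATE '" + date + "'"); // DATE '2013-08-09'
    }
}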

Example 2 with SqlDate

Use of io.trino.spi.type.SqlDate in project trino by trinodb.

The class AbstractTestHive, method assertGetRecords.

protected void assertGetRecords(HiveStorageFormat hiveStorageFormat, ConnectorTableMetadata tableMetadata, HiveSplit hiveSplit, ConnectorPageSource pageSource, List<? extends ColumnHandle> columnHandles) throws IOException {
    try {
        MaterializedResult result = materializeSourceDataStream(newSession(), pageSource, getTypes(columnHandles));
        assertPageSourceType(pageSource, hiveStorageFormat);
        ImmutableMap<String, Integer> columnIndex = indexColumns(tableMetadata);
        long rowNumber = 0;
        long completedBytes = 0;
        for (MaterializedRow row : result) {
            try {
                assertValueTypes(row, tableMetadata.getColumns());
            } catch (RuntimeException e) {
                throw new RuntimeException("row " + rowNumber, e);
            }
            rowNumber++;
            Integer index;
            Object value;
            // STRING
            index = columnIndex.get("t_string");
            value = row.getField(index);
            if (rowNumber % 19 == 0) {
                assertNull(value);
            } else if (rowNumber % 19 == 1) {
                assertEquals(value, "");
            } else {
                assertEquals(value, "test");
            }
            // NUMBERS
            assertEquals(row.getField(columnIndex.get("t_tinyint")), (byte) (1 + rowNumber));
            assertEquals(row.getField(columnIndex.get("t_smallint")), (short) (2 + rowNumber));
            assertEquals(row.getField(columnIndex.get("t_int")), (int) (3 + rowNumber));
            index = columnIndex.get("t_bigint");
            if ((rowNumber % 13) == 0) {
                assertNull(row.getField(index));
            } else {
                assertEquals(row.getField(index), 4 + rowNumber);
            }
            assertEquals((Float) row.getField(columnIndex.get("t_float")), 5.1f + rowNumber, 0.001);
            assertEquals(row.getField(columnIndex.get("t_double")), 6.2 + rowNumber);
            // BOOLEAN
            index = columnIndex.get("t_boolean");
            if ((rowNumber % 3) == 2) {
                assertNull(row.getField(index));
            } else {
                assertEquals(row.getField(index), (rowNumber % 3) != 0);
            }
            // TIMESTAMP
            index = columnIndex.get("t_timestamp");
            if (index != null) {
                if ((rowNumber % 17) == 0) {
                    assertNull(row.getField(index));
                } else {
                    SqlTimestamp expected = sqlTimestampOf(3, 2011, 5, 6, 7, 8, 9, 123);
                    assertEquals(row.getField(index), expected);
                }
            }
            // BINARY
            index = columnIndex.get("t_binary");
            if (index != null) {
                if ((rowNumber % 23) == 0) {
                    assertNull(row.getField(index));
                } else {
                    assertEquals(row.getField(index), new SqlVarbinary("test binary".getBytes(UTF_8)));
                }
            }
            // DATE
            index = columnIndex.get("t_date");
            if (index != null) {
                if ((rowNumber % 37) == 0) {
                    assertNull(row.getField(index));
                } else {
                    SqlDate expected = new SqlDate(toIntExact(MILLISECONDS.toDays(new DateTime(2013, 8, 9, 0, 0, 0, UTC).getMillis())));
                    assertEquals(row.getField(index), expected);
                }
            }
            // VARCHAR(50)
            index = columnIndex.get("t_varchar");
            if (index != null) {
                value = row.getField(index);
                if (rowNumber % 39 == 0) {
                    assertNull(value);
                } else if (rowNumber % 39 == 1) {
                    // RCBINARY reads empty VARCHAR as null
                    if (hiveStorageFormat == RCBINARY) {
                        assertNull(value);
                    } else {
                        assertEquals(value, "");
                    }
                } else {
                    assertEquals(value, "test varchar");
                }
            }
            // CHAR(25)
            index = columnIndex.get("t_char");
            if (index != null) {
                value = row.getField(index);
                if ((rowNumber % 41) == 0) {
                    assertNull(value);
                } else {
                    assertEquals(value, (rowNumber % 41) == 1 ? "                         " : "test char                ");
                }
            }
            // MAP<STRING, STRING>
            index = columnIndex.get("t_map");
            if (index != null) {
                if ((rowNumber % 27) == 0) {
                    assertNull(row.getField(index));
                } else {
                    assertEquals(row.getField(index), ImmutableMap.of("test key", "test value"));
                }
            }
            // ARRAY<STRING>
            index = columnIndex.get("t_array_string");
            if (index != null) {
                if ((rowNumber % 29) == 0) {
                    assertNull(row.getField(index));
                } else {
                    assertEquals(row.getField(index), ImmutableList.of("abc", "xyz", "data"));
                }
            }
            // ARRAY<TIMESTAMP>
            index = columnIndex.get("t_array_timestamp");
            if (index != null) {
                if ((rowNumber % 43) == 0) {
                    assertNull(row.getField(index));
                } else {
                    SqlTimestamp expected = sqlTimestampOf(3, LocalDateTime.of(2011, 5, 6, 7, 8, 9, 123_000_000));
                    assertEquals(row.getField(index), ImmutableList.of(expected));
                }
            }
            // ARRAY<STRUCT<s_string: STRING, s_double:DOUBLE>>
            index = columnIndex.get("t_array_struct");
            if (index != null) {
                if ((rowNumber % 31) == 0) {
                    assertNull(row.getField(index));
                } else {
                    List<Object> expected1 = ImmutableList.of("test abc", 0.1);
                    List<Object> expected2 = ImmutableList.of("test xyz", 0.2);
                    assertEquals(row.getField(index), ImmutableList.of(expected1, expected2));
                }
            }
            // STRUCT<s_string: STRING, s_double:DOUBLE>
            index = columnIndex.get("t_struct");
            if (index != null) {
                if ((rowNumber % 31) == 0) {
                    assertNull(row.getField(index));
                } else {
                    assertTrue(row.getField(index) instanceof List);
                    List<?> values = (List<?>) row.getField(index);
                    assertEquals(values.size(), 2);
                    assertEquals(values.get(0), "test abc");
                    assertEquals(values.get(1), 0.1);
                }
            }
            // MAP<INT, ARRAY<STRUCT<s_string: STRING, s_double:DOUBLE>>>
            index = columnIndex.get("t_complex");
            if (index != null) {
                if ((rowNumber % 33) == 0) {
                    assertNull(row.getField(index));
                } else {
                    List<Object> expected1 = ImmutableList.of("test abc", 0.1);
                    List<Object> expected2 = ImmutableList.of("test xyz", 0.2);
                    assertEquals(row.getField(index), ImmutableMap.of(1, ImmutableList.of(expected1, expected2)));
                }
            }
            // NEW COLUMN
            assertNull(row.getField(columnIndex.get("new_column")));
            long newCompletedBytes = pageSource.getCompletedBytes();
            assertTrue(newCompletedBytes >= completedBytes);
            // some formats (e.g., Parquet) over-read the data by a bit
            assertLessThanOrEqual(newCompletedBytes, hiveSplit.getLength() + (100 * 1024));
            completedBytes = newCompletedBytes;
        }
        assertLessThanOrEqual(completedBytes, hiveSplit.getLength() + (100 * 1024));
        assertEquals(rowNumber, 100);
    } finally {
        pageSource.close();
    }
}
Also used : SqlVarbinary(io.trino.spi.type.SqlVarbinary) SqlTimestamp(io.trino.spi.type.SqlTimestamp) LocalDateTime(java.time.LocalDateTime) DateTime(org.joda.time.DateTime) SqlDate(io.trino.spi.type.SqlDate) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) List(java.util.List) MaterializedResult(io.trino.testing.MaterializedResult) MaterializedRow(io.trino.testing.MaterializedRow)
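
The expected t_date value above turns a Joda-Time midnight-UTC instant into a day count with MILLISECONDS.toDays. A small sketch showing that the same day count falls out of plain java.time, with no Joda or Hive types involved:

import java.time.Instant;
import java.time.LocalDate;
import java.util.concurrent.TimeUnit;

public class EpochDaysSketch {
    public static void main(String[] args) {
        // Same instant the test builds with new DateTime(2013, 8, 9, 0, 0, 0, UTC)
        long millis = Instant.parse("2013-08-09T00:00:00Z").toEpochMilli();
        long viaMillis = TimeUnit.MILLISECONDS.toDays(millis);
        long viaLocalDate = LocalDate.of(2013, 8, 9).toEpochDay();
        // Both routes produce the day count that new SqlDate(...) wraps
        System.out.println(viaMillis + " == " + viaLocalDate); // 15926 == 15926
    }
}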

Example 3 with SqlDate

Use of io.trino.spi.type.SqlDate in project trino by trinodb.

The class TestDate, method testGreatest.

@Test
public void testGreatest() {
    int days = (int) TimeUnit.MILLISECONDS.toDays(new DateTime(2013, 3, 30, 0, 0, UTC).getMillis());
    assertFunction("greatest(DATE '2013-03-30', DATE '2012-05-23')", DATE, new SqlDate(days));
    assertFunction("greatest(DATE '2013-03-30', DATE '2012-05-23', DATE '2012-06-01')", DATE, new SqlDate(days));
}
Also used : SqlDate(io.trino.spi.type.SqlDate) DateTime(org.joda.time.DateTime) Test(org.testng.annotations.Test)
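
Because DATE is just an epoch-day count, greatest over dates reduces to an integer max on the underlying representation. A sketch of the expected-value arithmetic, using java.time in place of the Joda conversion in the test:

import java.time.LocalDate;

public class GreatestDateSketch {
    public static void main(String[] args) {
        int a = (int) LocalDate.of(2013, 3, 30).toEpochDay();
        int b = (int) LocalDate.of(2012, 5, 23).toEpochDay();
        int c = (int) LocalDate.of(2012, 6, 1).toEpochDay();
        // greatest(DATE ..., DATE ...) compares the underlying day counts
        int days = Math.max(a, Math.max(b, c));
        System.out.println(LocalDate.ofEpochDay(days)); // 2013-03-30
    }
}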

Example 4 with SqlDate

Use of io.trino.spi.type.SqlDate in project trino by trinodb.

The class OrcTester, method decodeRecordReaderValue.

private static Object decodeRecordReaderValue(Type type, Object actualValue) {
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    } else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    } else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    } else if (actualValue instanceof DateWritableV2) {
        actualValue = new SqlDate(((DateWritableV2) actualValue).getDays());
    } else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    } else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    } else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    } else if (actualValue instanceof HiveCharWritable) {
        actualValue = ((HiveCharWritable) actualValue).getPaddedValue().toString();
    } else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    } else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    } else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // writable messes with the scale so rescale the values to the Trino type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    } else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    } else if (actualValue instanceof TimestampWritableV2) {
        Timestamp timestamp = ((TimestampWritableV2) actualValue).getTimestamp();
        if (type.equals(TIMESTAMP_MILLIS)) {
            actualValue = sqlTimestampOf(3, timestamp.toEpochMilli());
        } else if (type.equals(TIMESTAMP_MICROS)) {
            long micros = timestamp.toEpochSecond() * MICROSECONDS_PER_SECOND;
            micros += roundDiv(timestamp.getNanos(), NANOSECONDS_PER_MICROSECOND);
            actualValue = SqlTimestamp.newInstance(6, micros, 0);
        } else if (type.equals(TIMESTAMP_NANOS)) {
            long micros = timestamp.toEpochSecond() * MICROSECONDS_PER_SECOND;
            micros += timestamp.getNanos() / NANOSECONDS_PER_MICROSECOND;
            int picosOfMicro = (timestamp.getNanos() % NANOSECONDS_PER_MICROSECOND) * PICOSECONDS_PER_NANOSECOND;
            actualValue = SqlTimestamp.newInstance(9, micros, picosOfMicro);
        } else if (type.equals(TIMESTAMP_TZ_MILLIS)) {
            actualValue = SqlTimestampWithTimeZone.newInstance(3, timestamp.toEpochMilli(), 0, UTC_KEY);
        } else if (type.equals(TIMESTAMP_TZ_MICROS)) {
            int picosOfMilli = roundDiv(timestamp.getNanos(), NANOSECONDS_PER_MICROSECOND) * PICOSECONDS_PER_MICROSECOND;
            actualValue = SqlTimestampWithTimeZone.newInstance(3, timestamp.toEpochMilli(), picosOfMilli, UTC_KEY);
        } else if (type.equals(TIMESTAMP_TZ_NANOS)) {
            int picosOfMilli = (timestamp.getNanos() % NANOSECONDS_PER_MILLISECOND) * PICOSECONDS_PER_NANOSECOND;
            actualValue = SqlTimestampWithTimeZone.newInstance(3, timestamp.toEpochMilli(), picosOfMilli, UTC_KEY);
        } else {
            throw new IllegalArgumentException("Unsupported timestamp type: " + type);
        }
    } else if (actualValue instanceof OrcStruct) {
        List<Object> fields = new ArrayList<>();
        OrcStruct structObject = (OrcStruct) actualValue;
        for (int fieldId = 0; fieldId < structObject.getNumFields(); fieldId++) {
            fields.add(OrcUtil.getFieldValue(structObject, fieldId));
        }
        actualValue = decodeRecordReaderStruct(type, fields);
    } else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
    } else if (actualValue instanceof Map) {
        actualValue = decodeRecordReaderMap(type, (Map<?, ?>) actualValue);
    }
    return actualValue;
}
Also used : SqlVarbinary(io.trino.spi.type.SqlVarbinary) DoubleWritable(org.apache.hadoop.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) SqlTimestamp(io.trino.spi.type.SqlTimestamp) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) LongTimestamp(io.trino.spi.type.LongTimestamp) OrcStruct(org.apache.hadoop.hive.ql.io.orc.OrcStruct) Arrays.asList(java.util.Arrays.asList) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) SqlDecimal(io.trino.spi.type.SqlDecimal) Text(org.apache.hadoop.io.Text) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) SqlDate(io.trino.spi.type.SqlDate) DecimalType(io.trino.spi.type.DecimalType) BigInteger(java.math.BigInteger) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap)
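
The TIMESTAMP_MICROS and TIMESTAMP_NANOS branches above split the Hive Timestamp's nanosecond fraction into whole microseconds plus picoseconds of the final microsecond. A standalone sketch of that arithmetic; the unit constants are assumed to match the io.trino.type.DateTimes values the snippet imports statically:

public class TimestampSplitSketch {
    private static final long MICROSECONDS_PER_SECOND = 1_000_000;
    private static final int NANOSECONDS_PER_MICROSECOND = 1_000;
    private static final int PICOSECONDS_PER_NANOSECOND = 1_000;

    public static void main(String[] args) {
        long epochSecond = 1_304_665_689L; // example instant
        int nanos = 123_456_789; // fractional second from the Hive Timestamp

        // TIMESTAMP_NANOS path: truncate to micros, carry the remainder as picos
        long micros = epochSecond * MICROSECONDS_PER_SECOND + nanos / NANOSECONDS_PER_MICROSECOND;
        int picosOfMicro = (nanos % NANOSECONDS_PER_MICROSECOND) * PICOSECONDS_PER_NANOSECOND;

        System.out.println(micros % MICROSECONDS_PER_SECOND); // 123456 micros within the second
        System.out.println(picosOfMicro); // 789000 picos of the final microsecond
    }
}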

Example 5 with SqlDate

Use of io.trino.spi.type.SqlDate in project trino by trinodb.

The class RcFileTester, method preprocessWriteValueOld.

private static Object preprocessWriteValueOld(Format format, Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS)) {
        long millis = ((SqlTimestamp) value).getMillis();
        if (format == Format.BINARY) {
            millis = HIVE_STORAGE_TIME_ZONE.convertLocalToUTC(millis, false);
        }
        return Timestamp.ofEpochMilli(millis);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueOld(format, elementType, element)).collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(format, keyType, entry.getKey()), preprocessWriteValueOld(format, valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(format, fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
Also used : SqlVarbinary(io.trino.spi.type.SqlVarbinary) SqlTimestamp(io.trino.spi.type.SqlTimestamp) SqlDecimal(io.trino.spi.type.SqlDecimal) SqlDate(io.trino.spi.type.SqlDate) Date(org.apache.hadoop.hive.common.type.Date) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) Type(io.trino.spi.type.Type) VarcharType(io.trino.spi.type.VarcharType) DecimalType(io.trino.spi.type.DecimalType) ArrayType(io.trino.spi.type.ArrayType) MapType(io.trino.spi.type.MapType) RowType(io.trino.spi.type.RowType) ArrayList(java.util.ArrayList) List(java.util.List) Map(java.util.Map) HashMap(java.util.HashMap) Entry(java.util.Map.Entry) Collectors.toList(java.util.stream.Collectors.toList)
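
The DATE branch above bridges Trino's SqlDate and Hive's Date through the shared epoch-day count. A minimal round-trip sketch, assuming the Date.ofEpochDay/toEpochDay pair from the org.apache.hadoop.hive.common.type.Date class the snippet imports:

import io.trino.spi.type.SqlDate;
import org.apache.hadoop.hive.common.type.Date;

public class DateRoundTripSketch {
    public static void main(String[] args) {
        SqlDate trinoDate = new SqlDate(15926); // 2013-08-09 as days since the epoch

        // Write path, as in preprocessWriteValueOld: SqlDate -> Hive Date via the day count
        Date hiveDate = Date.ofEpochDay(trinoDate.getDays());

        // Read path (mirroring OrcTester's DateWritableV2 handling): day count -> SqlDate
        SqlDate roundTripped = new SqlDate(hiveDate.toEpochDay());

        System.out.println(trinoDate.equals(roundTripped)); // true
    }
}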

Aggregations

SqlDate (io.trino.spi.type.SqlDate): 16
DecimalType (io.trino.spi.type.DecimalType): 7
ImmutableList (com.google.common.collect.ImmutableList): 6
SqlDecimal (io.trino.spi.type.SqlDecimal): 6
SqlVarbinary (io.trino.spi.type.SqlVarbinary): 6
Type (io.trino.spi.type.Type): 6
VarcharType (io.trino.spi.type.VarcharType): 6
List (java.util.List): 6
Slice (io.airlift.slice.Slice): 5
SqlTimestamp (io.trino.spi.type.SqlTimestamp): 5
ArrayList (java.util.ArrayList): 5
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList): 4
ImmutableMap (com.google.common.collect.ImmutableMap): 4
Block (io.trino.spi.block.Block): 4
Map (java.util.Map): 4
Collectors.toList (java.util.stream.Collectors.toList): 4
DateTime (org.joda.time.DateTime): 4
Test (org.testng.annotations.Test): 4
Lists.newArrayList (com.google.common.collect.Lists.newArrayList): 3
BlockBuilder (io.trino.spi.block.BlockBuilder): 3