Use of io.trino.spi.type.SqlVarbinary in project trino by trinodb.
The class TestTDigestAggregationFunction, method getExpectedValue.
private Object getExpectedValue(List<Double> weights, double... values) {
    assertEquals(weights.size(), values.length, "mismatched weights and values");
    if (values.length == 0) {
        return null;
    }
    TDigest tdigest = new TDigest();
    for (int i = 0; i < weights.size(); i++) {
        tdigest.add(values[i], weights.get(i));
    }
    return new SqlVarbinary(tdigest.serialize().getBytes());
}
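A hedged usage sketch (not taken from the Trino repository) of what this helper produces. It assumes io.airlift.stats.TDigest, whose add(value, weight) and serialize() methods the test relies on; the sample weights and values are made up.

import io.airlift.stats.TDigest;
import io.trino.spi.type.SqlVarbinary;

public class TDigestExpectedValueSketch {
    public static void main(String[] args) {
        // Build the digest the same way getExpectedValue does: one add(value, weight) per input pair.
        TDigest digest = new TDigest();
        digest.add(10.0, 1.0);
        digest.add(20.0, 2.0);
        digest.add(30.0, 3.0);
        // The expected aggregation result is the serialized digest wrapped in a SqlVarbinary.
        SqlVarbinary expected = new SqlVarbinary(digest.serialize().getBytes());
        System.out.println(expected.getBytes().length + " serialized bytes");
    }
}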
Use of io.trino.spi.type.SqlVarbinary in project trino by trinodb.
The class TestChecksumAggregation, method expectedChecksum.
private static SqlVarbinary expectedChecksum(Type type, Block block) {
    BlockPositionXxHash64 xxHash64Operator = blockTypeOperators.getXxHash64Operator(type);
    long result = 0;
    for (int i = 0; i < block.getPositionCount(); i++) {
        if (block.isNull(i)) {
            result += PRIME64;
        } else {
            result += xxHash64Operator.xxHash64(block, i) * PRIME64;
        }
    }
    return new SqlVarbinary(wrappedLongArray(result).getBytes());
}
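A hedged sketch (not part of TestChecksumAggregation) of the combining rule above: each per-position hash is multiplied by a 64-bit prime and the products are summed, so the checksum is independent of row order. The hash values and the PRIME64 constant shown here are illustrative stand-ins.

public class ChecksumCombineSketch {
    // Illustrative 64-bit prime; the test's PRIME64 constant plays the same role.
    private static final long PRIME64 = 0x9E3779B185EBCA87L;

    public static void main(String[] args) {
        long[] rowHashes = {42L, 7L, 13L}; // stand-ins for xxHash64Operator.xxHash64(block, position)
        long forward = 0;
        long backward = 0;
        for (int i = 0; i < rowHashes.length; i++) {
            forward += rowHashes[i] * PRIME64;
            backward += rowHashes[rowHashes.length - 1 - i] * PRIME64;
        }
        System.out.println(forward == backward); // true: the sum is order-independent
    }
}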
Use of io.trino.spi.type.SqlVarbinary in project trino by trinodb.
The class OrcTester, method decodeRecordReaderValue.
private static Object decodeRecordReaderValue(Type type, Object actualValue) {
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    } else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    } else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    } else if (actualValue instanceof DateWritableV2) {
        actualValue = new SqlDate(((DateWritableV2) actualValue).getDays());
    } else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    } else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    } else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    } else if (actualValue instanceof HiveCharWritable) {
        actualValue = ((HiveCharWritable) actualValue).getPaddedValue().toString();
    } else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    } else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    } else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // the writable messes with the scale, so rescale the value to the Trino type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    } else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    } else if (actualValue instanceof TimestampWritableV2) {
        Timestamp timestamp = ((TimestampWritableV2) actualValue).getTimestamp();
        if (type.equals(TIMESTAMP_MILLIS)) {
            actualValue = sqlTimestampOf(3, timestamp.toEpochMilli());
        } else if (type.equals(TIMESTAMP_MICROS)) {
            long micros = timestamp.toEpochSecond() * MICROSECONDS_PER_SECOND;
            micros += roundDiv(timestamp.getNanos(), NANOSECONDS_PER_MICROSECOND);
            actualValue = SqlTimestamp.newInstance(6, micros, 0);
        } else if (type.equals(TIMESTAMP_NANOS)) {
            long micros = timestamp.toEpochSecond() * MICROSECONDS_PER_SECOND;
            micros += timestamp.getNanos() / NANOSECONDS_PER_MICROSECOND;
            int picosOfMicro = (timestamp.getNanos() % NANOSECONDS_PER_MICROSECOND) * PICOSECONDS_PER_NANOSECOND;
            actualValue = SqlTimestamp.newInstance(9, micros, picosOfMicro);
        } else if (type.equals(TIMESTAMP_TZ_MILLIS)) {
            actualValue = SqlTimestampWithTimeZone.newInstance(3, timestamp.toEpochMilli(), 0, UTC_KEY);
        } else if (type.equals(TIMESTAMP_TZ_MICROS)) {
            int picosOfMilli = toIntExact(roundDiv(timestamp.getNanos() % NANOSECONDS_PER_MILLISECOND, NANOSECONDS_PER_MICROSECOND) * PICOSECONDS_PER_MICROSECOND);
            actualValue = SqlTimestampWithTimeZone.newInstance(6, timestamp.toEpochMilli(), picosOfMilli, UTC_KEY);
        } else if (type.equals(TIMESTAMP_TZ_NANOS)) {
            int picosOfMilli = (timestamp.getNanos() % NANOSECONDS_PER_MILLISECOND) * PICOSECONDS_PER_NANOSECOND;
            actualValue = SqlTimestampWithTimeZone.newInstance(9, timestamp.toEpochMilli(), picosOfMilli, UTC_KEY);
        } else {
            throw new IllegalArgumentException("Unsupported timestamp type: " + type);
        }
    } else if (actualValue instanceof OrcStruct) {
        List<Object> fields = new ArrayList<>();
        OrcStruct structObject = (OrcStruct) actualValue;
        for (int fieldId = 0; fieldId < structObject.getNumFields(); fieldId++) {
            fields.add(OrcUtil.getFieldValue(structObject, fieldId));
        }
        actualValue = decodeRecordReaderStruct(type, fields);
    } else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
    } else if (actualValue instanceof Map) {
        actualValue = decodeRecordReaderMap(type, (Map<?, ?>) actualValue);
    }
    return actualValue;
}
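As a hedged illustration of the timestamp arithmetic above (the constant names mirror those used in the decoder; the sample instant is made up), splitting epoch seconds plus nanos into microseconds and picoseconds-of-microsecond works like this:

public class TimestampSplitSketch {
    private static final long MICROSECONDS_PER_SECOND = 1_000_000;
    private static final int NANOSECONDS_PER_MICROSECOND = 1_000;
    private static final int PICOSECONDS_PER_NANOSECOND = 1_000;

    public static void main(String[] args) {
        long epochSecond = 1_600_000_000L;
        int nanosOfSecond = 123_456_789;

        // Whole microseconds since the epoch, then the leftover nanos expressed as picos of a microsecond.
        long micros = epochSecond * MICROSECONDS_PER_SECOND + nanosOfSecond / NANOSECONDS_PER_MICROSECOND;
        int picosOfMicro = (nanosOfSecond % NANOSECONDS_PER_MICROSECOND) * PICOSECONDS_PER_NANOSECOND;

        System.out.println(micros);        // 1600000000123456
        System.out.println(picosOfMicro);  // 789000
    }
}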
Use of io.trino.spi.type.SqlVarbinary in project trino by trinodb.
The class OrcTester, method writeValue.
private static void writeValue(Type type, BlockBuilder blockBuilder, Object value) {
    if (value == null) {
        blockBuilder.appendNull();
    } else {
        if (BOOLEAN.equals(type)) {
            type.writeBoolean(blockBuilder, (Boolean) value);
        } else if (TINYINT.equals(type) || SMALLINT.equals(type) || INTEGER.equals(type) || BIGINT.equals(type)) {
            type.writeLong(blockBuilder, ((Number) value).longValue());
        } else if (Decimals.isShortDecimal(type)) {
            type.writeLong(blockBuilder, ((SqlDecimal) value).toBigDecimal().unscaledValue().longValue());
        } else if (Decimals.isLongDecimal(type)) {
            type.writeObject(blockBuilder, Int128.valueOf(((SqlDecimal) value).toBigDecimal().unscaledValue()));
        } else if (DOUBLE.equals(type)) {
            type.writeDouble(blockBuilder, ((Number) value).doubleValue());
        } else if (REAL.equals(type)) {
            float floatValue = ((Number) value).floatValue();
            type.writeLong(blockBuilder, Float.floatToIntBits(floatValue));
        } else if (type instanceof VarcharType) {
            Slice slice = truncateToLength(utf8Slice((String) value), type);
            type.writeSlice(blockBuilder, slice);
        } else if (type instanceof CharType) {
            Slice slice = truncateToLengthAndTrimSpaces(utf8Slice((String) value), type);
            type.writeSlice(blockBuilder, slice);
        } else if (VARBINARY.equals(type)) {
            type.writeSlice(blockBuilder, Slices.wrappedBuffer(((SqlVarbinary) value).getBytes()));
        } else if (DATE.equals(type)) {
            long days = ((SqlDate) value).getDays();
            type.writeLong(blockBuilder, days);
        } else if (TIMESTAMP_MILLIS.equals(type)) {
            type.writeLong(blockBuilder, ((SqlTimestamp) value).getEpochMicros());
        } else if (TIMESTAMP_MICROS.equals(type)) {
            long micros = ((SqlTimestamp) value).getEpochMicros();
            type.writeLong(blockBuilder, micros);
        } else if (TIMESTAMP_NANOS.equals(type)) {
            SqlTimestamp ts = (SqlTimestamp) value;
            type.writeObject(blockBuilder, new LongTimestamp(ts.getEpochMicros(), ts.getPicosOfMicros()));
        } else if (TIMESTAMP_TZ_MILLIS.equals(type)) {
            long millis = ((SqlTimestampWithTimeZone) value).getEpochMillis();
            type.writeLong(blockBuilder, packDateTimeWithZone(millis, UTC_KEY));
        } else if (TIMESTAMP_TZ_MICROS.equals(type) || TIMESTAMP_TZ_NANOS.equals(type)) {
            SqlTimestampWithTimeZone ts = (SqlTimestampWithTimeZone) value;
            type.writeObject(blockBuilder, LongTimestampWithTimeZone.fromEpochMillisAndFraction(ts.getEpochMillis(), ts.getPicosOfMilli(), UTC_KEY));
        } else if (type instanceof ArrayType) {
            List<?> array = (List<?>) value;
            Type elementType = type.getTypeParameters().get(0);
            BlockBuilder arrayBlockBuilder = blockBuilder.beginBlockEntry();
            for (Object elementValue : array) {
                writeValue(elementType, arrayBlockBuilder, elementValue);
            }
            blockBuilder.closeEntry();
        } else if (type instanceof MapType) {
            Map<?, ?> map = (Map<?, ?>) value;
            Type keyType = type.getTypeParameters().get(0);
            Type valueType = type.getTypeParameters().get(1);
            BlockBuilder mapBlockBuilder = blockBuilder.beginBlockEntry();
            for (Entry<?, ?> entry : map.entrySet()) {
                writeValue(keyType, mapBlockBuilder, entry.getKey());
                writeValue(valueType, mapBlockBuilder, entry.getValue());
            }
            blockBuilder.closeEntry();
        } else if (type instanceof RowType) {
            List<?> array = (List<?>) value;
            List<Type> fieldTypes = type.getTypeParameters();
            BlockBuilder rowBlockBuilder = blockBuilder.beginBlockEntry();
            for (int fieldId = 0; fieldId < fieldTypes.size(); fieldId++) {
                Type fieldType = fieldTypes.get(fieldId);
                writeValue(fieldType, rowBlockBuilder, array.get(fieldId));
            }
            blockBuilder.closeEntry();
        } else {
            throw new IllegalArgumentException("Unsupported type " + type);
        }
    }
}
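A minimal sketch, assuming only the io.trino.spi block and type APIs already used above, of the scalar path writeValue takes for BIGINT values and nulls:

import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.type.BigintType;

public class WriteBigintSketch {
    public static void main(String[] args) {
        // Create a builder for three expected positions, write two longs and one null, then build the block.
        BlockBuilder builder = BigintType.BIGINT.createBlockBuilder(null, 3);
        BigintType.BIGINT.writeLong(builder, 1L);
        builder.appendNull();
        BigintType.BIGINT.writeLong(builder, 42L);
        Block block = builder.build();
        System.out.println(block.getPositionCount()); // 3
    }
}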
Use of io.trino.spi.type.SqlVarbinary in project trino by trinodb.
The class RcFileTester, method preprocessWriteValueOld.
private static Object preprocessWriteValueOld(Format format, Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS)) {
        long millis = ((SqlTimestamp) value).getMillis();
        if (format == Format.BINARY) {
            millis = HIVE_STORAGE_TIME_ZONE.convertLocalToUTC(millis, false);
        }
        return Timestamp.ofEpochMilli(millis);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueOld(format, elementType, element))
                .collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(format, keyType, entry.getKey()), preprocessWriteValueOld(format, valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(format, fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
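A hedged sketch (not from RcFileTester) of the Hive-side conversions used by the DATE and DECIMAL branches above; it assumes the org.apache.hadoop.hive.common.type.Date and HiveDecimal classes referenced in the snippet, and the sample inputs are made up.

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class PreprocessConversionSketch {
    public static void main(String[] args) {
        // A Trino SqlDate carries an epoch-day count; the old Hive writer expects a Hive Date.
        Date hiveDate = Date.ofEpochDay(18_000); // epoch day 18,000
        // A Trino SqlDecimal converts to BigDecimal, which HiveDecimal.create accepts directly.
        HiveDecimal hiveDecimal = HiveDecimal.create(new BigDecimal("123.45"));
        System.out.println(hiveDate + " / " + hiveDecimal);
    }
}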