Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.
The class OrcTester, method preprocessWriteValueHive.
private static Object preprocessWriteValueHive(Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS) || type.equals(TIMESTAMP_MICROS) || type.equals(TIMESTAMP_NANOS)) {
        LocalDateTime dateTime = ((SqlTimestamp) value).toLocalDateTime();
        return Timestamp.ofEpochSecond(dateTime.toEpochSecond(ZoneOffset.UTC), dateTime.getNano());
    }
    if (type.equals(TIMESTAMP_TZ_MILLIS) || type.equals(TIMESTAMP_TZ_MICROS) || type.equals(TIMESTAMP_TZ_NANOS)) {
        SqlTimestampWithTimeZone timestamp = (SqlTimestampWithTimeZone) value;
        int nanosOfMilli = roundDiv(timestamp.getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND);
        return Timestamp.ofEpochMilli(timestamp.getEpochMillis(), nanosOfMilli);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueHive(elementType, element))
                .collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
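The TIMESTAMP_MILLIS/MICROS/NANOS branch is where SqlTimestamp appears: the Trino value is unwrapped to a LocalDateTime and repacked as a Hive Timestamp. A minimal standalone sketch of that conversion follows; the class name SqlTimestampToHive and its main method are illustrative only, and the imports assume the same Trino SPI and Hive artifacts OrcTester already depends on.

import java.time.LocalDateTime;
import java.time.ZoneOffset;

import io.trino.spi.type.SqlTimestamp;
import org.apache.hadoop.hive.common.type.Timestamp;

public class SqlTimestampToHive {
    // Mirrors the TIMESTAMP_* branch above: epoch seconds plus nanos-of-second, interpreted in UTC
    public static Timestamp toHiveTimestamp(SqlTimestamp value) {
        LocalDateTime dateTime = value.toLocalDateTime();
        return Timestamp.ofEpochSecond(dateTime.toEpochSecond(ZoneOffset.UTC), dateTime.getNano());
    }

    public static void main(String[] args) {
        // A millisecond-precision timestamp (precision 3) built from epoch millis
        SqlTimestamp trinoValue = SqlTimestamp.fromMillis(3, 1_500L);
        // Prints the Hive representation of 1.5 seconds after the epoch
        System.out.println(toHiveTimestamp(trinoValue));
    }
}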
Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.
The class TestTimestamp, method testTimestampBackedByInt64.
@Test
public void testTimestampBackedByInt64() throws Exception {
    MessageType parquetSchema = parseMessageType("message hive_timestamp { optional int64 test (TIMESTAMP_MILLIS); }");
    ContiguousSet<Long> epochMillisValues = ContiguousSet.create(Range.closedOpen((long) -1_000, (long) 1_000), DiscreteDomain.longs());
    ImmutableList.Builder<SqlTimestamp> timestampsMillis = ImmutableList.builder();
    ImmutableList.Builder<Long> bigints = ImmutableList.builder();
    for (long value : epochMillisValues) {
        timestampsMillis.add(SqlTimestamp.fromMillis(3, value));
        bigints.add(value);
    }
    List<ObjectInspector> objectInspectors = singletonList(javaLongObjectInspector);
    List<String> columnNames = ImmutableList.of("test");
    ConnectorSession session = getHiveSession(new HiveConfig());
    try (ParquetTester.TempFile tempFile = new ParquetTester.TempFile("test", "parquet")) {
        JobConf jobConf = new JobConf();
        jobConf.setEnum(WRITER_VERSION, PARQUET_1_0);
        ParquetTester.writeParquetColumn(
                jobConf,
                tempFile.getFile(),
                CompressionCodecName.SNAPPY,
                ParquetTester.createTableProperties(columnNames, objectInspectors),
                getStandardStructObjectInspector(columnNames, objectInspectors),
                new Iterator<?>[] {epochMillisValues.iterator()},
                Optional.of(parquetSchema),
                false);
        testReadingAs(TIMESTAMP_MILLIS, session, tempFile, columnNames, timestampsMillis.build());
        testReadingAs(BIGINT, session, tempFile, columnNames, bigints.build());
    }
}
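For reference, the loop above pairs each raw epoch-millis long with a millisecond-precision SqlTimestamp as the expected value. The small sketch below (hypothetical class name TimestampExpectations, not part of TestTimestamp) shows what SqlTimestamp.fromMillis(3, value) produces for one of the values in the range.

import io.trino.spi.type.SqlTimestamp;

public class TimestampExpectations {
    public static void main(String[] args) {
        long epochMillis = -1_000; // first value of the closed-open range [-1000, 1000)
        // Precision 3 matches TIMESTAMP_MILLIS; internally the value is kept as epoch microseconds
        SqlTimestamp expected = SqlTimestamp.fromMillis(3, epochMillis);
        System.out.println(expected.getEpochMicros()); // -1000000
        System.out.println(expected);                  // one second before the epoch
    }
}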
Use of io.trino.spi.type.SqlTimestamp in project trino by trinodb.
The class AbstractTestHiveFileFormats, method checkPageSource.
protected void checkPageSource(ConnectorPageSource pageSource, List<TestColumn> testColumns, List<Type> types, int rowCount) throws IOException {
    try {
        MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, types);
        assertEquals(result.getMaterializedRows().size(), rowCount);
        for (MaterializedRow row : result) {
            for (int i = 0, testColumnsSize = testColumns.size(); i < testColumnsSize; i++) {
                TestColumn testColumn = testColumns.get(i);
                Type type = types.get(i);
                Object actualValue = row.getField(i);
                Object expectedValue = testColumn.getExpectedValue();
                if (expectedValue instanceof Slice) {
                    expectedValue = ((Slice) expectedValue).toStringUtf8();
                }
                if (actualValue == null || expectedValue == null) {
                    assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().equals("float")) {
                    assertEquals((float) actualValue, (float) expectedValue, EPSILON, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().equals("double")) {
                    assertEquals((double) actualValue, (double) expectedValue, EPSILON, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().equals("date")) {
                    SqlDate expectedDate = new SqlDate(((Long) expectedValue).intValue());
                    assertEquals(actualValue, expectedDate, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().equals("int") || testColumn.getObjectInspector().getTypeName().equals("smallint") || testColumn.getObjectInspector().getTypeName().equals("tinyint")) {
                    assertEquals(actualValue, expectedValue);
                } else if (testColumn.getObjectInspector().getTypeName().equals("timestamp")) {
                    SqlTimestamp expectedTimestamp = sqlTimestampOf(floorDiv((Long) expectedValue, MICROSECONDS_PER_MILLISECOND));
                    assertEquals(actualValue, expectedTimestamp, "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getTypeName().startsWith("char")) {
                    assertEquals(actualValue, padSpaces((String) expectedValue, (CharType) type), "Wrong value for column " + testColumn.getName());
                } else if (testColumn.getObjectInspector().getCategory() == Category.PRIMITIVE) {
                    if (expectedValue instanceof Slice) {
                        expectedValue = ((Slice) expectedValue).toStringUtf8();
                    }
                    if (actualValue instanceof Slice) {
                        actualValue = ((Slice) actualValue).toStringUtf8();
                    }
                    if (actualValue instanceof SqlVarbinary) {
                        actualValue = new String(((SqlVarbinary) actualValue).getBytes(), UTF_8);
                    }
                    if (actualValue instanceof SqlDecimal) {
                        actualValue = new BigDecimal(actualValue.toString());
                    }
                    assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                } else {
                    BlockBuilder builder = type.createBlockBuilder(null, 1);
                    type.writeObject(builder, expectedValue);
                    expectedValue = type.getObjectValue(SESSION, builder.build(), 0);
                    assertEquals(actualValue, expectedValue, "Wrong value for column " + testColumn.getName());
                }
            }
        }
    } finally {
        pageSource.close();
    }
}
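In the "timestamp" branch above, the expected value arrives as epoch microseconds and is floor-divided down to milliseconds before being wrapped in a SqlTimestamp via the test's sqlTimestampOf helper. A hypothetical standalone equivalent using plain Math.floorDiv and SqlTimestamp.fromMillis is sketched below; the class and method names are made up for illustration.

import io.trino.spi.type.SqlTimestamp;

public class ExpectedTimestampConversion {
    private static final long MICROSECONDS_PER_MILLISECOND = 1_000;

    // Floor division matches floorDiv in the test, so negative values round toward negative infinity
    public static SqlTimestamp expectedTimestamp(long epochMicros) {
        long epochMillis = Math.floorDiv(epochMicros, MICROSECONDS_PER_MILLISECOND);
        return SqlTimestamp.fromMillis(3, epochMillis);
    }

    public static void main(String[] args) {
        System.out.println(expectedTimestamp(1_234_567L)); // 1970-01-01 00:00:01.234
    }
}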