Use of io.trino.spi.type.IntegerType.INTEGER in project trino by trinodb.
The class RcFileTester, method preprocessWriteValueOld:
private static Object preprocessWriteValueOld(Format format, Type type, Object value)
{
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS)) {
        long millis = ((SqlTimestamp) value).getMillis();
        if (format == Format.BINARY) {
            millis = HIVE_STORAGE_TIME_ZONE.convertLocalToUTC(millis, false);
        }
        return Timestamp.ofEpochMilli(millis);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueOld(format, elementType, element))
                .collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(format, keyType, entry.getKey()), preprocessWriteValueOld(format, valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(format, fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
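For context, a minimal sketch of how this conversion might be invoked for a single INTEGER cell; the format and the literal value are illustrative assumptions, not taken from the tester's actual write loop.

// Hypothetical call: a Trino-side Integer is reduced to a plain Java int for the old Hive writer.
Object hiveValue = preprocessWriteValueOld(Format.BINARY, INTEGER, 42);
// hiveValue is Integer.valueOf(42), produced by the ((Number) value).intValue() branch above.

In other words, the method normalizes Trino's Java representations (SqlDate, SqlTimestamp, SqlDecimal, nested lists and maps) into the plain Java objects the legacy Hive write path expects before the row is serialized.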
Use of io.trino.spi.type.IntegerType.INTEGER in project trino by trinodb.
The class TestMergingPageIterator, method testMerging:
@Test
public void testMerging()
{
    List<Type> types = ImmutableList.of(INTEGER, INTEGER);
    List<Integer> sortIndexes = ImmutableList.of(1);
    List<SortOrder> sortOrders = ImmutableList.of(SortOrder.ASC_NULLS_FIRST);

    List<List<Page>> pageLists = new ArrayList<>();
    PageBuilder pageBuilder = new PageBuilder(types);
    for (int i = 0; i < 10; i++) {
        Iterator<Integer> values = IntStream.range(0, 1000)
                .map(ignored -> ThreadLocalRandom.current().nextInt(100_000))
                .mapToObj(n -> ((n % 100) == 0) ? null : n)
                .sorted(nullsFirst(naturalOrder()))
                .iterator();
        List<Page> pages = new ArrayList<>();
        for (int j = 0; j < 10; j++) {
            for (int k = 0; k < 100; k++) {
                Integer n = values.next();
                pageBuilder.declarePosition();
                if (n == null) {
                    pageBuilder.getBlockBuilder(0).appendNull();
                    pageBuilder.getBlockBuilder(1).appendNull();
                }
                else {
                    INTEGER.writeLong(pageBuilder.getBlockBuilder(0), n);
                    INTEGER.writeLong(pageBuilder.getBlockBuilder(1), n * 22L);
                }
            }
            pages.add(pageBuilder.build());
            pageBuilder.reset();
        }
        pageLists.add(pages);
        assertFalse(values.hasNext());
    }

    List<Iterator<Page>> pages = pageLists.stream()
            .map(List::iterator)
            .collect(toList());
    Iterator<Page> iterator = new MergingPageIterator(pages, types, sortIndexes, sortOrders, new TypeOperators());

    List<Long> values = new ArrayList<>();
    while (iterator.hasNext()) {
        Page page = iterator.next();
        for (int i = 0; i < page.getPositionCount(); i++) {
            if (page.getBlock(0).isNull(i)) {
                assertTrue(page.getBlock(1).isNull(i));
                values.add(null);
            }
            else {
                long x = INTEGER.getLong(page.getBlock(0), i);
                long y = INTEGER.getLong(page.getBlock(1), i);
                assertEquals(y, x * 22);
                values.add(x);
            }
        }
    }

    assertThat(values).isSortedAccordingTo(nullsFirst(naturalOrder()));
}
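As a side note, the test writes and reads the INTEGER columns through writeLong/getLong even though the type is 32-bit: Trino's fixed-width integer types exchange values as Java longs through the Block API. A minimal sketch of that round trip (the block builder sizing is an arbitrary assumption):

// Write one INTEGER value into a block and read it back as a long.
BlockBuilder blockBuilder = INTEGER.createBlockBuilder(null, 1);
INTEGER.writeLong(blockBuilder, 7L);                          // the value must fit in an int
long roundTripped = INTEGER.getLong(blockBuilder.build(), 0); // 7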
Use of io.trino.spi.type.IntegerType.INTEGER in project trino by trinodb.
The class KuduBucketFunction, method getBucket:
@Override
public int getBucket(Page page, int position)
{
    checkArgument(this.bucketChannelTypes.size() == page.getChannelCount());
    PartialRow partialRow = new PartialRow(this.schema);
    List<Integer> bucketIds = this.hashBucketSchemas.stream()
            .map(hashBucketSchema -> this.calculateSchemaLevelBucketId(page, partialRow, hashBucketSchema, position))
            .collect(toImmutableList());
    return getBucket(bucketIds, hashBucketSchemas);
}
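The BucketFunction contract maps one row of a Page to a bucket index, so a caller typically loops over positions. A hypothetical driver loop (the variable names are assumptions, not from the source):

// Hypothetical routing loop: partition each row of a page by its Kudu hash bucket.
for (int position = 0; position < page.getPositionCount(); position++) {
    int bucket = bucketFunction.getBucket(page, position);
    // hand the row at `position` to the writer responsible for `bucket`
}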
Use of io.trino.spi.type.IntegerType.INTEGER in project trino by trinodb.
The class HiveUtil, method createRecordReader:
public static RecordReader<?, ?> createRecordReader(Configuration configuration, Path path, long start, long length, Properties schema, List<HiveColumnHandle> columns)
{
    // determine which hive columns we will read
    List<HiveColumnHandle> readColumns = columns.stream()
            .filter(column -> column.getColumnType() == REGULAR)
            .collect(toImmutableList());

    // Projected columns are not supported here
    readColumns.forEach(readColumn -> checkArgument(readColumn.isBaseColumn(), "column %s is not a base column", readColumn.getName()));

    List<Integer> readHiveColumnIndexes = readColumns.stream()
            .map(HiveColumnHandle::getBaseHiveColumnIndex)
            .collect(toImmutableList());

    // Tell hive the columns we would like to read, this lets hive optimize reading column oriented files
    configuration = copy(configuration);
    setReadColumns(configuration, readHiveColumnIndexes);

    InputFormat<?, ?> inputFormat = getInputFormat(configuration, schema, true);
    JobConf jobConf = toJobConf(configuration);
    FileSplit fileSplit = new FileSplit(path, start, length, (String[]) null);

    // propagate serialization configuration to getRecordReader
    schema.stringPropertyNames().stream()
            .filter(name -> name.startsWith("serialization."))
            .forEach(name -> jobConf.set(name, schema.getProperty(name)));

    configureCompressionCodecs(jobConf);

    try {
        // raw type on WritableComparable can't be fixed because Utilities#skipHeader takes them raw
        @SuppressWarnings({"rawtypes", "unchecked"})
        RecordReader<WritableComparable, Writable> recordReader = (RecordReader<WritableComparable, Writable>) inputFormat.getRecordReader(fileSplit, jobConf, Reporter.NULL);

        int headerCount = getHeaderCount(schema);
        // Only skip header rows when the split is at the beginning of the file
        if (start == 0 && headerCount > 0) {
            Utilities.skipHeader(recordReader, headerCount, recordReader.createKey(), recordReader.createValue());
        }

        int footerCount = getFooterCount(schema);
        if (footerCount > 0) {
            recordReader = new FooterAwareRecordReader<>(recordReader, footerCount, jobConf);
        }

        return recordReader;
    }
    catch (IOException e) {
        if (e instanceof TextLineLengthLimitExceededException) {
            throw new TrinoException(HIVE_BAD_DATA, "Line too long in text file: " + path, e);
        }
        throw new TrinoException(HIVE_CANNOT_OPEN_SPLIT, format("Error opening Hive split %s (offset=%s, length=%s) using %s: %s",
                path, start, length, getInputFormatName(schema), firstNonNull(e.getMessage(), e.getClass().getName())), e);
    }
}
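A hedged sketch of a call site follows; the path, split bounds, and table properties are placeholders rather than values from the source, and a real schema must carry the table's serde and input-format properties for getInputFormat to resolve a reader.

// Illustrative only: open a record reader for the first 1 MB of a file, reading no regular columns.
Configuration configuration = new Configuration(false);
Path path = new Path("/tmp/example/part-00000");
Properties schema = new Properties();
// serde, input format, and header/footer counts would be populated from the table metadata here
RecordReader<?, ?> recordReader = createRecordReader(configuration, path, 0, 1024 * 1024, schema, ImmutableList.of());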
Use of io.trino.spi.type.IntegerType.INTEGER in project trino by trinodb.
The class OrcTester, method preprocessWriteValueHive:
private static Object preprocessWriteValueHive(Type type, Object value)
{
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS) || type.equals(TIMESTAMP_MICROS) || type.equals(TIMESTAMP_NANOS)) {
        LocalDateTime dateTime = ((SqlTimestamp) value).toLocalDateTime();
        return Timestamp.ofEpochSecond(dateTime.toEpochSecond(ZoneOffset.UTC), dateTime.getNano());
    }
    if (type.equals(TIMESTAMP_TZ_MILLIS) || type.equals(TIMESTAMP_TZ_MICROS) || type.equals(TIMESTAMP_TZ_NANOS)) {
        SqlTimestampWithTimeZone timestamp = (SqlTimestampWithTimeZone) value;
        int nanosOfMilli = roundDiv(timestamp.getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND);
        return Timestamp.ofEpochMilli(timestamp.getEpochMillis(), nanosOfMilli);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueHive(elementType, element))
                .collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
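To make the DecimalType branch concrete, a small hedged example (the literal value is an illustrative assumption): Trino's SqlDecimal is bridged to Hive's HiveDecimal through BigDecimal, so the unscaled value and scale carry over unchanged.

// 123.45 as a decimal(10, 2): unscaled value 12345 with scale 2.
SqlDecimal trinoDecimal = new SqlDecimal(BigInteger.valueOf(12345), 10, 2);
HiveDecimal hiveDecimal = HiveDecimal.create(trinoDecimal.toBigDecimal());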