Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.
The class H2QueryRunner, method rowMapper:
private static RowMapper<MaterializedRow> rowMapper(List<? extends Type> types) {
    return (resultSet, context) -> {
        int count = resultSet.getMetaData().getColumnCount();
        checkArgument(types.size() == count, "expected types count (%s) does not match actual column count (%s)", types.size(), count);
        List<Object> row = new ArrayList<>(count);
        for (int i = 1; i <= count; i++) {
            Type type = types.get(i - 1);
            if (BOOLEAN.equals(type)) {
                boolean booleanValue = resultSet.getBoolean(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(booleanValue);
                }
            } else if (TINYINT.equals(type)) {
                byte byteValue = resultSet.getByte(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(byteValue);
                }
            } else if (SMALLINT.equals(type)) {
                short shortValue = resultSet.getShort(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(shortValue);
                }
            } else if (INTEGER.equals(type)) {
                int intValue = resultSet.getInt(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(intValue);
                }
            } else if (BIGINT.equals(type)) {
                long longValue = resultSet.getLong(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(longValue);
                }
            } else if (REAL.equals(type)) {
                float floatValue = resultSet.getFloat(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(floatValue);
                }
            } else if (DOUBLE.equals(type)) {
                double doubleValue = resultSet.getDouble(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(doubleValue);
                }
            } else if (JSON.equals(type)) {
                String stringValue = resultSet.getString(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(jsonParse(utf8Slice(stringValue)).toStringUtf8());
                }
            } else if (type instanceof VarcharType) {
                String stringValue = resultSet.getString(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(stringValue);
                }
            } else if (type instanceof CharType) {
                String stringValue = resultSet.getString(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(padSpaces(stringValue, (CharType) type));
                }
            } else if (VARBINARY.equals(type)) {
                byte[] bytes = resultSet.getBytes(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(bytes);
                }
            } else if (DATE.equals(type)) {
                // resultSet.getDate(i) doesn't work if the JVM's zone skipped the day being retrieved (e.g. 2011-12-30 in the Pacific/Apia zone)
                LocalDate dateValue = resultSet.getObject(i, LocalDate.class);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(dateValue);
                }
            } else if (type instanceof TimeType) {
                // resultSet.getTime(i) doesn't work if the JVM's zone had a forward offset change during 1970-01-01 (e.g. the America/Hermosillo zone)
                LocalTime timeValue = resultSet.getObject(i, LocalTime.class);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(timeValue);
                }
            } else if (TIME_WITH_TIME_ZONE.equals(type)) {
                throw new UnsupportedOperationException("H2 does not support TIME WITH TIME ZONE");
            } else if (type instanceof TimestampType) {
                // resultSet.getTimestamp(i) doesn't work if the JVM's zone had a forward offset at the date/time being retrieved
                LocalDateTime timestampValue;
                try {
                    timestampValue = resultSet.getObject(i, LocalDateTime.class);
                } catch (SQLException first) {
                    // H2's JDBC driver cannot convert DATE to LocalDateTime (even though it can convert to java.sql.Timestamp), so we do this manually
                    try {
                        timestampValue = Optional.ofNullable(resultSet.getObject(i, LocalDate.class)).map(LocalDate::atStartOfDay).orElse(null);
                    } catch (RuntimeException e) {
                        first.addSuppressed(e);
                        throw first;
                    }
                }
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(timestampValue);
                }
            } else if (TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
                // This means H2 is unsuitable for testing TIMESTAMP WITH TIME ZONE-bearing queries. Those need to be tested manually.
                throw new UnsupportedOperationException();
            } else if (UUID.equals(type)) {
                java.util.UUID value = (java.util.UUID) resultSet.getObject(i);
                row.add(value);
            } else if (UNKNOWN.equals(type)) {
                Object objectValue = resultSet.getObject(i);
                checkState(resultSet.wasNull(), "Expected a null value, but got %s", objectValue);
                row.add(null);
            } else if (type instanceof DecimalType) {
                DecimalType decimalType = (DecimalType) type;
                BigDecimal decimalValue = resultSet.getBigDecimal(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(decimalValue.setScale(decimalType.getScale(), BigDecimal.ROUND_HALF_UP).round(new MathContext(decimalType.getPrecision())));
                }
            } else if (type instanceof ArrayType) {
                Array array = resultSet.getArray(i);
                if (resultSet.wasNull()) {
                    row.add(null);
                } else {
                    row.add(newArrayList((Object[]) array.getArray()));
                }
            } else {
                throw new AssertionError("unhandled type: " + type);
            }
        }
        return new MaterializedRow(MaterializedResult.DEFAULT_PRECISION, row);
    };
}
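For context, this mapper plugs into Jdbi's fluent query API. A minimal sketch of how it might be driven against an in-memory H2 database (the "example" table, its column types, and direct access to the private rowMapper method are assumptions for illustration):

import io.trino.spi.type.Type;
import io.trino.testing.MaterializedRow;
import org.jdbi.v3.core.Handle;
import org.jdbi.v3.core.Jdbi;

import java.util.List;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.DateType.DATE;

public class RowMapperUsageSketch {
    public static void main(String[] args) {
        // In-memory H2 database; the "example" table is hypothetical
        Jdbi jdbi = Jdbi.create("jdbc:h2:mem:test");
        try (Handle handle = jdbi.open()) {
            handle.execute("CREATE TABLE example (id BIGINT, created DATE)");
            handle.execute("INSERT INTO example VALUES (1, DATE '2011-12-30')");

            // rowMapper(types) is the method shown above (private in H2QueryRunner,
            // so a real caller would go through H2QueryRunner's public API instead)
            List<Type> types = List.of(BIGINT, DATE);
            List<MaterializedRow> rows = handle
                    .createQuery("SELECT id, created FROM example")
                    .map(rowMapper(types))
                    .list();
            System.out.println(rows);
        }
    }
}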
Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.
The class H2QueryRunner, method insertRows:
private static void insertRows(ConnectorTableMetadata tableMetadata, Handle handle, RecordSet data) {
    List<ColumnMetadata> columns = tableMetadata.getColumns().stream().filter(columnMetadata -> !columnMetadata.isHidden()).collect(toImmutableList());
    String vars = Joiner.on(',').join(nCopies(columns.size(), "?"));
    String sql = format("INSERT INTO %s VALUES (%s)", tableMetadata.getTable().getTableName(), vars);
    RecordCursor cursor = data.cursor();
    while (true) {
        // insert 1000 rows at a time
        PreparedBatch batch = handle.prepareBatch(sql);
        for (int row = 0; row < 1000; row++) {
            if (!cursor.advanceNextPosition()) {
                if (batch.size() > 0) {
                    batch.execute();
                }
                return;
            }
            for (int column = 0; column < columns.size(); column++) {
                Type type = columns.get(column).getType();
                if (BOOLEAN.equals(type)) {
                    batch.bind(column, cursor.getBoolean(column));
                } else if (BIGINT.equals(type)) {
                    batch.bind(column, cursor.getLong(column));
                } else if (INTEGER.equals(type)) {
                    batch.bind(column, toIntExact(cursor.getLong(column)));
                } else if (DOUBLE.equals(type)) {
                    batch.bind(column, cursor.getDouble(column));
                } else if (type instanceof VarcharType) {
                    batch.bind(column, cursor.getSlice(column).toStringUtf8());
                } else if (DATE.equals(type)) {
                    long millisUtc = TimeUnit.DAYS.toMillis(cursor.getLong(column));
                    // H2 expects dates to be millis at midnight in the JVM timezone
                    long localMillis = DateTimeZone.UTC.getMillisKeepLocal(DateTimeZone.getDefault(), millisUtc);
                    batch.bind(column, new Date(localMillis));
                } else {
                    throw new IllegalArgumentException("Unsupported type " + type);
                }
            }
            batch.add();
        }
        batch.execute();
    }
}
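The DATE branch above is the subtle part: binding a java.sql.Date hands H2 an instant that is rendered in the JVM default zone, so the UTC midnight millis for the epoch day must be shifted to local midnight first. A standalone sketch of just that conversion, using Joda-Time as the method does (epoch day 15338, i.e. 2011-12-30, is an arbitrary example):

import org.joda.time.DateTimeZone;

import java.sql.Date;
import java.util.concurrent.TimeUnit;

public class DateBindingSketch {
    public static void main(String[] args) {
        long epochDays = 15338; // arbitrary example: 2011-12-30
        long millisUtc = TimeUnit.DAYS.toMillis(epochDays);
        // Keep the same local date/time, but reinterpret it in the JVM default zone,
        // because java.sql.Date renders its millis in that zone
        long localMillis = DateTimeZone.UTC.getMillisKeepLocal(DateTimeZone.getDefault(), millisUtc);
        System.out.println(new Date(localMillis)); // should print 2011-12-30 in any JVM zone
    }
}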
Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.
The class PostgreSqlClient, method toColumnMapping:
@Override
public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle) {
    String jdbcTypeName = typeHandle.getJdbcTypeName().orElseThrow(() -> new TrinoException(JDBC_ERROR, "Type name is missing: " + typeHandle));
    Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
    if (mapping.isPresent()) {
        return mapping;
    }
    switch (jdbcTypeName) {
        case "money":
            return Optional.of(moneyColumnMapping());
        case "uuid":
            return Optional.of(uuidColumnMapping());
        case "jsonb":
        case "json":
            return Optional.of(jsonColumnMapping());
        case "timestamptz":
            // PostgreSQL's "timestamp with time zone" is reported as Types.TIMESTAMP rather than Types.TIMESTAMP_WITH_TIMEZONE
            int decimalDigits = typeHandle.getRequiredDecimalDigits();
            return Optional.of(timestampWithTimeZoneColumnMapping(decimalDigits));
        case "hstore":
            return Optional.of(hstoreColumnMapping(session));
    }
    switch (typeHandle.getJdbcType()) {
        case Types.BIT:
            return Optional.of(booleanColumnMapping());
        case Types.SMALLINT:
            return Optional.of(smallintColumnMapping());
        case Types.INTEGER:
            return Optional.of(integerColumnMapping());
        case Types.BIGINT:
            return Optional.of(bigintColumnMapping());
        case Types.REAL:
            return Optional.of(realColumnMapping());
        case Types.DOUBLE:
            return Optional.of(doubleColumnMapping());
        case Types.NUMERIC: {
            int columnSize = typeHandle.getRequiredColumnSize();
            int precision;
            int decimalDigits = typeHandle.getDecimalDigits().orElse(0);
            if (getDecimalRounding(session) == ALLOW_OVERFLOW) {
                if (columnSize == PRECISION_OF_UNSPECIFIED_DECIMAL) {
                    // decimal type with unspecified scale - up to 131072 digits before the decimal point; up to 16383 digits after the decimal point
                    return Optional.of(decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, getDecimalDefaultScale(session)), getDecimalRoundingMode(session)));
                }
                precision = columnSize;
                if (precision > Decimals.MAX_PRECISION) {
                    int scale = min(decimalDigits, getDecimalDefaultScale(session));
                    return Optional.of(decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, scale), getDecimalRoundingMode(session)));
                }
            }
            // Map decimal(p, -s) (negative scale) to decimal(p+s, 0).
            precision = columnSize + max(-decimalDigits, 0);
            if (columnSize == PRECISION_OF_UNSPECIFIED_DECIMAL || precision > Decimals.MAX_PRECISION) {
                break;
            }
            return Optional.of(decimalColumnMapping(createDecimalType(precision, max(decimalDigits, 0)), UNNECESSARY));
        }
        case Types.CHAR:
            return Optional.of(charColumnMapping(typeHandle.getRequiredColumnSize()));
        case Types.VARCHAR:
            if (!jdbcTypeName.equals("varchar")) {
                // This can be e.g. an ENUM
                return Optional.of(typedVarcharColumnMapping(jdbcTypeName));
            }
            return Optional.of(varcharColumnMapping(typeHandle.getRequiredColumnSize()));
        case Types.BINARY:
            return Optional.of(varbinaryColumnMapping());
        case Types.DATE:
            return Optional.of(ColumnMapping.longMapping(DATE, (resultSet, index) -> LocalDate.parse(resultSet.getString(index), DATE_FORMATTER).toEpochDay(), dateWriteFunctionUsingLocalDate()));
        case Types.TIME:
            return Optional.of(timeColumnMapping(typeHandle.getRequiredDecimalDigits()));
        case Types.TIMESTAMP:
            TimestampType timestampType = createTimestampType(typeHandle.getRequiredDecimalDigits());
            return Optional.of(ColumnMapping.longMapping(timestampType, timestampReadFunction(timestampType), PostgreSqlClient::shortTimestampWriteFunction));
        case Types.ARRAY:
            Optional<ColumnMapping> columnMapping = arrayToTrinoType(session, connection, typeHandle);
            if (columnMapping.isPresent()) {
                return columnMapping;
            }
            break;
    }
    if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) {
        return mapToUnboundedVarchar(typeHandle);
    }
    return Optional.empty();
}
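In the Types.DATE case, Trino's DATE is a long-backed type holding days since the 1970-01-01 epoch, which is why a long mapping with a LocalDate round-trip suffices. A minimal sketch of a write function in the spirit of dateWriteFunctionUsingLocalDate() (illustrative only, not the actual base-jdbc implementation):

import io.trino.plugin.jdbc.LongWriteFunction;

import java.time.LocalDate;

public class DateWriteSketch {
    // Trino passes DATE values as epoch days; convert back to LocalDate and let
    // the JDBC driver serialize it, avoiding java.sql.Date's time-zone pitfalls
    public static LongWriteFunction dateWriteFunction() {
        return (statement, index, epochDay) -> statement.setObject(index, LocalDate.ofEpochDay(epochDay));
    }
}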
Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.
The class OrcTester, method preprocessWriteValueHive:
private static Object preprocessWriteValueHive(Type type, Object value) {
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type instanceof CharType) {
        return new HiveChar((String) value, ((CharType) type).getLength());
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP_MILLIS) || type.equals(TIMESTAMP_MICROS) || type.equals(TIMESTAMP_NANOS)) {
        LocalDateTime dateTime = ((SqlTimestamp) value).toLocalDateTime();
        return Timestamp.ofEpochSecond(dateTime.toEpochSecond(ZoneOffset.UTC), dateTime.getNano());
    }
    if (type.equals(TIMESTAMP_TZ_MILLIS) || type.equals(TIMESTAMP_TZ_MICROS) || type.equals(TIMESTAMP_TZ_NANOS)) {
        SqlTimestampWithTimeZone timestamp = (SqlTimestampWithTimeZone) value;
        int nanosOfMilli = roundDiv(timestamp.getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND);
        return Timestamp.ofEpochMilli(timestamp.getEpochMillis(), nanosOfMilli);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    if (type instanceof ArrayType) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream().map(element -> preprocessWriteValueHive(elementType, element)).collect(toList());
    }
    if (type instanceof MapType) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueHive(keyType, entry.getKey()), preprocessWriteValueHive(valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type instanceof RowType) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueHive(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
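As a concrete illustration of the DATE branch: both Trino's SqlDate and Hive's Date count days since 1970-01-01, so the conversion is a direct copy of the day count with no time-zone arithmetic. A minimal sketch (15338 is an arbitrary epoch day, 2011-12-30):

import io.trino.spi.type.SqlDate;
import org.apache.hadoop.hive.common.type.Date;

public class HiveDateSketch {
    public static void main(String[] args) {
        SqlDate trinoDate = new SqlDate(15338); // 2011-12-30 as days since the epoch
        // Mirrors the DATE branch above: the day count is copied verbatim
        Date hiveDate = Date.ofEpochDay(trinoDate.getDays());
        System.out.println(hiveDate); // expected: 2011-12-30
    }
}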
Use of io.trino.spi.type.DateType.DATE in project trino by trinodb.
The class TestHivePageSink, method writeTestFile:
private static long writeTestFile(HiveConfig config, HiveMetastore metastore, String outputPath) {
    HiveTransactionHandle transaction = new HiveTransactionHandle(false);
    HiveWriterStats stats = new HiveWriterStats();
    ConnectorPageSink pageSink = createPageSink(transaction, config, metastore, new Path("file:///" + outputPath), stats);
    List<LineItemColumn> columns = getTestColumns();
    List<Type> columnTypes = columns.stream().map(LineItemColumn::getType).map(TestHivePageSink::getHiveType).map(hiveType -> hiveType.getType(TESTING_TYPE_MANAGER)).collect(toList());
    PageBuilder pageBuilder = new PageBuilder(columnTypes);
    int rows = 0;
    for (LineItem lineItem : new LineItemGenerator(0.01, 1, 1)) {
        rows++;
        if (rows >= NUM_ROWS) {
            break;
        }
        pageBuilder.declarePosition();
        for (int i = 0; i < columns.size(); i++) {
            LineItemColumn column = columns.get(i);
            BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(i);
            switch (column.getType().getBase()) {
                case IDENTIFIER:
                    BIGINT.writeLong(blockBuilder, column.getIdentifier(lineItem));
                    break;
                case INTEGER:
                    INTEGER.writeLong(blockBuilder, column.getInteger(lineItem));
                    break;
                case DATE:
                    DATE.writeLong(blockBuilder, column.getDate(lineItem));
                    break;
                case DOUBLE:
                    DOUBLE.writeDouble(blockBuilder, column.getDouble(lineItem));
                    break;
                case VARCHAR:
                    createUnboundedVarcharType().writeSlice(blockBuilder, Slices.utf8Slice(column.getString(lineItem)));
                    break;
                default:
                    throw new IllegalArgumentException("Unsupported type " + column.getType());
            }
        }
    }
    Page page = pageBuilder.build();
    pageSink.appendPage(page);
    getFutureValue(pageSink.finish());
    File outputDir = new File(outputPath);
    List<File> files = ImmutableList.copyOf(outputDir.listFiles((dir, name) -> !name.endsWith(".crc")));
    File outputFile = getOnlyElement(files);
    long length = outputFile.length();
    ConnectorPageSource pageSource = createPageSource(transaction, config, outputFile);
    List<Page> pages = new ArrayList<>();
    while (!pageSource.isFinished()) {
        Page nextPage = pageSource.getNextPage();
        if (nextPage != null) {
            pages.add(nextPage.getLoadedPage());
        }
    }
    MaterializedResult expectedResults = toMaterializedResult(getHiveSession(config), columnTypes, ImmutableList.of(page));
    MaterializedResult results = toMaterializedResult(getHiveSession(config), columnTypes, pages);
    assertEquals(results, expectedResults);
    assertEquals(round(stats.getInputPageSizeInBytes().getAllTime().getMax()), page.getRetainedSizeInBytes());
    return length;
}
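The DATE case above writes the generator's day count straight into the block as a long. A minimal standalone sketch of building a one-column DATE page the same way (the dates are arbitrary):

import io.trino.spi.Page;
import io.trino.spi.PageBuilder;
import io.trino.spi.block.BlockBuilder;

import java.time.LocalDate;
import java.util.List;

import static io.trino.spi.type.DateType.DATE;

public class DatePageSketch {
    public static void main(String[] args) {
        PageBuilder pageBuilder = new PageBuilder(List.of(DATE));
        BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(0);
        for (LocalDate date : List.of(LocalDate.of(1992, 1, 1), LocalDate.of(1998, 12, 1))) {
            pageBuilder.declarePosition();
            // DATE values are stored as days since the 1970-01-01 epoch
            DATE.writeLong(blockBuilder, date.toEpochDay());
        }
        Page page = pageBuilder.build();
        System.out.println(page.getPositionCount()); // 2
    }
}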