Use of io.prestosql.spi.type.BooleanType.BOOLEAN in project hetu-core by openlookeng.
From the class RcFileTester, method preprocessWriteValueOld:
private static Object preprocessWriteValueOld(Format format, Type type, Object value)
{
    if (value == null) {
        return null;
    }
    // Booleans pass through; boxed numbers are narrowed to the width the Hive writer expects.
    if (type.equals(BOOLEAN)) {
        return value;
    }
    if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    if (type instanceof VarcharType) {
        return value;
    }
    if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    // Temporal and decimal values are rewrapped in Hive's own value classes.
    if (type.equals(DATE)) {
        return Date.ofEpochDay(((SqlDate) value).getDays());
    }
    if (type.equals(TIMESTAMP)) {
        long millis = ((SqlTimestamp) value).getMillis();
        if (format == Format.BINARY) {
            millis = HIVE_STORAGE_TIME_ZONE.convertLocalToUTC(millis, false);
        }
        return Timestamp.ofEpochMilli(millis);
    }
    if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    // Container types recurse into their elements, entries, and fields.
    if (type.getTypeSignature().getBase().equals(ARRAY)) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueOld(format, elementType, element))
                .collect(toList());
    }
    if (type.getTypeSignature().getBase().equals(MAP)) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(format, keyType, entry.getKey()), preprocessWriteValueOld(format, valueType, entry.getValue()));
        }
        return newMap;
    }
    if (type.getTypeSignature().getBase().equals(ROW)) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(format, fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
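The same pattern in miniature: dispatch on the value's shape and recurse into containers. Below is a simplified, self-contained sketch (plain JDK only; it dispatches on the runtime class rather than on a Presto Type, so it illustrates the recursion, not a drop-in replacement):

import java.util.List;
import java.util.stream.Collectors;

public final class TypeDispatchSketch
{
    // Hypothetical stand-in for the chain of Type checks above.
    static Object preprocess(Object value)
    {
        if (value == null) {
            return null;
        }
        if (value instanceof Number) {
            return ((Number) value).longValue(); // widen numerics, as the BIGINT branch does
        }
        if (value instanceof List) {
            return ((List<?>) value).stream()
                    .map(TypeDispatchSketch::preprocess)
                    .collect(Collectors.toList()); // recurse, as the ARRAY branch does
        }
        return value; // booleans and strings pass through unchanged
    }

    public static void main(String[] args)
    {
        System.out.println(preprocess(List.of(1, 2, List.of(3)))); // prints [1, 2, [3]]
    }
}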
Use of io.prestosql.spi.type.BooleanType.BOOLEAN in project hetu-core by openlookeng.
From the class FormatFunction, method valueConverter:
private static BiFunction<ConnectorSession, Block, Object> valueConverter(FunctionAndTypeManager functionAndTypeManager, Type type, int position)
{
    if (type.equals(UNKNOWN)) {
        return (session, block) -> null;
    }
    if (type.equals(BOOLEAN)) {
        return (session, block) -> type.getBoolean(block, position);
    }
    if (type.equals(TINYINT) || type.equals(SMALLINT) || type.equals(INTEGER) || type.equals(BIGINT)) {
        return (session, block) -> type.getLong(block, position);
    }
    if (type.equals(REAL)) {
        // REAL is stored as the raw bits of a 32-bit float inside a long slot.
        return (session, block) -> intBitsToFloat(toIntExact(type.getLong(block, position)));
    }
    if (type.equals(DOUBLE)) {
        return (session, block) -> type.getDouble(block, position);
    }
    if (type.equals(DATE)) {
        return (session, block) -> LocalDate.ofEpochDay(type.getLong(block, position));
    }
    if (type.equals(TIMESTAMP_WITH_TIME_ZONE)) {
        return (session, block) -> toZonedDateTime(type.getLong(block, position));
    }
    if (type.equals(TIMESTAMP)) {
        return (session, block) -> toLocalDateTime(type.getLong(block, position));
    }
    if (type.equals(TIME)) {
        return (session, block) -> toLocalTime(session, type.getLong(block, position));
    }
    // TODO: support TIME WITH TIME ZONE by making SqlTimeWithTimeZone implement TemporalAccessor
    if (type.equals(JSON)) {
        FunctionHandle functionHandle = functionAndTypeManager.resolveFunction(Optional.empty(), QualifiedObjectName.valueOf(DEFAULT_NAMESPACE, "json_format"), fromTypes(JSON));
        MethodHandle handle = functionAndTypeManager.getBuiltInScalarFunctionImplementation(functionHandle).getMethodHandle();
        return (session, block) -> convertToString(handle, type.getSlice(block, position));
    }
    // Decimals are rebuilt as BigDecimal from the unscaled value plus the type's scale.
    if (isShortDecimal(type)) {
        int scale = ((DecimalType) type).getScale();
        return (session, block) -> BigDecimal.valueOf(type.getLong(block, position), scale);
    }
    if (isLongDecimal(type)) {
        int scale = ((DecimalType) type).getScale();
        return (session, block) -> new BigDecimal(decodeUnscaledValue(type.getSlice(block, position)), scale);
    }
    if (isVarcharType(type) || isCharType(type)) {
        return (session, block) -> type.getSlice(block, position).toStringUtf8();
    }
    // Fallback: read the type's raw Java representation, then cast the value to VARCHAR.
    BiFunction<ConnectorSession, Block, Object> function;
    if (type.getJavaType() == long.class) {
        function = (session, block) -> type.getLong(block, position);
    }
    else if (type.getJavaType() == double.class) {
        function = (session, block) -> type.getDouble(block, position);
    }
    else if (type.getJavaType() == boolean.class) {
        function = (session, block) -> type.getBoolean(block, position);
    }
    else if (type.getJavaType() == Slice.class) {
        function = (session, block) -> type.getSlice(block, position);
    }
    else {
        function = (session, block) -> type.getObject(block, position);
    }
    MethodHandle handle = castToVarchar(functionAndTypeManager, type);
    if ((handle == null) || (handle.type().parameterCount() != 1)) {
        throw new PrestoException(NOT_SUPPORTED, "Type not supported for formatting: " + type.getDisplayName());
    }
    return (session, block) -> convertToString(handle, function.apply(session, block));
}
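The REAL branch is worth a closer look: the engine keeps a 32-bit float as its raw bit pattern inside a long-backed block, so reading it back requires narrowing to int and reinterpreting the bits. A minimal round-trip sketch (plain JDK):

import static java.lang.Float.floatToRawIntBits;
import static java.lang.Float.intBitsToFloat;
import static java.lang.Math.toIntExact;

public final class RealEncodingSketch
{
    public static void main(String[] args)
    {
        float original = 3.14f;
        long stored = floatToRawIntBits(original); // how a REAL value sits in a long slot
        float decoded = intBitsToFloat(toIntExact(stored)); // the decode step used in valueConverter
        System.out.println(decoded); // prints 3.14
    }
}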
Use of io.prestosql.spi.type.BooleanType.BOOLEAN in project hetu-core by openlookeng.
From the class ElasticsearchMetadata, method toPrestoType:
private Type toPrestoType(IndexMetadata.Field metaDataField, boolean isArray)
{
    IndexMetadata.Type type = metaDataField.getType();
    if (isArray) {
        Type elementType = toPrestoType(metaDataField, false);
        return new ArrayType(elementType);
    }
    if (type instanceof PrimitiveType) {
        switch (((PrimitiveType) type).getName()) {
            case "float":
                return REAL;
            case "double":
                return DOUBLE;
            case "byte":
                return TINYINT;
            case "short":
                return SMALLINT;
            case "integer":
                return INTEGER;
            case "long":
                return BIGINT;
            case "string":
            case "text":
            case "keyword":
                return VARCHAR;
            case "ip":
                return ipAddressType;
            case "boolean":
                return BOOLEAN;
            case "binary":
                return VARBINARY;
            default:
                break;
        }
    }
    else if (type instanceof DateTimeType) {
        if (((DateTimeType) type).getFormats().isEmpty()) {
            return TIMESTAMP;
        }
        // otherwise, skip -- we don't support custom formats, yet
    }
    else if (type instanceof ObjectType) {
        ObjectType objectType = (ObjectType) type;
        List<RowType.Field> fields = objectType.getFields().stream()
                .map(field -> RowType.field(field.getName(), toPrestoType(field)))
                .collect(toImmutableList());
        return RowType.from(fields);
    }
    return null;
}
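The shape of the mapping: unwrap array fields by recursing on the element, translate primitive names through a fixed table, fold object fields into a ROW, and return null for anything unrecognized so the column is skipped. A hypothetical, string-level sketch of that shape (plain JDK; the names here are illustrative, not the connector's API):

import java.util.Map;

public final class EsTypeMappingSketch
{
    private static final Map<String, String> PRIMITIVES = Map.of(
            "float", "REAL",
            "double", "DOUBLE",
            "byte", "TINYINT",
            "short", "SMALLINT",
            "integer", "INTEGER",
            "long", "BIGINT",
            "keyword", "VARCHAR",
            "boolean", "BOOLEAN");

    // Hypothetical mirror of toPrestoType: unwrap the array layer first, then map the leaf name.
    static String toPrestoTypeName(String esType, boolean isArray)
    {
        if (isArray) {
            return "ARRAY(" + toPrestoTypeName(esType, false) + ")";
        }
        return PRIMITIVES.get(esType); // null means "unsupported", as in the method above
    }

    public static void main(String[] args)
    {
        System.out.println(toPrestoTypeName("long", true)); // prints ARRAY(BIGINT)
    }
}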
Use of io.prestosql.spi.type.BooleanType.BOOLEAN in project hetu-core by openlookeng.
From the class AbstractTestHiveFileFormats, method checkCursor:
protected void checkCursor(RecordCursor cursor, List<TestColumn> testColumns, int rowCount)
{
    List<Type> types = testColumns.stream()
            .map(column -> column.getObjectInspector().getTypeName())
            .map(type -> HiveType.valueOf(type).getType(TYPE_MANAGER))
            .collect(toImmutableList());
    Map<Type, MethodHandle> distinctFromOperators = types.stream()
            .distinct()
            .collect(toImmutableMap(identity(), HiveTestUtils::distinctFromOperator));
    for (int row = 0; row < rowCount; row++) {
        assertTrue(cursor.advanceNextPosition());
        for (int i = 0, testColumnsSize = testColumns.size(); i < testColumnsSize; i++) {
            TestColumn testColumn = testColumns.get(i);
            Type type = types.get(i);
            Object fieldFromCursor = getFieldFromCursor(cursor, type, i);
            if (fieldFromCursor == null) {
                assertEquals(null, testColumn.getExpectedValue(), "Expected null for column " + testColumn.getName());
            }
            else if (type instanceof DecimalType) {
                DecimalType decimalType = (DecimalType) type;
                fieldFromCursor = new BigDecimal((BigInteger) fieldFromCursor, decimalType.getScale());
                assertEquals(fieldFromCursor, testColumn.getExpectedValue(), "Wrong value for column " + testColumn.getName());
            }
            else if (testColumn.getObjectInspector().getTypeName().equals("float")) {
                assertEquals((float) fieldFromCursor, (float) testColumn.getExpectedValue(), (float) EPSILON);
            }
            else if (testColumn.getObjectInspector().getTypeName().equals("double")) {
                assertEquals((double) fieldFromCursor, (double) testColumn.getExpectedValue(), EPSILON);
            }
            else if (testColumn.getObjectInspector().getTypeName().equals("tinyint")) {
                assertEquals(((Number) fieldFromCursor).byteValue(), testColumn.getExpectedValue());
            }
            else if (testColumn.getObjectInspector().getTypeName().equals("smallint")) {
                assertEquals(((Number) fieldFromCursor).shortValue(), testColumn.getExpectedValue());
            }
            else if (testColumn.getObjectInspector().getTypeName().equals("int")) {
                assertEquals(((Number) fieldFromCursor).intValue(), testColumn.getExpectedValue());
            }
            else if (testColumn.getObjectInspector().getCategory() == Category.PRIMITIVE) {
                assertEquals(fieldFromCursor, testColumn.getExpectedValue(), "Wrong value for column " + testColumn.getName());
            }
            else {
                Block expected = (Block) testColumn.getExpectedValue();
                Block actual = (Block) fieldFromCursor;
                boolean distinct = isDistinctFrom(distinctFromOperators.get(type), expected, actual);
                assertFalse(distinct, "Wrong value for column: " + testColumn.getName());
            }
        }
    }
    assertFalse(cursor.advanceNextPosition());
}
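Note how decimals are checked: the cursor yields the unscaled BigInteger, and the test reattaches the scale from the DecimalType before comparing. A minimal sketch of that reconstruction (plain JDK):

import java.math.BigDecimal;
import java.math.BigInteger;

public final class DecimalRebuildSketch
{
    public static void main(String[] args)
    {
        // A DECIMAL(10, 2) value of 123.45 comes off the cursor as unscaled 12345;
        // the scale (2 here) is carried by the type, not the value.
        BigInteger unscaled = BigInteger.valueOf(12345);
        int scale = 2; // in the test, decimalType.getScale()
        BigDecimal rebuilt = new BigDecimal(unscaled, scale);
        System.out.println(rebuilt); // prints 123.45
    }
}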
Use of io.prestosql.spi.type.BooleanType.BOOLEAN in project hetu-core by openlookeng.
From the class AbstractTestHive, method doTestTransactionDeleteInsert:
private void doTestTransactionDeleteInsert(HiveStorageFormat storageFormat, SchemaTableName tableName, Domain domainToDrop, MaterializedResult insertData, MaterializedResult expectedData, TransactionDeleteInsertTestTag tag, boolean expectQuerySucceed, Optional<ConflictTrigger> conflictTrigger)
        throws Exception
{
    Path writePath = null;
    Path targetPath = null;
    try (Transaction transaction = newTransaction()) {
        try {
            ConnectorMetadata metadata = transaction.getMetadata();
            ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
            ConnectorSession session;
            rollbackIfEquals(tag, ROLLBACK_RIGHT_AWAY);
            // Query 1: delete
            session = newSession();
            HiveColumnHandle dsColumnHandle = (HiveColumnHandle) metadata.getColumnHandles(session, tableHandle).get("pk2");
            TupleDomain<ColumnHandle> tupleDomain = TupleDomain.withColumnDomains(ImmutableMap.of(dsColumnHandle, domainToDrop));
            Constraint constraint = new Constraint(tupleDomain, convertToPredicate(tupleDomain));
            tableHandle = applyFilter(metadata, tableHandle, constraint);
            tableHandle = metadata.applyDelete(session, tableHandle).get();
            metadata.executeDelete(session, tableHandle);
            rollbackIfEquals(tag, ROLLBACK_AFTER_DELETE);
            // Query 2: insert
            session = newSession();
            ConnectorInsertTableHandle insertTableHandle = metadata.beginInsert(session, tableHandle);
            rollbackIfEquals(tag, ROLLBACK_AFTER_BEGIN_INSERT);
            writePath = getStagingPathRoot(insertTableHandle);
            targetPath = getTargetPathRoot(insertTableHandle);
            ConnectorPageSink sink = pageSinkProvider.createPageSink(transaction.getTransactionHandle(), session, insertTableHandle);
            sink.appendPage(insertData.toPage());
            rollbackIfEquals(tag, ROLLBACK_AFTER_APPEND_PAGE);
            Collection<Slice> fragments = getFutureValue(sink.finish());
            rollbackIfEquals(tag, ROLLBACK_AFTER_SINK_FINISH);
            metadata.finishInsert(session, insertTableHandle, fragments, ImmutableList.of());
            rollbackIfEquals(tag, ROLLBACK_AFTER_FINISH_INSERT);
            assertEquals(tag, COMMIT);
            if (conflictTrigger.isPresent()) {
                JsonCodec<PartitionUpdate> partitionUpdateCodec = JsonCodec.jsonCodec(PartitionUpdate.class);
                List<PartitionUpdate> partitionUpdates = fragments.stream()
                        .map(Slice::getBytes)
                        .map(partitionUpdateCodec::fromJson)
                        .collect(toList());
                conflictTrigger.get().triggerConflict(session, tableName, insertTableHandle, partitionUpdates);
            }
            transaction.commit();
            if (conflictTrigger.isPresent()) {
                assertTrue(expectQuerySucceed);
                conflictTrigger.get().verifyAndCleanup(session, tableName);
            }
        }
        catch (TestingRollbackException e) {
            transaction.rollback();
        }
        catch (PrestoException e) {
            assertFalse(expectQuerySucceed);
            if (conflictTrigger.isPresent()) {
                conflictTrigger.get().verifyAndCleanup(newSession(), tableName);
            }
        }
    }
    // check that temporary files are removed
    if (writePath != null && !writePath.equals(targetPath)) {
        HdfsContext context = new HdfsContext(newSession(), tableName.getSchemaName(), tableName.getTableName());
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(context, writePath);
        assertFalse(fileSystem.exists(writePath));
    }
    try (Transaction transaction = newTransaction()) {
        // verify partitions
        List<String> partitionNames = transaction.getMetastore(tableName.getSchemaName())
                .getPartitionNames(new HiveIdentity(newSession()), tableName.getSchemaName(), tableName.getTableName())
                .orElseThrow(() -> new AssertionError("Table does not exist: " + tableName));
        assertEqualsIgnoreOrder(partitionNames, expectedData.getMaterializedRows().stream()
                .map(row -> format("pk1=%s/pk2=%s", row.getField(1), row.getField(2)))
                .distinct()
                .collect(toList()));
        // load the new table
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        metadata.beginQuery(session);
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
        List<ColumnHandle> columnHandles = filterNonHiddenColumnHandles(metadata.getColumnHandles(session, tableHandle).values());
        // verify the data
        MaterializedResult result = readTable(transaction, tableHandle, columnHandles, session, TupleDomain.all(), OptionalInt.empty(), Optional.of(storageFormat));
        assertEqualsIgnoreOrder(result.getMaterializedRows(), expectedData.getMaterializedRows());
    }
}
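The rollbackIfEquals calls are the interesting device here: the test is parameterized by a tag, and each checkpoint throws TestingRollbackException when the tag matches, so one method exercises a rollback at every stage of the delete-then-insert transaction. A hypothetical, stripped-down sketch of the mechanism (the enum values echo the tags above; everything else is illustrative):

public final class RollbackTagSketch
{
    enum Tag { ROLLBACK_AFTER_DELETE, ROLLBACK_AFTER_SINK_FINISH, COMMIT }

    static class TestingRollbackException
            extends RuntimeException
    {
    }

    // Mirror of rollbackIfEquals: abort when execution reaches the chosen checkpoint.
    static void rollbackIfEquals(Tag tag, Tag checkpoint)
    {
        if (tag == checkpoint) {
            throw new TestingRollbackException();
        }
    }

    public static void main(String[] args)
    {
        Tag tag = Tag.ROLLBACK_AFTER_DELETE; // the stage under test
        try {
            rollbackIfEquals(tag, Tag.ROLLBACK_AFTER_DELETE);
            rollbackIfEquals(tag, Tag.ROLLBACK_AFTER_SINK_FINISH); // never reached for this tag
        }
        catch (TestingRollbackException e) {
            System.out.println("rolled back at the injected checkpoint"); // transaction.rollback() in the real test
        }
    }
}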