Use of com.facebook.presto.spi.type.TypeManager in project presto by prestodb.
In the class ParquetTester, the method assertFileContents:
private static void assertFileContents(JobConf jobConf, TempFile tempFile, Iterable<?> expectedValues, Type type) throws IOException, InterruptedException {
    Path path = new Path(tempFile.getFile().toURI());
    FileSystem fileSystem = path.getFileSystem(jobConf);
    // read the Parquet footer to obtain the file schema and row-group metadata
    ParquetMetadata parquetMetadata = ParquetMetadataReader.readFooter(fileSystem, path);
    FileMetaData fileMetaData = parquetMetadata.getFileMetaData();
    MessageType fileSchema = fileMetaData.getSchema();
    long size = fileSystem.getFileStatus(path).getLen();
    FSDataInputStream inputStream = fileSystem.open(path);
    ParquetDataSource dataSource = new HdfsParquetDataSource(path, size, inputStream);
    TypeManager typeManager = new TypeRegistry();
    ParquetReader parquetReader = new ParquetReader(fileSchema, fileSchema, parquetMetadata.getBlocks(), dataSource, typeManager, new AggregatedMemoryContext());
    assertEquals(parquetReader.getPosition(), 0);
    int rowsProcessed = 0;
    Iterator<?> iterator = expectedValues.iterator();
    // read batch by batch, comparing each decoded value against the expected iterator
    for (int batchSize = parquetReader.nextBatch(); batchSize >= 0; batchSize = parquetReader.nextBatch()) {
        ColumnDescriptor columnDescriptor = fileSchema.getColumns().get(0);
        Block block = parquetReader.readPrimitive(columnDescriptor, type);
        for (int i = 0; i < batchSize; i++) {
            assertTrue(iterator.hasNext());
            Object expected = iterator.next();
            Object actual = decodeObject(type, block, i);
            assertEquals(actual, expected);
        }
        rowsProcessed += batchSize;
        assertEquals(parquetReader.getPosition(), rowsProcessed);
    }
    // every expected value must have been consumed exactly once
    assertFalse(iterator.hasNext());
    assertEquals(parquetReader.getPosition(), rowsProcessed);
    parquetReader.close();
}
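The assertions above lean on a decodeObject helper that is not shown in this listing. As a rough illustration only, and assuming a ConnectorSession test constant named SESSION, such a helper for primitive types could be sketched with the Type accessors from the SPI (the project's actual implementation may differ):

private static Object decodeObject(Type type, Block block, int position) {
    if (block.isNull(position)) {
        return null;
    }
    // Type.getObjectValue converts the block entry at the given position into its Java object form
    return type.getObjectValue(SESSION, block, position);
}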
Use of com.facebook.presto.spi.type.TypeManager in project presto by prestodb.
In the class TestTypeRegistry, the method testNonexistentType:
@Test
public void testNonexistentType() {
    TypeManager typeManager = new TypeRegistry();
    assertNull(typeManager.getType(parseTypeSignature("not a real type")));
}
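For contrast, a hypothetical companion test (not part of the original class) could assert the positive case, assuming the default TypeRegistry constructor registers the built-in types:

@Test
public void testExistingType() {
    TypeManager typeManager = new TypeRegistry();
    // a well-known signature such as "bigint" should resolve to a non-null Type
    assertNotNull(typeManager.getType(parseTypeSignature("bigint")));
}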
Use of com.facebook.presto.spi.type.TypeManager in project presto by prestodb.
In the class ZipFunction, the method specialize:
@Override
public ScalarFunctionImplementation specialize(BoundVariables boundVariables, int arity, TypeManager typeManager, FunctionRegistry functionRegistry) {
    List<Type> types = this.typeParameters.stream().map(boundVariables::getTypeVariable).collect(toImmutableList());
    // zip arguments are arrays (Blocks) and are never null
    List<Boolean> nullableArguments = types.stream().map(type -> false).collect(toImmutableList());
    List<Class<?>> javaArgumentTypes = types.stream().map(type -> Block.class).collect(toImmutableList());
    // bind the resolved element types, then accept the input arrays as a varargs Block[] with one parameter per array
    MethodHandle methodHandle = METHOD_HANDLE.bindTo(types).asVarargsCollector(Block[].class).asType(methodType(Block.class, javaArgumentTypes));
    return new ScalarFunctionImplementation(false, nullableArguments, methodHandle, isDeterministic());
}
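The heart of this method is the MethodHandle plumbing: bindTo pins the resolved type list as the first argument, asVarargsCollector lets the remaining Block arguments be gathered into a Block[], and asType fixes the final arity to one Block parameter per zipped array. A small self-contained sketch of the same pattern on plain strings (purely illustrative, unrelated to Presto's actual zip implementation):

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.util.Arrays;
import java.util.List;
import static java.lang.invoke.MethodType.methodType;

public class BindVarargsDemo {
    // mirrors the shape of zip(List<Type> types, Block... blocks): one bound leading argument plus a varargs tail
    public static String join(List<String> separators, String... parts) {
        return String.join(separators.get(0), parts);
    }

    public static void main(String[] args) throws Throwable {
        MethodHandle handle = MethodHandles.lookup().findStatic(BindVarargsDemo.class, "join", methodType(String.class, List.class, String[].class));
        // bind the leading List, collect the tail as varargs, then fix the call site to exactly two String arguments
        MethodHandle bound = handle.bindTo(Arrays.asList("-")).asVarargsCollector(String[].class).asType(methodType(String.class, String.class, String.class));
        System.out.println((String) bound.invokeExact("a", "b")); // prints a-b
    }
}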
Use of com.facebook.presto.spi.type.TypeManager in project presto by prestodb.
In the class TestAnalyzer, the method setup:
@BeforeMethod(alwaysRun = true)
public void setup() throws Exception {
    TypeManager typeManager = new TypeRegistry();
    CatalogManager catalogManager = new CatalogManager();
    transactionManager = createTestTransactionManager(catalogManager);
    accessControl = new AccessControlManager(transactionManager);
    MetadataManager metadata = new MetadataManager(new FeaturesConfig(), typeManager, new BlockEncodingManager(typeManager), new SessionPropertyManager(), new SchemaPropertyManager(), new TablePropertyManager(), transactionManager);
    metadata.getFunctionRegistry().addFunctions(ImmutableList.of(APPLY_FUNCTION));
    catalogManager.registerCatalog(createTestingCatalog(TPCH_CATALOG, TPCH_CONNECTOR_ID));
    catalogManager.registerCatalog(createTestingCatalog(SECOND_CATALOG, SECOND_CONNECTOR_ID));
    catalogManager.registerCatalog(createTestingCatalog(THIRD_CATALOG, THIRD_CONNECTOR_ID));
    SchemaTableName table1 = new SchemaTableName("s1", "t1");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table1, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT), new ColumnMetadata("c", BIGINT), new ColumnMetadata("d", BIGINT)))));
    SchemaTableName table2 = new SchemaTableName("s1", "t2");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table2, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT)))));
    SchemaTableName table3 = new SchemaTableName("s1", "t3");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table3, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT), new ColumnMetadata("x", BIGINT, null, true)))));
    // table in different catalog
    SchemaTableName table4 = new SchemaTableName("s2", "t4");
    inSetupTransaction(session -> metadata.createTable(session, SECOND_CATALOG, new ConnectorTableMetadata(table4, ImmutableList.of(new ColumnMetadata("a", BIGINT)))));
    // table with a hidden column
    SchemaTableName table5 = new SchemaTableName("s1", "t5");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table5, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT, null, true)))));
    // table with a varchar column
    SchemaTableName table6 = new SchemaTableName("s1", "t6");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table6, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", VARCHAR), new ColumnMetadata("c", BIGINT), new ColumnMetadata("d", BIGINT)))));
    // table with bigint, double, array-of-bigint and array-of-double columns
    SchemaTableName table7 = new SchemaTableName("s1", "t7");
    inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table7, ImmutableList.of(new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", DOUBLE), new ColumnMetadata("c", new ArrayType(BIGINT)), new ColumnMetadata("d", new ArrayType(DOUBLE))))));
    // valid view referencing table in same schema
    String viewData1 = JsonCodec.jsonCodec(ViewDefinition.class).toJson(new ViewDefinition("select a from t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT)), Optional.of("user")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v1"), viewData1, false));
    // stale view (different column type)
    String viewData2 = JsonCodec.jsonCodec(ViewDefinition.class).toJson(new ViewDefinition("select a from t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", VARCHAR)), Optional.of("user")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v2"), viewData2, false));
    // view referencing a table in a different schema from itself and the session
    String viewData3 = JsonCodec.jsonCodec(ViewDefinition.class).toJson(new ViewDefinition("select a from t4", Optional.of(SECOND_CATALOG), Optional.of("s2"), ImmutableList.of(new ViewColumn("a", BIGINT)), Optional.of("owner")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(THIRD_CATALOG, "s3", "v3"), viewData3, false));
    // valid view with uppercase column name
    String viewData4 = JsonCodec.jsonCodec(ViewDefinition.class).toJson(new ViewDefinition("select A from t1", Optional.of("tpch"), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT)), Optional.of("user")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName("tpch", "s1", "v4"), viewData4, false));
    // recursive view referencing itself
    String viewData5 = JsonCodec.jsonCodec(ViewDefinition.class).toJson(new ViewDefinition("select * from v5", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT)), Optional.of("user")));
    inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v5"), viewData5, false));
    this.metadata = metadata;
}
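As an aside on the TypeManager used here: the same TypeRegistry also resolves the parametric signatures behind t7's array columns, so a hypothetical check (not part of the original setup) could look like this:

TypeManager typeManager = new TypeRegistry();
Type arrayOfBigint = typeManager.getType(parseTypeSignature("array(bigint)"));
assertNotNull(arrayOfBigint); // resolves to an ArrayType over BIGINT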
Use of com.facebook.presto.spi.type.TypeManager in project presto by prestodb.
In the class HiveMetadata, the method columnMetadataGetter:
private static Function<HiveColumnHandle, ColumnMetadata> columnMetadataGetter(Table table, TypeManager typeManager) {
    ImmutableList.Builder<String> columnNames = ImmutableList.builder();
    table.getPartitionColumns().stream().map(Column::getName).forEach(columnNames::add);
    table.getDataColumns().stream().map(Column::getName).forEach(columnNames::add);
    List<String> allColumnNames = columnNames.build();
    if (allColumnNames.size() > Sets.newHashSet(allColumnNames).size()) {
        throw new PrestoException(HIVE_INVALID_METADATA, format("Hive metadata for table %s is invalid: Table descriptor contains duplicate columns", table.getTableName()));
    }
    List<Column> tableColumns = table.getDataColumns();
    ImmutableMap.Builder<String, Optional<String>> builder = ImmutableMap.builder();
    for (Column field : concat(tableColumns, table.getPartitionColumns())) {
        if ((field.getComment() != null) && !Optional.of("from deserializer").equals(field.getComment())) {
            builder.put(field.getName(), field.getComment());
        }
        else {
            builder.put(field.getName(), Optional.empty());
        }
    }
    // add hidden columns
    builder.put(PATH_COLUMN_NAME, Optional.empty());
    if (table.getStorage().getBucketProperty().isPresent()) {
        builder.put(BUCKET_COLUMN_NAME, Optional.empty());
    }
    Map<String, Optional<String>> columnComment = builder.build();
    return handle -> new ColumnMetadata(
            handle.getName(),
            typeManager.getType(handle.getTypeSignature()),
            columnComment.get(handle.getName()).orElse(null),
            columnExtraInfo(handle.isPartitionKey()),
            handle.isHidden());
}
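A hedged usage sketch: the returned function is typically mapped over a table's HiveColumnHandles when building table metadata. The names handles, table, and typeManager below are assumed to be in scope rather than taken from the listing above:

Function<HiveColumnHandle, ColumnMetadata> metadataGetter = columnMetadataGetter(table, typeManager);
List<ColumnMetadata> columns = handles.stream()
        .map(metadataGetter)
        .collect(toImmutableList());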