Use of io.prestosql.plugin.hive.HiveTypeTranslator in project carbondata by apache: the class CarbondataModule, method configure.
@Override
public void configure(Binder binder) {
    binder.install(new HiveHdfsModule());

    // Type translation and coercion: HiveTypeTranslator backs the
    // connector-wide TypeTranslator binding.
    binder.bind(TypeTranslator.class).toInstance(new HiveTypeTranslator());
    binder.bind(CoercionPolicy.class).to(HiveCoercionPolicy.class).in(Scopes.SINGLETON);

    binder.bind(DirectoryLister.class).to(CachingDirectoryLister.class).in(Scopes.SINGLETON);
    configBinder(binder).bindConfig(HiveConfig.class);

    binder.bind(HiveSessionProperties.class).in(Scopes.SINGLETON);
    binder.bind(HiveTableProperties.class).in(Scopes.SINGLETON);
    binder.bind(HiveAnalyzeProperties.class).in(Scopes.SINGLETON);
    binder.bind(PrestoS3ClientFactory.class).in(Scopes.SINGLETON);

    binder.bind(CachingDirectoryLister.class).in(Scopes.SINGLETON);
    newExporter(binder).export(CachingDirectoryLister.class).withGeneratedName();
    binder.bind(HiveWriterStats.class).in(Scopes.SINGLETON);
    newExporter(binder).export(HiveWriterStats.class).withGeneratedName();

    newSetBinder(binder, EventClient.class).addBinding().to(HiveEventClient.class).in(Scopes.SINGLETON);
    binder.bind(HivePartitionManager.class).in(Scopes.SINGLETON);

    // CarbonData-specific implementations behind the stock Hive interfaces.
    binder.bind(LocationService.class).to(HiveLocationService.class).in(Scopes.SINGLETON);
    binder.bind(HiveLocationService.class).to(CarbonDataLocationService.class).in(Scopes.SINGLETON);
    binder.bind(HiveMetadataFactory.class).to(CarbonMetadataFactory.class).in(Scopes.SINGLETON);
    binder.bind(TransactionalMetadataFactory.class).to(HiveMetadataFactory.class).in(Scopes.SINGLETON);
    binder.bind(HiveTransactionManager.class).in(Scopes.SINGLETON);
    binder.bind(ConnectorSplitManager.class).to(CarbondataSplitManager.class).in(Scopes.SINGLETON);
    newExporter(binder).export(ConnectorSplitManager.class).as(generator -> generator.generatedNameOf(CarbondataSplitManager.class));
    binder.bind(ConnectorPageSourceProvider.class).to(CarbondataPageSourceProvider.class).in(Scopes.SINGLETON);
    binder.bind(ConnectorPageSinkProvider.class).to(CarbonDataPageSinkProvider.class).in(Scopes.SINGLETON);
    binder.bind(ConnectorNodePartitioningProvider.class).to(HiveNodePartitioningProvider.class).in(Scopes.SINGLETON);

    jsonCodecBinder(binder).bindJsonCodec(PartitionUpdate.class);
    binder.bind(FileFormatDataSourceStats.class).in(Scopes.SINGLETON);
    newExporter(binder).export(FileFormatDataSourceStats.class).withGeneratedName();

    // Page source factories for the file formats the connector can read.
    Multibinder<HivePageSourceFactory> pageSourceFactoryBinder = newSetBinder(binder, HivePageSourceFactory.class);
    pageSourceFactoryBinder.addBinding().to(OrcPageSourceFactory.class).in(Scopes.SINGLETON);
    pageSourceFactoryBinder.addBinding().to(ParquetPageSourceFactory.class).in(Scopes.SINGLETON);
    pageSourceFactoryBinder.addBinding().to(RcFilePageSourceFactory.class).in(Scopes.SINGLETON);

    Multibinder<HiveRecordCursorProvider> recordCursorProviderBinder = newSetBinder(binder, HiveRecordCursorProvider.class);
    recordCursorProviderBinder.addBinding().to(S3SelectRecordCursorProvider.class).in(Scopes.SINGLETON);
    binder.bind(GenericHiveRecordCursorProvider.class).in(Scopes.SINGLETON);

    // File writer factories, including the CarbonData-specific writer.
    Multibinder<HiveFileWriterFactory> fileWriterFactoryBinder = newSetBinder(binder, HiveFileWriterFactory.class);
    binder.bind(OrcFileWriterFactory.class).in(Scopes.SINGLETON);
    newExporter(binder).export(OrcFileWriterFactory.class).withGeneratedName();
    configBinder(binder).bindConfig(OrcReaderConfig.class);
    configBinder(binder).bindConfig(OrcWriterConfig.class);
    fileWriterFactoryBinder.addBinding().to(OrcFileWriterFactory.class).in(Scopes.SINGLETON);
    fileWriterFactoryBinder.addBinding().to(RcFileFileWriterFactory.class).in(Scopes.SINGLETON);
    configBinder(binder).bindConfig(ParquetReaderConfig.class);
    configBinder(binder).bindConfig(ParquetWriterConfig.class);
    fileWriterFactoryBinder.addBinding().to(CarbonDataFileWriterFactory.class).in(Scopes.SINGLETON);

    binder.bind(CarbonTableReader.class).in(Scopes.SINGLETON);
    jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class);
    newSetBinder(binder, SystemTable.class);

    // configure carbon properties
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.IS_QUERY_FROM_PRESTO, "true");
}
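The toInstance binding near the top is what makes HiveTypeTranslator the connector-wide TypeTranslator: any component that injects TypeTranslator receives this instance and can derive Hive storage types from engine types via HiveType.toHiveType, the same entry point the tryProjectPushdown snippet below uses. A minimal sketch of that translation path; the class name TypeTranslationSketch and the expected output values are illustrative assumptions, not taken from the project:

import io.prestosql.plugin.hive.HiveType;
import io.prestosql.plugin.hive.HiveTypeTranslator;
import io.prestosql.plugin.hive.TypeTranslator;
import io.prestosql.spi.type.BigintType;
import io.prestosql.spi.type.VarcharType;

public class TypeTranslationSketch {
    public static void main(String[] args) {
        // Same translator the module binds for TypeTranslator above.
        TypeTranslator translator = new HiveTypeTranslator();

        // HiveType.toHiveType(translator, type) is the same call used
        // in tryProjectPushdown below to map engine types to Hive types.
        HiveType bigint = HiveType.toHiveType(translator, BigintType.BIGINT);
        HiveType varchar = HiveType.toHiveType(translator, VarcharType.VARCHAR);

        System.out.println(bigint);  // expected: bigint
        System.out.println(varchar); // expected: string (unbounded varchar)
    }
}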
Use of io.prestosql.plugin.hive.HiveTypeTranslator in project boostkit-bigdata by kunpengcompute: the class HiveProjectPushdown, method tryProjectPushdown.
protected static Optional<TableScanNode> tryProjectPushdown(ProjectNode plan, Map<String, Type> types) {
    if (!(plan.getSource() instanceof TableScanNode)) {
        return Optional.empty();
    }
    TableScanNode tableScanNode = (TableScanNode) plan.getSource();
    ConnectorTableHandle tableHandle = tableScanNode.getTable().getConnectorHandle();
    // Only plain Hive scans without already-pushed projections qualify.
    if (!(tableHandle instanceof HiveTableHandle) || !((HiveTableHandle) tableHandle).getOffloadExpression().getProjections().isEmpty()) {
        return Optional.empty();
    }

    // Build a dummy offloaded column handle for every projected symbol,
    // using HiveTypeTranslator to derive the Hive storage type.
    Map<Symbol, ColumnHandle> assignments = new HashMap<>();
    HiveTypeTranslator hiveTypeTranslator = new HiveTypeTranslator();
    for (Map.Entry<Symbol, RowExpression> entry : plan.getAssignments().entrySet()) {
        String name = entry.getKey().getName();
        HiveType hiveType = HiveType.toHiveType(hiveTypeTranslator, entry.getValue().getType());
        TypeSignature typeSignature = entry.getValue().getType().getTypeSignature();
        HiveColumnHandle columnHandle = new HiveColumnHandle(name, hiveType, typeSignature,
                DUMMY_OFFLOADED_COLUMN_INDEX, DUMMY_OFFLOADED, Optional.of("projections pushed down " + name));
        assignments.put(entry.getKey(), columnHandle);
    }

    // Rewrite the projection expressions from plan symbols to the
    // underlying column names, rejecting any expression the Omni
    // offload engine cannot evaluate.
    BiMap<VariableReferenceExpression, VariableReferenceExpression> variableToColumnMapping =
            tableScanNode.getAssignments().entrySet().stream().collect(toImmutableBiMap(
                    entry -> new VariableReferenceExpression(entry.getKey().getName(), types.get(entry.getKey().getName())),
                    entry -> new VariableReferenceExpression(entry.getValue().getColumnName(), types.get(entry.getKey().getName()))));
    ImmutableMap.Builder<Symbol, RowExpression> projections = new ImmutableMap.Builder<>();
    for (Map.Entry<Symbol, RowExpression> entry : plan.getAssignments().getMap().entrySet()) {
        RowExpression expression = replaceExpression(entry.getValue(), variableToColumnMapping);
        if (!OmniExpressionChecker.checkExpression(expression)) {
            return Optional.empty();
        }
        projections.put(entry.getKey(), expression);
    }

    // Fold the projections into the table handle and return a new scan
    // node that can replace the ProjectNode.
    HiveTableHandle hiveTableHandle = (HiveTableHandle) tableHandle;
    HiveOffloadExpression offloadExpression = hiveTableHandle.getOffloadExpression()
            .updateProjections(projections.build(), getDataSourceColumns(tableScanNode));
    HiveTableHandle newHiveTableHandle = hiveTableHandle.withOffloadExpression(offloadExpression);
    TableHandle newTableHandle = new TableHandle(tableScanNode.getTable().getCatalogName(), newHiveTableHandle,
            tableScanNode.getTable().getTransaction(), tableScanNode.getTable().getLayout());
    return Optional.of(new TableScanNode(tableScanNode.getId(), newTableHandle, ImmutableList.copyOf(assignments.keySet()),
            assignments, tableScanNode.getEnforcedConstraint(), tableScanNode.getPredicate(), tableScanNode.getStrategy(),
            tableScanNode.getReuseTableScanMappingId(), tableScanNode.getConsumerTableScanNodeCount(), tableScanNode.isForDelete()));
}
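The helper is built to be called from a plan optimizer: it either returns a replacement TableScanNode with the projections folded into the Hive table handle, or Optional.empty() to signal that the plan must be left alone. A sketch of a caller under that contract; ProjectOffloadRule is hypothetical, only tryProjectPushdown's signature comes from the code above, the import paths assume hetu-core's SPI plan nodes, and since the method is protected the sketch assumes it lives in the same package as HiveProjectPushdown:

import java.util.Map;
import java.util.Optional;

import io.prestosql.spi.plan.PlanNode;
import io.prestosql.spi.plan.ProjectNode;
import io.prestosql.spi.plan.TableScanNode;
import io.prestosql.spi.type.Type;

// Hypothetical optimizer hook, not part of the project.
final class ProjectOffloadRule {
    private ProjectOffloadRule() {
    }

    static PlanNode rewrite(ProjectNode project, Map<String, Type> types) {
        Optional<TableScanNode> pushedDown = HiveProjectPushdown.tryProjectPushdown(project, types);
        // On success the projections now live in the scan's offload
        // expression, so the ProjectNode can be dropped; otherwise the
        // original plan is kept unchanged.
        return pushedDown.isPresent() ? pushedDown.get() : project;
    }
}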
Use of io.prestosql.plugin.hive.HiveTypeTranslator in project hetu-core by openlookeng: the class CarbondataModule, method configure.
@Override
public void configure(Binder binder) {
    binder.bind(TypeTranslator.class).toInstance(new HiveTypeTranslator());
    binder.bind(CoercionPolicy.class).to(HiveCoercionPolicy.class).in(Scopes.SINGLETON);

    // HDFS wiring is done inline here rather than via HiveHdfsModule.
    binder.bind(HdfsConfigurationInitializer.class).in(Scopes.SINGLETON);
    newSetBinder(binder, DynamicConfigurationProvider.class);
    binder.bind(HdfsConfiguration.class).to(HiveHdfsConfiguration.class).in(Scopes.SINGLETON);
    binder.bind(HdfsEnvironment.class).in(Scopes.SINGLETON);
    binder.bind(DirectoryLister.class).to(CachingDirectoryLister.class).in(Scopes.SINGLETON);
    configBinder(binder).bindConfig(CarbondataConfig.class);

    binder.bind(HiveSessionProperties.class).in(Scopes.SINGLETON);
    binder.bind(CarbondataTableProperties.class).in(Scopes.SINGLETON);
    binder.bind(HiveAnalyzeProperties.class).in(Scopes.SINGLETON);

    // JMX exports are qualified with the connector id in this variant.
    binder.bind(NamenodeStats.class).in(Scopes.SINGLETON);
    newExporter(binder).export(NamenodeStats.class).as(generatedNameOf(NamenodeStats.class, connectorId));

    Multibinder<HiveRecordCursorProvider> recordCursorProviderBinder = newSetBinder(binder, HiveRecordCursorProvider.class);
    recordCursorProviderBinder.addBinding().to(GenericHiveRecordCursorProvider.class).in(Scopes.SINGLETON);

    binder.bind(HiveWriterStats.class).in(Scopes.SINGLETON);
    newExporter(binder).export(HiveWriterStats.class).as(generatedNameOf(HiveWriterStats.class, connectorId));

    newSetBinder(binder, EventClient.class).addBinding().to(HiveEventClient.class).in(Scopes.SINGLETON);
    binder.bind(HivePartitionManager.class).in(Scopes.SINGLETON);

    // Carbondata-specific implementations behind the stock Hive interfaces.
    binder.bind(LocationService.class).to(CarbondataLocationService.class).in(Scopes.SINGLETON);
    binder.bind(HiveMetadataFactory.class).to(CarbondataMetadataFactory.class).in(Scopes.SINGLETON);
    binder.bind(new TypeLiteral<Supplier<TransactionalMetadata>>() {}).to(HiveMetadataFactory.class).in(Scopes.SINGLETON);
    binder.bind(HiveTransactionManager.class).in(Scopes.SINGLETON);
    binder.bind(ConnectorSplitManager.class).to(CarbondataSplitManager.class).in(Scopes.SINGLETON);
    newExporter(binder).export(ConnectorSplitManager.class).as(generatedNameOf(HiveSplitManager.class, connectorId));
    binder.bind(ConnectorPageSourceProvider.class).to(CarbondataPageSourceProvider.class).in(Scopes.SINGLETON);
    binder.bind(ConnectorPageSinkProvider.class).to(CarbondataPageSinkProvider.class).in(Scopes.SINGLETON);
    binder.bind(ConnectorNodePartitioningProvider.class).to(HiveNodePartitioningProvider.class).in(Scopes.SINGLETON);

    jsonCodecBinder(binder).bindJsonCodec(PartitionUpdate.class);
    jsonCodecBinder(binder).bindJsonCodec(CarbondataSegmentInfoUtil.class);

    binder.bind(FileFormatDataSourceStats.class).in(Scopes.SINGLETON);
    newExporter(binder).export(FileFormatDataSourceStats.class).as(generatedNameOf(FileFormatDataSourceStats.class, connectorId));

    Multibinder<HivePageSourceFactory> pageSourceFactoryBinder = newSetBinder(binder, HivePageSourceFactory.class);
    pageSourceFactoryBinder.addBinding().to(OrcPageSourceFactory.class).in(Scopes.SINGLETON);
    pageSourceFactoryBinder.addBinding().to(ParquetPageSourceFactory.class).in(Scopes.SINGLETON);
    pageSourceFactoryBinder.addBinding().to(RcFilePageSourceFactory.class).in(Scopes.SINGLETON);

    Multibinder<HiveFileWriterFactory> fileWriterFactoryBinder = newSetBinder(binder, HiveFileWriterFactory.class);
    binder.bind(OrcFileWriterFactory.class).in(Scopes.SINGLETON);
    newExporter(binder).export(OrcFileWriterFactory.class).as(generatedNameOf(OrcFileWriterFactory.class, connectorId));
    configBinder(binder).bindConfig(OrcFileWriterConfig.class);
    fileWriterFactoryBinder.addBinding().to(OrcFileWriterFactory.class).in(Scopes.SINGLETON);
    fileWriterFactoryBinder.addBinding().to(RcFileFileWriterFactory.class).in(Scopes.SINGLETON);
    fileWriterFactoryBinder.addBinding().to(CarbondataFileWriterFactory.class).in(Scopes.SINGLETON);

    binder.bind(CarbondataTableReader.class).in(Scopes.SINGLETON);
    configBinder(binder).bindConfig(ParquetFileWriterConfig.class);
}
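Both module variants follow the same pattern: bind the generic Hive interface, point it at a CarbonData-specific implementation, and scope it as a singleton, so every consumer that injects the interface transparently receives the CarbonData subclass. A self-contained Guice sketch of that override pattern, using hypothetical stand-in types rather than the real connector classes:

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Scopes;

public class OverridePatternSketch {
    interface LocationService {
        String describe();
    }

    static class HiveLocationService implements LocationService {
        @Override
        public String describe() {
            return "hive";
        }
    }

    // Subclass refines behavior, as CarbondataLocationService does.
    static class CarbondataLocationService extends HiveLocationService {
        @Override
        public String describe() {
            return "carbondata";
        }
    }

    public static void main(String[] args) {
        Injector injector = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                // Same shape as the modules above: callers inject the
                // interface, Guice hands them the CarbonData subclass.
                bind(LocationService.class).to(CarbondataLocationService.class).in(Scopes.SINGLETON);
            }
        });
        System.out.println(injector.getInstance(LocationService.class).describe()); // carbondata
    }
}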