Example 1 with GENERIC_INTERNAL_ERROR

Use of io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR in project hetu-core by openlookeng.

In the class CarbondataMetadata, the method updateSchemaInfoRenameColumn:

private SchemaEvolutionEntry updateSchemaInfoRenameColumn(ColumnHandle source, String target) {
    HiveColumnHandle oldColumnHandle = (HiveColumnHandle) source;
    String oldColumnName = oldColumnHandle.getColumnName();
    String newColumnName = target;
    if (!carbonTable.canAllow(carbonTable, TableOperation.ALTER_COLUMN_RENAME, oldColumnHandle.getName())) {
        throw new PrestoException(GENERIC_INTERNAL_ERROR, "Alter table rename column is not supported for index schema");
    }
    TableSchema tableSchema = tableInfo.getFactTable();
    List<ColumnSchema> tableColumns = tableSchema.getListOfColumns();
    if (!tableColumns.stream().map(cols -> cols.getColumnName()).collect(toList()).contains(oldColumnHandle.getName())) {
        throw new PrestoException(GENERIC_INTERNAL_ERROR, format("Column %s does not exist in %s.%s", oldColumnHandle.getName(), carbonTable.getDatabaseName(), carbonTable.getTableName()));
    }
    List<ColumnSchema> carbonColumns = carbonTable.getCreateOrderColumn().stream().filter(cols -> !cols.isInvisible()).map(cols -> cols.getColumnSchema()).collect(toList());
    ColumnSchema oldCarbonColumn = carbonColumns.stream().filter(cols -> cols.getColumnName().equalsIgnoreCase(oldColumnName)).findFirst().get();
    validateColumnsForRenaming(oldCarbonColumn);
    TableSchemaBuilder schemaBuilder = new TableSchemaBuilder();
    ColumnSchema deletedColumn = schemaBuilder.addColumn(new StructField(oldColumnHandle.getName(), CarbondataHetuFilterUtil.spi2CarbondataTypeMapper(oldColumnHandle)), null, false, false);
    SchemaEvolutionEntry schemaEvolutionEntry = new SchemaEvolutionEntry();
    tableColumns.forEach(cols -> {
        if (cols.getColumnName().equalsIgnoreCase(oldColumnName)) {
            cols.setColumnName(newColumnName);
            schemaEvolutionEntry.setTimeStamp(timeStamp);
            schemaEvolutionEntry.setAdded(Arrays.asList(cols));
            schemaEvolutionEntry.setRemoved(Arrays.asList(deletedColumn));
        }
    });
    Map<String, String> tableProperties = tableInfo.getFactTable().getTableProperties();
    tableProperties.forEach((tablePropertyKey, tablePropertyValue) -> {
        if (tablePropertyKey.equalsIgnoreCase(oldColumnName)) {
            tableProperties.put(tablePropertyKey, newColumnName);
        }
    });
    tableInfo.setLastUpdatedTime(System.currentTimeMillis());
    tableInfo.setFactTable(tableSchema);
    return schemaEvolutionEntry;
}
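
The failure path worth noting here is the validate-then-throw pattern: each precondition (rename allowed, column exists) is checked up front and surfaced as a PrestoException carrying GENERIC_INTERNAL_ERROR. A minimal, self-contained sketch of that pattern follows; the schema, table, and column names are hypothetical stand-ins, not values from hetu-core.

import java.util.Arrays;
import java.util.List;
import io.prestosql.spi.PrestoException;
import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static java.lang.String.format;

public class RenameValidationSketch {
    public static void main(String[] args) {
        List<String> existingColumns = Arrays.asList("id", "name"); // hypothetical table schema
        String oldColumnName = "age"; // hypothetical column being renamed
        if (!existingColumns.contains(oldColumnName)) {
            // Surface the precondition failure to the engine as an internal error,
            // mirroring the check in updateSchemaInfoRenameColumn above.
            throw new PrestoException(GENERIC_INTERNAL_ERROR,
                    format("Column %s does not exist in %s.%s", oldColumnName, "default", "users"));
        }
    }
}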
Also used : Arrays(java.util.Arrays) StorageFormat(io.prestosql.plugin.hive.metastore.StorageFormat) BaseStorageFormat(io.prestosql.plugin.hive.BaseStorageFormat) HiveTableHandle(io.prestosql.plugin.hive.HiveTableHandle) FileSystem(org.apache.hadoop.fs.FileSystem) HiveWriteUtils(io.prestosql.plugin.hive.HiveWriteUtils) HiveUtil.hiveColumnHandles(io.prestosql.plugin.hive.HiveUtil.hiveColumnHandles) MetastoreUtil(io.prestosql.plugin.hive.metastore.MetastoreUtil) TableAlreadyExistsException(io.prestosql.spi.connector.TableAlreadyExistsException) ConnectorVacuumTableHandle(io.prestosql.spi.connector.ConnectorVacuumTableHandle) StringUtils(org.apache.commons.lang3.StringUtils) CarbonLockFactory(org.apache.carbondata.core.locks.CarbonLockFactory) HiveUtil.getPartitionKeyColumnHandles(io.prestosql.plugin.hive.HiveUtil.getPartitionKeyColumnHandles) ConnectorDeleteAsInsertTableHandle(io.prestosql.spi.connector.ConnectorDeleteAsInsertTableHandle) CarbonCommonConstants(org.apache.carbondata.core.constants.CarbonCommonConstants) Future(java.util.concurrent.Future) TableNotFoundException(io.prestosql.spi.connector.TableNotFoundException) ConnectorUpdateTableHandle(io.prestosql.spi.connector.ConnectorUpdateTableHandle) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) HIVE_STRING(io.prestosql.plugin.hive.HiveType.HIVE_STRING) StringEscapeUtils(org.apache.commons.lang3.StringEscapeUtils) HiveErrorCode(io.prestosql.plugin.hive.HiveErrorCode) ThriftWrapperSchemaConverterImpl(org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) Set(java.util.Set) LOCATION_PROPERTY(io.prestosql.plugin.hive.HiveTableProperties.LOCATION_PROPERTY) HiveTableProperties.getTransactionalValue(io.prestosql.plugin.hive.HiveTableProperties.getTransactionalValue) HiveOutputTableHandle(io.prestosql.plugin.hive.HiveOutputTableHandle) Collectors.joining(java.util.stream.Collectors.joining) BlockMappingVO(org.apache.carbondata.core.mutate.data.BlockMappingVO) CarbonLoadModel(org.apache.carbondata.processing.loading.model.CarbonLoadModel) META_TABLE_NAME(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME) Table(io.prestosql.plugin.hive.metastore.Table) GENERIC_INTERNAL_ERROR(io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) AccessControlMetadata(io.prestosql.plugin.hive.security.AccessControlMetadata) TableOptionConstant(org.apache.carbondata.processing.util.TableOptionConstant) SortingColumn(io.prestosql.plugin.hive.metastore.SortingColumn) PartitionInfo(org.apache.carbondata.core.metadata.schema.PartitionInfo) TypeTranslator(io.prestosql.plugin.hive.TypeTranslator) ConnectorVacuumTableInfo(io.prestosql.spi.connector.ConnectorVacuumTableInfo) MapredCarbonOutputFormat(org.apache.carbondata.hive.MapredCarbonOutputFormat) StructField(org.apache.carbondata.core.metadata.datatype.StructField) CarbonUtil(org.apache.carbondata.core.util.CarbonUtil) CarbondataTableProperties.getCarbondataLocation(io.hetu.core.plugin.carbondata.CarbondataTableProperties.getCarbondataLocation) HiveWriterFactory(io.prestosql.plugin.hive.HiveWriterFactory) Database(io.prestosql.plugin.hive.metastore.Database) SchemaEvolutionEntry(org.apache.carbondata.core.metadata.schema.SchemaEvolutionEntry) Slice(io.airlift.slice.Slice) Partition(io.prestosql.plugin.hive.metastore.Partition) TRANSACTIONAL(io.prestosql.plugin.hive.HiveTableProperties.TRANSACTIONAL) DataTypes(org.apache.carbondata.core.metadata.datatype.DataTypes) 
CarbonDataMergerUtil(org.apache.carbondata.processing.merger.CarbonDataMergerUtil) SimpleDateFormat(java.text.SimpleDateFormat) ComputedStatistics(io.prestosql.spi.statistics.ComputedStatistics) CarbondataTableReader(io.hetu.core.plugin.carbondata.impl.CarbondataTableReader) ArrayList(java.util.ArrayList) HdfsEnvironment(io.prestosql.plugin.hive.HdfsEnvironment) ThreadLocalSessionInfo(org.apache.carbondata.core.util.ThreadLocalSessionInfo) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) LocationService(io.prestosql.plugin.hive.LocationService) LockUsage(org.apache.carbondata.core.locks.LockUsage) CarbonUpdateUtil(org.apache.carbondata.core.mutate.CarbonUpdateUtil) SemiTransactionalHiveMetastore(io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore) SegmentStatusManager(org.apache.carbondata.core.statusmanager.SegmentStatusManager) ConnectorOutputTableHandle(io.prestosql.spi.connector.ConnectorOutputTableHandle) Properties(java.util.Properties) PartitionStatistics(io.prestosql.plugin.hive.PartitionStatistics) CarbonFile(org.apache.carbondata.core.datastore.filesystem.CarbonFile) CarbonOutputCommitter(org.apache.carbondata.hadoop.api.CarbonOutputCommitter) HiveStorageFormat(io.prestosql.plugin.hive.HiveStorageFormat) CarbonTablePath(org.apache.carbondata.core.util.path.CarbonTablePath) HiveInsertTableHandle(io.prestosql.plugin.hive.HiveInsertTableHandle) HiveTableProperties(io.prestosql.plugin.hive.HiveTableProperties) TypeManager(io.prestosql.spi.type.TypeManager) ICarbonLock(org.apache.carbondata.core.locks.ICarbonLock) IOException(java.io.IOException) CarbonTableIdentifier(org.apache.carbondata.core.metadata.CarbonTableIdentifier) PrincipalPrivileges(io.prestosql.plugin.hive.metastore.PrincipalPrivileges) ConnectorTableMetadata(io.prestosql.spi.connector.ConnectorTableMetadata) File(java.io.File) ColumnSchema(org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema) ExecutionException(java.util.concurrent.ExecutionException) OutputCommitter(org.apache.hadoop.mapreduce.OutputCommitter) TreeMap(java.util.TreeMap) ColumnHandle(io.prestosql.spi.connector.ColumnHandle) AbsoluteTableIdentifier(org.apache.carbondata.core.metadata.AbsoluteTableIdentifier) HiveWrittenPartitions(io.prestosql.plugin.hive.HiveWrittenPartitions) TableType(org.apache.hadoop.hive.metastore.TableType) META_TABLE_LOCATION(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_LOCATION) ConfigurationUtils(io.prestosql.plugin.hive.util.ConfigurationUtils) LocationHandle(io.prestosql.plugin.hive.LocationHandle) CarbonTableOutputFormat(org.apache.carbondata.hadoop.api.CarbonTableOutputFormat) CarbonMetadata(org.apache.carbondata.core.metadata.CarbonMetadata) HiveBasicStatistics(io.prestosql.plugin.hive.HiveBasicStatistics) HivePartitionManager(io.prestosql.plugin.hive.HivePartitionManager) ThriftWriter(org.apache.carbondata.core.writer.ThriftWriter) HiveTypeName(io.prestosql.plugin.hive.HiveTypeName) Date(java.util.Date) SYNTHESIZED(io.prestosql.plugin.hive.HiveColumnHandle.ColumnType.SYNTHESIZED) HiveColumnHandle(io.prestosql.plugin.hive.HiveColumnHandle) Duration(io.airlift.units.Duration) SegmentFileStore(org.apache.carbondata.core.metadata.SegmentFileStore) TaskType(org.apache.hadoop.mapreduce.TaskType) Logger(org.apache.log4j.Logger) ConnectorSession(io.prestosql.spi.connector.ConnectorSession) Gson(com.google.gson.Gson) Locale(java.util.Locale) HiveCarbonUtil(org.apache.carbondata.hive.util.HiveCarbonUtil) Path(org.apache.hadoop.fs.Path) 
SegmentUpdateStatusManager(org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager) Type(io.prestosql.spi.type.Type) CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) EncodedLoadModel(io.hetu.core.plugin.carbondata.CarbondataConstants.EncodedLoadModel) HiveBucketing(io.prestosql.plugin.hive.HiveBucketing) PrestoException(io.prestosql.spi.PrestoException) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) FileInputFormat(org.apache.hadoop.mapred.FileInputFormat) Collection(java.util.Collection) UUID(java.util.UUID) TableSchema(org.apache.carbondata.core.metadata.schema.table.TableSchema) HiveType(io.prestosql.plugin.hive.HiveType) CarbonLockUtil(org.apache.carbondata.core.locks.CarbonLockUtil) Collectors(java.util.stream.Collectors) String.format(java.lang.String.format) CarbonTableInputFormat(org.apache.carbondata.hadoop.api.CarbonTableInputFormat) List(java.util.List) TaskAttemptContextImpl(org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl) Job(org.apache.hadoop.mapreduce.Job) TableSchemaBuilder(org.apache.carbondata.core.metadata.schema.table.TableSchemaBuilder) Optional(java.util.Optional) NOT_SUPPORTED(io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED) HiveStatisticsProvider(io.prestosql.plugin.hive.statistics.HiveStatisticsProvider) JsonCodec(io.airlift.json.JsonCodec) HiveBucketProperty(io.prestosql.plugin.hive.HiveBucketProperty) ConnectorOutputMetadata(io.prestosql.spi.connector.ConnectorOutputMetadata) Segment(org.apache.carbondata.core.index.Segment) HiveSessionProperties(io.prestosql.plugin.hive.HiveSessionProperties) ConnectorNewTableLayout(io.prestosql.spi.connector.ConnectorNewTableLayout) TableInfo(org.apache.carbondata.core.metadata.schema.table.TableInfo) HashMap(java.util.HashMap) TableOperation(org.apache.carbondata.core.features.TableOperation) CompactionType(org.apache.carbondata.processing.merger.CompactionType) IOConstants(org.apache.hadoop.hive.ql.io.IOConstants) HiveUtil.toPartitionValues(io.prestosql.plugin.hive.HiveUtil.toPartitionValues) FileFactory(org.apache.carbondata.core.datastore.impl.FileFactory) SegmentStatus(org.apache.carbondata.core.statusmanager.SegmentStatus) LoadMetadataDetails(org.apache.carbondata.core.statusmanager.LoadMetadataDetails) Function(java.util.function.Function) ObjectSerializationUtil(org.apache.carbondata.core.util.ObjectSerializationUtil) JobContextImpl(org.apache.hadoop.mapreduce.task.JobContextImpl) HashSet(java.util.HashSet) JobStatus(org.apache.hadoop.mapred.JobStatus) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) ImmutableList(com.google.common.collect.ImmutableList) FileWriteOperation(org.apache.carbondata.core.fileoperations.FileWriteOperation) Objects.requireNonNull(java.util.Objects.requireNonNull) CarbonLoaderUtil(org.apache.carbondata.processing.util.CarbonLoaderUtil) HiveACIDWriteType(io.prestosql.plugin.hive.HiveACIDWriteType) HiveMetadata(io.prestosql.plugin.hive.HiveMetadata) LogServiceFactory(org.apache.carbondata.common.logging.LogServiceFactory) JobID(org.apache.hadoop.mapreduce.JobID) NoSuchElementException(java.util.NoSuchElementException) SegmentUpdateDetails(org.apache.carbondata.core.mutate.SegmentUpdateDetails) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) HiveUpdateTableHandle(io.prestosql.plugin.hive.HiveUpdateTableHandle) TableProcessingOperations(org.apache.carbondata.processing.loading.TableProcessingOperations) ColumnMetadata(io.prestosql.spi.connector.ColumnMetadata) 
ConnectorTableHandle(io.prestosql.spi.connector.ConnectorTableHandle) NON_INHERITABLE_PROPERTIES(io.prestosql.plugin.hive.HiveTableProperties.NON_INHERITABLE_PROPERTIES) SchemaNotFoundException(io.prestosql.spi.connector.SchemaNotFoundException) NoSuchMVException(org.apache.carbondata.common.exceptions.sql.NoSuchMVException) Maps(com.google.common.collect.Maps) HiveDeleteAsInsertTableHandle(io.prestosql.plugin.hive.HiveDeleteAsInsertTableHandle) RowCountDetailsVO(org.apache.carbondata.core.mutate.data.RowCountDetailsVO) CarbondataTableCacheModel(io.hetu.core.plugin.carbondata.impl.CarbondataTableCacheModel) PartitionUpdate(io.prestosql.plugin.hive.PartitionUpdate) JobConf(org.apache.hadoop.mapred.JobConf) TimeUnit(java.util.concurrent.TimeUnit) Collectors.toList(java.util.stream.Collectors.toList) ConcurrentSkipListSet(java.util.concurrent.ConcurrentSkipListSet) Column(io.prestosql.plugin.hive.metastore.Column) VisibleForTesting(com.google.common.annotations.VisibleForTesting) Comparator(java.util.Comparator) ConnectorInsertTableHandle(io.prestosql.spi.connector.ConnectorInsertTableHandle) SchemaConverter(org.apache.carbondata.core.metadata.converter.SchemaConverter) SchemaEvolutionEntry(org.apache.carbondata.core.metadata.schema.SchemaEvolutionEntry) StructField(org.apache.carbondata.core.metadata.datatype.StructField) TableSchema(org.apache.carbondata.core.metadata.schema.table.TableSchema) TableSchemaBuilder(org.apache.carbondata.core.metadata.schema.table.TableSchemaBuilder) PrestoException(io.prestosql.spi.PrestoException) ColumnSchema(org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema) HiveColumnHandle(io.prestosql.plugin.hive.HiveColumnHandle)

Example 2 with GENERIC_INTERNAL_ERROR

Use of io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR in project hetu-core by openlookeng.

In the class CarbondataFileWriter, the method appendRow:

public void appendRow(Page dataPage, int position) {
    FileSinkOperator.RecordWriter finalRecordWriter = null;
    if (HiveACIDWriteType.isUpdateOrDelete(acidWriteType)) {
        try {
            DeleteDeltaBlockDetails deleteDeltaBlockDetails = null;
            SegmentUpdateDetails segmentUpdateDetails = null;
            String tupleId = getUpdateTupleIdFromRec(dataPage, position);
            String blockId = CarbonUpdateUtil.getRequiredFieldFromTID(tupleId, TupleIdEnum.BLOCK_ID);
            String blockletId = CarbonUpdateUtil.getRequiredFieldFromTID(tupleId, TupleIdEnum.BLOCKLET_ID);
            String pageId = CarbonUpdateUtil.getRequiredFieldFromTID(tupleId, TupleIdEnum.PAGE_ID);
            String rowId = CarbonUpdateUtil.getRequiredFieldFromTID(tupleId, TupleIdEnum.OFFSET);
            String segmentId = CarbonUpdateUtil.getRequiredFieldFromTID(tupleId, TupleIdEnum.SEGMENT_ID);
            String segmentBlockId = CarbonUpdateUtil.getSegmentWithBlockFromTID(tupleId, false);
            String blockName = CarbonUpdateUtil.getBlockName(CarbonTablePath.addDataPartPrefix(blockId));
            String completeBlockName = CarbonTablePath.addDataPartPrefix(blockId + CarbonCommonConstants.FACT_FILE_EXT);
            String blockPath = CarbonUpdateUtil.getTableBlockPath(tupleId, tablePath, true);
            String deltaPath = CarbonUpdateUtil.getDeleteDeltaFilePath(blockPath, blockName, txnTimeStamp);
            deleteDeltaBlockDetails = deleteDeltaDetailsMap.computeIfAbsent(deltaPath, v -> new DeleteDeltaBlockDetails(blockName));
            deltaPathSegmentMap.put(deltaPath, segmentId);
            segmentUpdateDetails = segmentUpdateDetailMap.computeIfAbsent(segmentBlockId, v -> new SegmentUpdateDetails() {

                {
                    setSegmentName(segmentId);
                    setBlockName(blockName);
                    setActualBlockName(completeBlockName);
                    setDeleteDeltaEndTimestamp(txnTimeStamp);
                    setDeleteDeltaStartTimestamp(txnTimeStamp);
                    setDeletedRowsInBlock(segmentUpdateStatusManager.getDetailsForABlock(segmentBlockId) != null ? segmentUpdateStatusManager.getDetailsForABlock(segmentBlockId).getDeletedRowsInBlock() : "0");
                }
            });
            Long deletedRows = Long.parseLong(segmentUpdateDetails.getDeletedRowsInBlock()) + 1;
            segmentUpdateDetails.setDeletedRowsInBlock(Long.toString(deletedRows));
            if (!deleteDeltaBlockDetails.addBlocklet(blockletId, rowId, Integer.parseInt(pageId))) {
                LOG.error("Multiple input rows matched for same row!");
                throw new MultipleMatchingException("Multiple input rows matched for same row!");
            }
            if (HiveACIDWriteType.DELETE == acidWriteType) {
                return;
            }
            finalRecordWriter = segmentRecordWriterMap.computeIfAbsent(segmentId, v -> {
                try {
                    return getHiveWriter(segmentId, CarbonUpdateUtil.getLatestTaskIdForSegment(new Segment(segmentId), tablePath) + 1);
                } catch (Exception e) {
                    LOG.error("error while getting Carbon :: hiveRecordWriter", e);
                    throw new RuntimeException("error while getting Carbon :: hiveRecordWriter");
                }
            });
        } catch (Exception e) {
            LOG.error("error while initializing writer", e);
            throw new PrestoException(GENERIC_INTERNAL_ERROR, "writer class not found", e);
        }
    } else {
        finalRecordWriter = this.recordWriter;
    }
    for (int field = 0; field < fieldCount; field++) {
        Block block = dataPage.getBlock(field);
        if (block.isNull(position)) {
            tableInspector.setStructFieldData(row, structFields.get(field), null);
        } else {
            setters[field].setField(block, position);
        }
    }
    try {
        if (finalRecordWriter != null) {
            finalRecordWriter.write(serDe.serialize(row, tableInspector));
        }
    } catch (SerDeException | IOException e) {
        throw new PrestoException(HIVE_WRITER_DATA_ERROR, e);
    }
}
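
Apart from the error handling, the method leans on Map.computeIfAbsent to build per-key state lazily: deleteDeltaDetailsMap, segmentUpdateDetailMap, and segmentRecordWriterMap all allocate their tracking object only on the first row that touches a given delta path or segment. A minimal sketch of that idiom, with SegmentState and the segment ids as hypothetical stand-ins:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class ComputeIfAbsentSketch {
    static class SegmentState {
        long deletedRows; // analogous to SegmentUpdateDetails.getDeletedRowsInBlock()
    }

    public static void main(String[] args) {
        Map<String, SegmentState> stateBySegment = new ConcurrentHashMap<>();
        for (String segmentId : new String[] {"0", "1", "0"}) {
            // First access per segment allocates the state; later accesses reuse it.
            SegmentState state = stateBySegment.computeIfAbsent(segmentId, k -> new SegmentState());
            state.deletedRows++;
        }
        System.out.println(stateBySegment.get("0").deletedRows); // prints 2
    }
}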
Also used : StringUtils(org.apache.commons.lang.StringUtils) DateTimeZone(org.joda.time.DateTimeZone) Arrays(java.util.Arrays) Date(java.util.Date) Text(org.apache.hadoop.io.Text) HiveFileWriter(io.prestosql.plugin.hive.HiveFileWriter) DeleteDeltaBlockDetails(org.apache.carbondata.core.mutate.DeleteDeltaBlockDetails) CarbonCommonConstants(org.apache.carbondata.core.constants.CarbonCommonConstants) FileSinkOperator(org.apache.hadoop.hive.ql.exec.FileSinkOperator) TaskType(org.apache.hadoop.mapreduce.TaskType) Logger(org.apache.log4j.Logger) Gson(com.google.gson.Gson) Map(java.util.Map) HiveCarbonUtil(org.apache.carbondata.hive.util.HiveCarbonUtil) Path(org.apache.hadoop.fs.Path) SegmentUpdateStatusManager(org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager) Type(io.prestosql.spi.type.Type) StringEscapeUtils(org.apache.commons.lang3.StringEscapeUtils) TaskAttemptID(org.apache.hadoop.mapred.TaskAttemptID) TupleIdEnum(org.apache.carbondata.core.mutate.TupleIdEnum) CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) PrestoException(io.prestosql.spi.PrestoException) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ArrayWritableObjectInspector(org.apache.hadoop.hive.ql.io.parquet.serde.ArrayWritableObjectInspector) HiveType(io.prestosql.plugin.hive.HiveType) CarbonLoadModel(org.apache.carbondata.processing.loading.model.CarbonLoadModel) UncheckedIOException(java.io.UncheckedIOException) List(java.util.List) GENERIC_INTERNAL_ERROR(io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) CarbonHiveSerDe(org.apache.carbondata.hive.CarbonHiveSerDe) TableOptionConstant(org.apache.carbondata.processing.util.TableOptionConstant) Optional(java.util.Optional) MapredCarbonOutputFormat(org.apache.carbondata.hive.MapredCarbonOutputFormat) MultipleMatchingException(org.apache.carbondata.processing.exception.MultipleMatchingException) FieldSetterFactory(io.prestosql.plugin.hive.util.FieldSetterFactory) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) Segment(org.apache.carbondata.core.index.Segment) SimpleDateFormat(java.text.SimpleDateFormat) IOConstants(org.apache.hadoop.hive.ql.io.IOConstants) SegmentStatus(org.apache.carbondata.core.statusmanager.SegmentStatus) LoadMetadataDetails(org.apache.carbondata.core.statusmanager.LoadMetadataDetails) OptionalInt(java.util.OptionalInt) ObjectSerializationUtil(org.apache.carbondata.core.util.ObjectSerializationUtil) ImmutableList(com.google.common.collect.ImmutableList) COMPRESSRESULT(org.apache.hadoop.hive.conf.HiveConf.ConfVars.COMPRESSRESULT) Objects.requireNonNull(java.util.Objects.requireNonNull) CarbonUpdateUtil(org.apache.carbondata.core.mutate.CarbonUpdateUtil) HiveACIDWriteType(io.prestosql.plugin.hive.HiveACIDWriteType) LogServiceFactory(org.apache.carbondata.common.logging.LogServiceFactory) SegmentStatusManager(org.apache.carbondata.core.statusmanager.SegmentStatusManager) Block(io.prestosql.spi.block.Block) SegmentUpdateDetails(org.apache.carbondata.core.mutate.SegmentUpdateDetails) Properties(java.util.Properties) Reporter(org.apache.hadoop.mapred.Reporter) CarbonTablePath(org.apache.carbondata.core.util.path.CarbonTablePath) HiveConf(org.apache.hadoop.hive.conf.HiveConf) TypeManager(io.prestosql.spi.type.TypeManager) AcidOutputFormat(org.apache.hadoop.hive.ql.io.AcidOutputFormat) Page(io.prestosql.spi.Page) IOException(java.io.IOException) 
RowCountDetailsVO(org.apache.carbondata.core.mutate.data.RowCountDetailsVO) HIVE_WRITER_DATA_ERROR(io.prestosql.plugin.hive.HiveErrorCode.HIVE_WRITER_DATA_ERROR) SettableStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector) JobConf(org.apache.hadoop.mapred.JobConf) Collectors.toList(java.util.stream.Collectors.toList) CarbonDeleteDeltaWriterImpl(org.apache.carbondata.core.writer.CarbonDeleteDeltaWriterImpl) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) CarbonTableOutputFormat(org.apache.carbondata.hadoop.api.CarbonTableOutputFormat) FileSinkOperator(org.apache.hadoop.hive.ql.exec.FileSinkOperator) PrestoException(io.prestosql.spi.PrestoException) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException) Segment(org.apache.carbondata.core.index.Segment) PrestoException(io.prestosql.spi.PrestoException) UncheckedIOException(java.io.UncheckedIOException) MultipleMatchingException(org.apache.carbondata.processing.exception.MultipleMatchingException) IOException(java.io.IOException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) SegmentUpdateDetails(org.apache.carbondata.core.mutate.SegmentUpdateDetails) MultipleMatchingException(org.apache.carbondata.processing.exception.MultipleMatchingException) DeleteDeltaBlockDetails(org.apache.carbondata.core.mutate.DeleteDeltaBlockDetails) Block(io.prestosql.spi.block.Block) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 3 with GENERIC_INTERNAL_ERROR

Use of io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR in project hetu-core by openlookeng.

In the class CarbondataPageSourceProvider, the method createPageSource:

@Override
public ConnectorPageSource createPageSource(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorSplit split, ConnectorTableHandle table, List<ColumnHandle> columns) {
    HiveSplit carbonSplit = Types.checkType(((HiveSplitWrapper) (split)).getSplits().get(0), HiveSplit.class, "split is not class HiveSplit");
    this.queryId = carbonSplit.getSchema().getProperty("queryId");
    if (this.queryId == null) {
        // Fall back to hive pagesource.
        return super.createPageSource(transactionHandle, session, split, table, columns);
    }
    try {
        hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session, carbonSplit.getDatabase()), new Path(carbonSplit.getSchema().getProperty("tablePath")));
    } catch (IOException e) {
        throw new PrestoException(GENERIC_INTERNAL_ERROR, "Failed to get file system: " + e.getMessage());
    }
    return hdfsEnvironment.doAs(session.getUser(), () -> {
        Configuration configuration = this.hdfsEnvironment.getConfiguration(new HdfsEnvironment.HdfsContext(session, carbonSplit.getDatabase(), carbonSplit.getTable()), new Path(carbonSplit.getSchema().getProperty("tablePath")));
        CarbonTable carbonTable = getCarbonTable(carbonSplit, configuration);
        /* So that CarbonTLS can access it */
        ThreadLocalSessionInfo.setConfigurationToCurrentThread(configuration);
        boolean isFullACID = isFullAcidTable(Maps.fromProperties(carbonSplit.getSchema()));
        boolean isDirectVectorFill = (carbonTableReader.config.getPushRowFilter() == null) || carbonTableReader.config.getPushRowFilter().equalsIgnoreCase("false") || columns.stream().anyMatch(c -> c.getColumnName().equalsIgnoreCase(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID));
        return new CarbondataPageSource(carbonTable, queryId, carbonSplit, columns, table, configuration, isDirectVectorFill, isFullACID, session.getUser(), hdfsEnvironment);
    });
}
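
The getFileSystem call before doAs acts as a fail-fast probe: if the file system is unreachable, the checked IOException is converted immediately into an engine-visible PrestoException instead of failing later inside the page source. A minimal sketch of that conversion, where probeFileSystem is a hypothetical stand-in for hdfsEnvironment.getFileSystem(...):

import java.io.IOException;
import io.prestosql.spi.PrestoException;
import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;

public class FailFastProbeSketch {
    static void probeFileSystem(String tablePath) throws IOException {
        throw new IOException("connection refused: " + tablePath); // simulated failure
    }

    public static void main(String[] args) {
        try {
            probeFileSystem("hdfs://example/tablePath");
        } catch (IOException e) {
            // The original passes only e.getMessage(); passing e as the cause, as here,
            // would additionally preserve the stack trace.
            throw new PrestoException(GENERIC_INTERNAL_ERROR, "Failed to get file system: " + e.getMessage(), e);
        }
    }
}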
Also used : Path(org.apache.hadoop.fs.Path) CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) HiveSplitWrapper(io.prestosql.plugin.hive.HiveSplitWrapper) ConnectorSplit(io.prestosql.spi.connector.ConnectorSplit) Inject(com.google.inject.Inject) CarbondataTableReader(io.hetu.core.plugin.carbondata.impl.CarbondataTableReader) CarbonCommonConstants(org.apache.carbondata.core.constants.CarbonCommonConstants) HdfsEnvironment(io.prestosql.plugin.hive.HdfsEnvironment) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) ThreadLocalSessionInfo(org.apache.carbondata.core.util.ThreadLocalSessionInfo) ConnectorSession(io.prestosql.spi.connector.ConnectorSession) HiveConfig(io.prestosql.plugin.hive.HiveConfig) Configuration(org.apache.hadoop.conf.Configuration) Objects.requireNonNull(java.util.Objects.requireNonNull) DynamicFilterSupplier(io.prestosql.spi.dynamicfilter.DynamicFilterSupplier) Path(org.apache.hadoop.fs.Path) HivePageSourceFactory(io.prestosql.plugin.hive.HivePageSourceFactory) CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) PrestoException(io.prestosql.spi.PrestoException) AcidUtils.isFullAcidTable(org.apache.hadoop.hive.ql.io.AcidUtils.isFullAcidTable) HivePageSourceProvider(io.prestosql.plugin.hive.HivePageSourceProvider) ConnectorTableHandle(io.prestosql.spi.connector.ConnectorTableHandle) TypeManager(io.prestosql.spi.type.TypeManager) Set(java.util.Set) IOException(java.io.IOException) Maps(com.google.common.collect.Maps) CarbondataTableCacheModel(io.hetu.core.plugin.carbondata.impl.CarbondataTableCacheModel) HiveSplit(io.prestosql.plugin.hive.HiveSplit) List(java.util.List) ConnectorPageSource(io.prestosql.spi.connector.ConnectorPageSource) ColumnHandle(io.prestosql.spi.connector.ColumnHandle) GENERIC_INTERNAL_ERROR(io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) ConnectorTransactionHandle(io.prestosql.spi.connector.ConnectorTransactionHandle) Optional(java.util.Optional) HiveRecordCursorProvider(io.prestosql.plugin.hive.HiveRecordCursorProvider) HiveSplit(io.prestosql.plugin.hive.HiveSplit) Configuration(org.apache.hadoop.conf.Configuration) HiveSplitWrapper(io.prestosql.plugin.hive.HiveSplitWrapper) PrestoException(io.prestosql.spi.PrestoException) IOException(java.io.IOException) HdfsEnvironment(io.prestosql.plugin.hive.HdfsEnvironment)

Example 4 with GENERIC_INTERNAL_ERROR

Use of io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR in project hetu-core by openlookeng.

In the class HiveSplitManager, the method getSplits:

@Override
public ConnectorSplitSource getSplits(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorTableHandle tableHandle, SplitSchedulingStrategy splitSchedulingStrategy, Supplier<List<Set<DynamicFilter>>> dynamicFilterSupplier, Optional<QueryType> queryType, Map<String, Object> queryInfo, Set<TupleDomain<ColumnMetadata>> userDefinedCachePredicates, boolean partOfReuse) {
    HiveTableHandle hiveTable = (HiveTableHandle) tableHandle;
    SchemaTableName tableName = hiveTable.getSchemaTableName();
    // get table metadata
    SemiTransactionalHiveMetastore metastore = metastoreProvider.apply((HiveTransactionHandle) transaction);
    Table table = metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName()).orElseThrow(() -> new TableNotFoundException(tableName));
    if (table.getStorage().getStorageFormat().getInputFormat().contains("carbon")) {
        throw new PrestoException(NOT_SUPPORTED, "Hive connector can't read carbondata tables");
    }
    // verify table is not marked as non-readable
    String tableNotReadable = table.getParameters().get(OBJECT_NOT_READABLE);
    if (!isNullOrEmpty(tableNotReadable)) {
        throw new HiveNotReadableException(tableName, Optional.empty(), tableNotReadable);
    }
    // get partitions
    List<HivePartition> partitions = partitionManager.getOrLoadPartitions(session, metastore, new HiveIdentity(session), hiveTable);
    // short circuit if we don't have any partitions
    if (partitions.isEmpty()) {
        return new FixedSplitSource(ImmutableList.of());
    }
    // get buckets from first partition (arbitrary)
    Optional<HiveBucketing.HiveBucketFilter> bucketFilter = hiveTable.getBucketFilter();
    // validate bucket bucketed execution
    Optional<HiveBucketHandle> bucketHandle = hiveTable.getBucketHandle();
    if ((splitSchedulingStrategy == GROUPED_SCHEDULING) && !bucketHandle.isPresent()) {
        throw new PrestoException(GENERIC_INTERNAL_ERROR, "SchedulingPolicy is bucketed, but BucketHandle is not present");
    }
    // sort partitions
    partitions = Ordering.natural().onResultOf(HivePartition::getPartitionId).reverse().sortedCopy(partitions);
    Iterable<HivePartitionMetadata> hivePartitions = getPartitionMetadata(session, metastore, table, tableName, partitions, bucketHandle.map(HiveBucketHandle::toTableBucketProperty));
    HiveSplitLoader hiveSplitLoader = new BackgroundHiveSplitLoader(table, hivePartitions, hiveTable.getCompactEffectivePredicate(), BackgroundHiveSplitLoader.BucketSplitInfo.createBucketSplitInfo(bucketHandle, bucketFilter), session, hdfsEnvironment, namenodeStats, directoryLister, executor, splitLoaderConcurrency, recursiveDfsWalkerEnabled, metastore.getValidWriteIds(session, hiveTable, queryType.map(t -> t == QueryType.VACUUM).orElse(false)).map(validTxnWriteIdList -> validTxnWriteIdList.getTableValidWriteIdList(table.getDatabaseName() + "." + table.getTableName())), dynamicFilterSupplier, queryType, queryInfo, typeManager);
    HiveSplitSource splitSource;
    HiveStorageFormat hiveStorageFormat = HiveMetadata.extractHiveStorageFormat(table);
    switch(splitSchedulingStrategy) {
        case UNGROUPED_SCHEDULING:
            splitSource = HiveSplitSource.allAtOnce(session, table.getDatabaseName(), table.getTableName(), // For reuse, we should make sure to have same split size all time for a table.
            partOfReuse ? 0 : maxInitialSplits, maxOutstandingSplits, maxOutstandingSplitsSize, maxSplitsPerSecond, hiveSplitLoader, executor, new CounterStat(), dynamicFilterSupplier, userDefinedCachePredicates, typeManager, hiveConfig, hiveStorageFormat);
            break;
        case GROUPED_SCHEDULING:
            splitSource = HiveSplitSource.bucketed(session, table.getDatabaseName(), table.getTableName(), // For reuse, we should make sure to have same split size all time for a table.
            partOfReuse ? 0 : maxInitialSplits, maxOutstandingSplits, maxOutstandingSplitsSize, maxSplitsPerSecond, hiveSplitLoader, executor, new CounterStat(), dynamicFilterSupplier, userDefinedCachePredicates, typeManager, hiveConfig, hiveStorageFormat);
            break;
        default:
            throw new IllegalArgumentException("Unknown splitSchedulingStrategy: " + splitSchedulingStrategy);
    }
    hiveSplitLoader.start(splitSource);
    if (queryType.isPresent() && queryType.get() == QueryType.VACUUM) {
        HdfsContext hdfsContext = new HdfsContext(session, table.getDatabaseName(), table.getTableName());
        return new HiveVacuumSplitSource(splitSource, (HiveVacuumTableHandle) queryInfo.get("vacuumHandle"), hdfsEnvironment, hdfsContext, session);
    }
    return splitSource;
}
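
The GENERIC_INTERNAL_ERROR here guards an engine invariant rather than user input: grouped scheduling is only coherent when the table exposes a bucket handle, so its absence means something upstream went wrong. A minimal sketch of that invariant check, with the enum and Optional as simplified stand-ins for SplitSchedulingStrategy and hiveTable.getBucketHandle():

import java.util.Optional;
import io.prestosql.spi.PrestoException;
import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;

public class SchedulingInvariantSketch {
    enum Strategy { UNGROUPED_SCHEDULING, GROUPED_SCHEDULING }

    static void validate(Strategy strategy, Optional<String> bucketHandle) {
        if (strategy == Strategy.GROUPED_SCHEDULING && !bucketHandle.isPresent()) {
            // Internal inconsistency: the planner chose bucketed scheduling
            // for a table that has no bucketing metadata.
            throw new PrestoException(GENERIC_INTERNAL_ERROR,
                    "SchedulingPolicy is bucketed, but BucketHandle is not present");
        }
    }

    public static void main(String[] args) {
        validate(Strategy.UNGROUPED_SCHEDULING, Optional.empty()); // fine
        validate(Strategy.GROUPED_SCHEDULING, Optional.of("bucketed-by-id")); // fine
        validate(Strategy.GROUPED_SCHEDULING, Optional.empty()); // throws
    }
}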
Also used : VersionEmbedder(io.prestosql.spi.VersionEmbedder) GROUPED_SCHEDULING(io.prestosql.spi.connector.ConnectorSplitManager.SplitSchedulingStrategy.GROUPED_SCHEDULING) Iterables.transform(com.google.common.collect.Iterables.transform) DynamicFilter(io.prestosql.spi.dynamicfilter.DynamicFilter) QueryType(io.prestosql.spi.resourcegroups.QueryType) HdfsContext(io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext) ConnectorSplitManager(io.prestosql.spi.connector.ConnectorSplitManager) MetastoreUtil(io.prestosql.plugin.hive.metastore.MetastoreUtil) SERVER_SHUTTING_DOWN(io.prestosql.spi.StandardErrorCode.SERVER_SHUTTING_DOWN) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) ConnectorSession(io.prestosql.spi.connector.ConnectorSession) TableNotFoundException(io.prestosql.spi.connector.TableNotFoundException) BoundedExecutor(io.airlift.concurrent.BoundedExecutor) Iterables.concat(com.google.common.collect.Iterables.concat) Map(java.util.Map) PrestoException(io.prestosql.spi.PrestoException) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) Set(java.util.Set) ConnectorSplitSource(io.prestosql.spi.connector.ConnectorSplitSource) Math.min(java.lang.Math.min) String.format(java.lang.String.format) DataSize(io.airlift.units.DataSize) List(java.util.List) Table(io.prestosql.plugin.hive.metastore.Table) GENERIC_INTERNAL_ERROR(io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) ConnectorTransactionHandle(io.prestosql.spi.connector.ConnectorTransactionHandle) Optional(java.util.Optional) MoreObjects.firstNonNull(com.google.common.base.MoreObjects.firstNonNull) NOT_SUPPORTED(io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED) Nested(org.weakref.jmx.Nested) Partition(io.prestosql.plugin.hive.metastore.Partition) Strings.isNullOrEmpty(com.google.common.base.Strings.isNullOrEmpty) CounterStat(io.airlift.stats.CounterStat) Function(java.util.function.Function) Supplier(java.util.function.Supplier) Inject(javax.inject.Inject) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) RejectedExecutionException(java.util.concurrent.RejectedExecutionException) Lists(com.google.common.collect.Lists) ImmutableList(com.google.common.collect.ImmutableList) Managed(org.weakref.jmx.Managed) Objects.requireNonNull(java.util.Objects.requireNonNull) FixedSplitSource(io.prestosql.spi.connector.FixedSplitSource) SemiTransactionalHiveMetastore(io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore) Nullable(javax.annotation.Nullable) ExecutorService(java.util.concurrent.ExecutorService) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) Iterator(java.util.Iterator) Executor(java.util.concurrent.Executor) ColumnMetadata(io.prestosql.spi.connector.ColumnMetadata) ConnectorTableHandle(io.prestosql.spi.connector.ConnectorTableHandle) TupleDomain(io.prestosql.spi.predicate.TupleDomain) AbstractIterator(com.google.common.collect.AbstractIterator) TypeManager(io.prestosql.spi.type.TypeManager) Iterables.getOnlyElement(com.google.common.collect.Iterables.getOnlyElement) Ordering(com.google.common.collect.Ordering) Column(io.prestosql.plugin.hive.metastore.Column) SemiTransactionalHiveMetastore(io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore) CounterStat(io.airlift.stats.CounterStat) PrestoException(io.prestosql.spi.PrestoException) HiveIdentity(io.prestosql.plugin.hive.authentication.HiveIdentity) TableNotFoundException(io.prestosql.spi.connector.TableNotFoundException) 
FixedSplitSource(io.prestosql.spi.connector.FixedSplitSource) HdfsContext(io.prestosql.plugin.hive.HdfsEnvironment.HdfsContext) Table(io.prestosql.plugin.hive.metastore.Table) SchemaTableName(io.prestosql.spi.connector.SchemaTableName)

Example 5 with GENERIC_INTERNAL_ERROR

Use of io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR in project hetu-core by openlookeng.

In the class CreateTableTask, the method internalExecute:

@VisibleForTesting
public ListenableFuture<?> internalExecute(CreateTable statement, Metadata metadata, AccessControl accessControl, Session session, List<Expression> parameters) {
    checkArgument(!statement.getElements().isEmpty(), "no columns for table");
    QualifiedObjectName tableName = createQualifiedObjectName(session, statement, statement.getName());
    Optional<TableHandle> tableHandle = metadata.getTableHandle(session, tableName);
    if (tableHandle.isPresent()) {
        if (!statement.isNotExists()) {
            throw new SemanticException(TABLE_ALREADY_EXISTS, statement, "Table '%s' already exists", tableName);
        }
        return immediateFuture(null);
    }
    CatalogName catalogName = metadata.getCatalogHandle(session, tableName.getCatalogName()).orElseThrow(() -> new PrestoException(NOT_FOUND, "Catalog does not exist: " + tableName.getCatalogName()));
    LinkedHashMap<String, ColumnMetadata> columns = new LinkedHashMap<>();
    Map<String, Object> inheritedProperties = ImmutableMap.of();
    boolean includingProperties = false;
    for (TableElement element : statement.getElements()) {
        if (element instanceof ColumnDefinition) {
            ColumnDefinition column = (ColumnDefinition) element;
            String name = column.getName().getValue().toLowerCase(Locale.ENGLISH);
            Type type;
            try {
                type = metadata.getType(parseTypeSignature(column.getType()));
            } catch (TypeNotFoundException e) {
                throw new SemanticException(TYPE_MISMATCH, element, "Unknown type '%s' for column '%s'", column.getType(), column.getName());
            }
            if (type.equals(UNKNOWN)) {
                throw new SemanticException(TYPE_MISMATCH, element, "Unknown type '%s' for column '%s'", column.getType(), column.getName());
            }
            if (columns.containsKey(name)) {
                throw new SemanticException(DUPLICATE_COLUMN_NAME, column, "Column name '%s' specified more than once", column.getName());
            }
            if (!column.isNullable() && !metadata.getConnectorCapabilities(session, catalogName).contains(NOT_NULL_COLUMN_CONSTRAINT)) {
                throw new SemanticException(NOT_SUPPORTED, column, "Catalog '%s' does not support non-null column for column name '%s'", catalogName.getCatalogName(), column.getName());
            }
            Map<String, Expression> sqlProperties = mapFromProperties(column.getProperties());
            Map<String, Object> columnProperties = metadata.getColumnPropertyManager().getProperties(catalogName, tableName.getCatalogName(), sqlProperties, session, metadata, parameters);
            columns.put(name, new ColumnMetadata(name, type, column.isNullable(), column.getComment().orElse(null), null, false, columnProperties));
        } else if (element instanceof LikeClause) {
            LikeClause likeClause = (LikeClause) element;
            QualifiedObjectName likeTableName = createQualifiedObjectName(session, statement, likeClause.getTableName());
            if (!metadata.getCatalogHandle(session, likeTableName.getCatalogName()).isPresent()) {
                throw new SemanticException(MISSING_CATALOG, statement, "LIKE table catalog '%s' does not exist", likeTableName.getCatalogName());
            }
            if (!tableName.getCatalogName().equals(likeTableName.getCatalogName())) {
                throw new SemanticException(NOT_SUPPORTED, statement, "LIKE table across catalogs is not supported");
            }
            TableHandle likeTable = metadata.getTableHandle(session, likeTableName).orElseThrow(() -> new SemanticException(MISSING_TABLE, statement, "LIKE table '%s' does not exist", likeTableName));
            TableMetadata likeTableMetadata = metadata.getTableMetadata(session, likeTable);
            Optional<LikeClause.PropertiesOption> propertiesOption = likeClause.getPropertiesOption();
            if (propertiesOption.isPresent() && propertiesOption.get().equals(LikeClause.PropertiesOption.INCLUDING)) {
                if (includingProperties) {
                    throw new SemanticException(NOT_SUPPORTED, statement, "Only one LIKE clause can specify INCLUDING PROPERTIES");
                }
                includingProperties = true;
                // Don't inherit location property for sql statement "create table like"
                inheritedProperties = likeTableMetadata.getMetadata().getInheritableProperties();
            }
            likeTableMetadata.getColumns().stream().filter(column -> !column.isHidden()).forEach(column -> {
                if (columns.containsKey(column.getName().toLowerCase(Locale.ENGLISH))) {
                    throw new SemanticException(DUPLICATE_COLUMN_NAME, element, "Column name '%s' specified more than once", column.getName());
                }
                columns.put(column.getName().toLowerCase(Locale.ENGLISH), column);
            });
        } else {
            throw new PrestoException(GENERIC_INTERNAL_ERROR, "Invalid TableElement: " + element.getClass().getName());
        }
    }
    accessControl.checkCanCreateTable(session.getRequiredTransactionId(), session.getIdentity(), tableName);
    Map<String, Expression> sqlProperties = mapFromProperties(statement.getProperties());
    Map<String, Object> properties = metadata.getTablePropertyManager().getProperties(catalogName, tableName.getCatalogName(), sqlProperties, session, metadata, parameters);
    Map<String, Object> finalProperties = combineProperties(sqlProperties.keySet(), properties, inheritedProperties);
    ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(toSchemaTableName(tableName), ImmutableList.copyOf(columns.values()), finalProperties, statement.getComment());
    try {
        metadata.createTable(session, tableName.getCatalogName(), tableMetadata, statement.isNotExists());
    } catch (PrestoException e) {
        // connectors are not required to handle the ignoreExisting flag
        if (!e.getErrorCode().equals(ALREADY_EXISTS.toErrorCode()) || !statement.isNotExists()) {
            throw e;
        }
    }
    return immediateFuture(null);
}
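
Here GENERIC_INTERNAL_ERROR marks the "should never happen" branch: user-visible problems raise SemanticException with a specific code, while a TableElement subtype the dispatcher does not recognize indicates a parser or engine bug. A minimal sketch of that dispatch shape; Element, ColumnDef, and LikeDef are hypothetical stand-ins for the AST types:

import io.prestosql.spi.PrestoException;
import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;

public class DispatchSketch {
    interface Element {}
    static class ColumnDef implements Element {}
    static class LikeDef implements Element {}

    static String handle(Element element) {
        if (element instanceof ColumnDef) {
            return "column";
        }
        if (element instanceof LikeDef) {
            return "like";
        }
        // Unknown subtype: report it as an internal error, as internalExecute does.
        throw new PrestoException(GENERIC_INTERNAL_ERROR,
                "Invalid TableElement: " + element.getClass().getName());
    }

    public static void main(String[] args) {
        System.out.println(handle(new ColumnDef()));
        System.out.println(handle(new LikeDef()));
    }
}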
Also used : LikeClause(io.prestosql.sql.tree.LikeClause) TableMetadata(io.prestosql.metadata.TableMetadata) ConnectorTableMetadata(io.prestosql.spi.connector.ConnectorTableMetadata) ALREADY_EXISTS(io.prestosql.spi.StandardErrorCode.ALREADY_EXISTS) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) TableMetadata(io.prestosql.metadata.TableMetadata) LikeClause(io.prestosql.sql.tree.LikeClause) TransactionManager(io.prestosql.transaction.TransactionManager) HashMap(java.util.HashMap) TypeNotFoundException(io.prestosql.spi.type.TypeNotFoundException) NOT_FOUND(io.prestosql.spi.StandardErrorCode.NOT_FOUND) TableHandle(io.prestosql.spi.metadata.TableHandle) NOT_SUPPORTED(io.prestosql.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED) QualifiedObjectName(io.prestosql.spi.connector.QualifiedObjectName) LinkedHashMap(java.util.LinkedHashMap) SemanticException(io.prestosql.sql.analyzer.SemanticException) TypeSignature.parseTypeSignature(io.prestosql.spi.type.TypeSignature.parseTypeSignature) Preconditions.checkArgument(com.google.common.base.Preconditions.checkArgument) CreateTable(io.prestosql.sql.tree.CreateTable) ImmutableList(com.google.common.collect.ImmutableList) Locale(java.util.Locale) Map(java.util.Map) Session(io.prestosql.Session) TABLE_ALREADY_EXISTS(io.prestosql.sql.analyzer.SemanticErrorCode.TABLE_ALREADY_EXISTS) Type(io.prestosql.spi.type.Type) HeuristicIndexerManager(io.prestosql.heuristicindex.HeuristicIndexerManager) PrestoException(io.prestosql.spi.PrestoException) Futures.immediateFuture(com.google.common.util.concurrent.Futures.immediateFuture) AccessControl(io.prestosql.security.AccessControl) ImmutableMap(com.google.common.collect.ImmutableMap) ColumnMetadata(io.prestosql.spi.connector.ColumnMetadata) TYPE_MISMATCH(io.prestosql.sql.analyzer.SemanticErrorCode.TYPE_MISMATCH) CatalogName(io.prestosql.spi.connector.CatalogName) Set(java.util.Set) MISSING_TABLE(io.prestosql.sql.analyzer.SemanticErrorCode.MISSING_TABLE) DUPLICATE_COLUMN_NAME(io.prestosql.sql.analyzer.SemanticErrorCode.DUPLICATE_COLUMN_NAME) Metadata(io.prestosql.metadata.Metadata) ConnectorTableMetadata(io.prestosql.spi.connector.ConnectorTableMetadata) TableElement(io.prestosql.sql.tree.TableElement) MetadataUtil.toSchemaTableName(io.prestosql.metadata.MetadataUtil.toSchemaTableName) List(java.util.List) ColumnDefinition(io.prestosql.sql.tree.ColumnDefinition) GENERIC_INTERNAL_ERROR(io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) Optional(java.util.Optional) VisibleForTesting(com.google.common.annotations.VisibleForTesting) MetadataUtil.createQualifiedObjectName(io.prestosql.metadata.MetadataUtil.createQualifiedObjectName) NodeUtils.mapFromProperties(io.prestosql.sql.NodeUtils.mapFromProperties) Expression(io.prestosql.sql.tree.Expression) NOT_NULL_COLUMN_CONSTRAINT(io.prestosql.spi.connector.ConnectorCapabilities.NOT_NULL_COLUMN_CONSTRAINT) UNKNOWN(io.prestosql.spi.type.UnknownType.UNKNOWN) MISSING_CATALOG(io.prestosql.sql.analyzer.SemanticErrorCode.MISSING_CATALOG) ColumnMetadata(io.prestosql.spi.connector.ColumnMetadata) Optional(java.util.Optional) PrestoException(io.prestosql.spi.PrestoException) QualifiedObjectName(io.prestosql.spi.connector.QualifiedObjectName) MetadataUtil.createQualifiedObjectName(io.prestosql.metadata.MetadataUtil.createQualifiedObjectName) TableElement(io.prestosql.sql.tree.TableElement) LinkedHashMap(java.util.LinkedHashMap) ColumnDefinition(io.prestosql.sql.tree.ColumnDefinition) Type(io.prestosql.spi.type.Type) Expression(io.prestosql.sql.tree.Expression) 
TypeNotFoundException(io.prestosql.spi.type.TypeNotFoundException) TableHandle(io.prestosql.spi.metadata.TableHandle) CatalogName(io.prestosql.spi.connector.CatalogName) ConnectorTableMetadata(io.prestosql.spi.connector.ConnectorTableMetadata) SemanticException(io.prestosql.sql.analyzer.SemanticException) VisibleForTesting(com.google.common.annotations.VisibleForTesting)

Aggregations

PrestoException (io.prestosql.spi.PrestoException) 9
GENERIC_INTERNAL_ERROR (io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR) 9
List (java.util.List) 9
Optional (java.util.Optional) 9
ImmutableList (com.google.common.collect.ImmutableList) 8
TypeManager (io.prestosql.spi.type.TypeManager) 8
Map (java.util.Map) 8
Objects.requireNonNull (java.util.Objects.requireNonNull) 8
Set (java.util.Set) 8
ImmutableMap (com.google.common.collect.ImmutableMap) 7
ColumnMetadata (io.prestosql.spi.connector.ColumnMetadata) 7
ConnectorSession (io.prestosql.spi.connector.ConnectorSession) 7
ConnectorTableHandle (io.prestosql.spi.connector.ConnectorTableHandle) 7
SchemaTableName (io.prestosql.spi.connector.SchemaTableName) 7
IOException (java.io.IOException) 7
Gson (com.google.gson.Gson) 6
CarbondataTableCacheModel (io.hetu.core.plugin.carbondata.impl.CarbondataTableCacheModel) 6
CarbondataTableReader (io.hetu.core.plugin.carbondata.impl.CarbondataTableReader) 6
HdfsEnvironment (io.prestosql.plugin.hive.HdfsEnvironment) 6
HiveIdentity (io.prestosql.plugin.hive.authentication.HiveIdentity) 6