Search in sources :

Example 96 with CarbonTable

use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by apache.

The following snippet is from the class HiveCarbonUtil, method getCarbonTable.

/**
 * Builds a non-transactional {@link CarbonTable} from the Hive table properties.
 *
 * @param tableProperties Hive configuration carrying "name" (database.table),
 *                        table location, column names/types and optional
 *                        partition columns/types
 * @return the constructed CarbonTable, marked non-transactional
 * @throws SQLException if the table name property is missing or not in
 *                      "database.table" form, or schema construction fails
 */
public static CarbonTable getCarbonTable(Configuration tableProperties) throws SQLException {
    String name = tableProperties.get("name");
    if (name == null) {
        throw new SQLException("Table property 'name' is not set");
    }
    String[] tableUniqueName = name.split("\\.");
    // Guard against a dot-less name: the original indexing would throw an
    // opaque ArrayIndexOutOfBoundsException here.
    if (tableUniqueName.length < 2) {
        throw new SQLException(
            "Expected table name in 'database.table' format but got: " + name);
    }
    String databaseName = tableUniqueName[0];
    String tableName = tableUniqueName[1];
    String tablePath = tableProperties.get(hive_metastoreConstants.META_TABLE_LOCATION);
    String columns = tableProperties.get(hive_metastoreConstants.META_TABLE_COLUMNS);
    String sortColumns = tableProperties.get("sort_columns");
    String columnTypes = tableProperties.get(hive_metastoreConstants.META_TABLE_COLUMN_TYPES);
    String partitionColumns = tableProperties.get(hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
    String partitionColumnTypes = tableProperties.get(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
    // Partition columns are stored separately by Hive; fold them into the
    // regular column/type lists (types are ':'-separated in Hive metadata).
    if (partitionColumns != null) {
        columns = columns + "," + partitionColumns;
        columnTypes = columnTypes + ":" + partitionColumnTypes;
    }
    String[][] validatedColumnsAndTypes = validateColumnsAndTypes(columns, columnTypes);
    CarbonTable carbonTable = CarbonTable.buildFromTableInfo(HiveCarbonUtil.getTableInfo(tableName, databaseName, tablePath, sortColumns, validatedColumnsAndTypes[0], validatedColumnsAndTypes[1], new ArrayList<>()));
    // Hive-created tables are treated as non-transactional carbon tables.
    carbonTable.setTransactionalTable(false);
    return carbonTable;
}
Also used : CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) ArrayList(java.util.ArrayList)

Example 97 with CarbonTable

use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by apache.

The following snippet is from the class MapredCarbonOutputCommitter, method commitJob.

/**
 * Finalizes a carbon load job: merges the segment's index files, writes the
 * segment file, then delegates the commit to the wrapped output committer.
 *
 * @param jobContext context of the completing job
 * @throws IOException if index merging, segment-file writing or the
 *                     delegate commit fails
 */
@Override
public void commitJob(JobContext jobContext) throws IOException {
    try {
        Configuration conf = jobContext.getConfiguration();
        CarbonLoadModel loadModel = MapredCarbonOutputFormat.getLoadModel(conf);
        ThreadLocalSessionInfo.unsetAll();
        CarbonTable table = loadModel.getCarbonDataLoadSchema().getCarbonTable();
        String segmentId = loadModel.getSegmentId();
        String loadTimestamp = String.valueOf(loadModel.getFactTimeStamp());
        // Consolidate the per-task carbon index files of this segment.
        new CarbonIndexFileMergeWriter(table)
            .mergeCarbonIndexFilesOfSegment(segmentId, table.getTablePath(), false, loadTimestamp);
        // Record the segment's file layout before committing.
        SegmentFileStore.writeSegmentFile(table, segmentId, loadTimestamp);
        CarbonTableOutputFormat.setLoadModel(conf, loadModel);
        carbonOutputCommitter.commitJob(jobContext);
    } catch (Exception e) {
        // Log and rethrow so the job is reported as failed.
        LOGGER.error(e);
        throw e;
    }
}
Also used : CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) CarbonIndexFileMergeWriter(org.apache.carbondata.core.writer.CarbonIndexFileMergeWriter) Configuration(org.apache.hadoop.conf.Configuration) CarbonLoadModel(org.apache.carbondata.processing.loading.model.CarbonLoadModel) IOException(java.io.IOException)

Example 98 with CarbonTable

use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by apache.

The following snippet is from the class CarbonHiveSerDe, method inferSchema.

/**
 * Populates {@code columnNames}/{@code columnTypes} for an external table whose
 * columns were not declared, by reading (or inferring) the carbon schema at the
 * table location. No-op when columns are already present or the table is not
 * external. Failures are logged and swallowed so SerDe initialization proceeds.
 *
 * @param tbl         Hive table properties ("EXTERNAL", location, table name)
 * @param columnNames output list, appended with inferred column names
 * @param columnTypes output list, appended with matching Hive type infos
 */
private void inferSchema(Properties tbl, List<String> columnNames, List<TypeInfo> columnTypes) {
    if (columnNames.size() == 0 && columnTypes.size() == 0) {
        String external = tbl.getProperty("EXTERNAL");
        String location = CarbonUtil.checkAndAppendFileSystemURIScheme(tbl.getProperty(hive_metastoreConstants.META_TABLE_LOCATION));
        // NOTE(review): this comparison is case-sensitive; Hive normally stores
        // "TRUE" uppercase, but confirm before relying on mixed-case values.
        if (external != null && "TRUE".equals(external) && location != null) {
            String[] names = tbl.getProperty(hive_metastoreConstants.META_TABLE_NAME).split("\\.");
            if (names.length == 2) {
                AbsoluteTableIdentifier identifier = AbsoluteTableIdentifier.from(location, names[0], names[1]);
                String schemaPath = CarbonTablePath.getSchemaFilePath(identifier.getTablePath());
                try {
                    TableInfo tableInfo = null;
                    // Prefer the persisted schema file; fall back to inferring
                    // the schema from the data files when it is absent.
                    if (!FileFactory.isFileExist(schemaPath)) {
                        tableInfo = SchemaReader.inferSchema(identifier, false);
                    } else {
                        tableInfo = SchemaReader.getTableInfo(identifier);
                    }
                    if (tableInfo != null) {
                        CarbonTable carbonTable = CarbonTable.buildFromTableInfo(tableInfo);
                        List<CarbonColumn> columns = carbonTable.getCreateOrderColumn();
                        for (CarbonColumn column : columns) {
                            columnNames.add(column.getColName());
                            columnTypes.add(HiveDataTypeUtils.convertCarbonDataTypeToHive(column));
                        }
                    }
                } catch (Exception ex) {
                    // Pass the throwable so the stack trace and cause are kept;
                    // getMessage() alone loses both.
                    LOGGER.warn("Failed to infer schema: " + ex.getMessage(), ex);
                }
            }
        }
    }
}
Also used : CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) CarbonColumn(org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn) AbsoluteTableIdentifier(org.apache.carbondata.core.metadata.AbsoluteTableIdentifier) TableInfo(org.apache.carbondata.core.metadata.schema.table.TableInfo) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 99 with CarbonTable

use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by apache.

The following snippet is from the class MapredCarbonInputFormat, method getQueryModel.

/**
 * Builds the query model for a scan: resolves the carbon table for the given
 * path, applies the configured projection and filter, and attaches the default
 * data-type converter.
 *
 * @param configuration job configuration carrying projection and filter
 * @param path          table path used to resolve the carbon table
 * @return the fully configured query model
 * @throws IOException if the table cannot be read
 * @throws InvalidConfigurationException if the configuration is invalid
 * @throws SQLException if table resolution fails
 */
private QueryModel getQueryModel(Configuration configuration, String path) throws IOException, InvalidConfigurationException, SQLException {
    CarbonTable table = getCarbonTable(configuration, path);
    // Projection is stored as a comma-separated column list.
    String[] projectedColumns = getProjection(configuration, table).split(",");
    return new QueryModelBuilder(table)
        .projectColumns(projectedColumns)
        .filterExpression(getFilterPredicates(configuration))
        .dataConverter(new DataTypeConverterImpl())
        .build();
}
Also used : CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) QueryModelBuilder(org.apache.carbondata.core.scan.model.QueryModelBuilder) DataTypeConverterImpl(org.apache.carbondata.core.util.DataTypeConverterImpl)

Example 100 with CarbonTable

use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by apache.

The following snippet is from the class CarbondataPageSourceProvider, method createPageSource.

/**
 * Creates a page source for the given split. Splits without a carbon
 * "queryId" property are delegated to the parent hive page source; carbon
 * splits get a {@link CarbondataPageSource} backed by a configuration
 * enriched with the split's schema properties.
 *
 * @param transactionHandle current transaction handle
 * @param session           connector session
 * @param split             split to read; must be a {@code HiveSplit}
 * @param columns           columns to project
 * @return a page source for the split
 */
@Override
public ConnectorPageSource createPageSource(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorSplit split, List<ColumnHandle> columns) {
    HiveSplit hiveSplit = checkType(split, HiveSplit.class, "split is not class HiveSplit");
    this.queryId = hiveSplit.getSchema().getProperty("queryId");
    if (this.queryId == null) {
        // Not a carbon split: fall back to the hive page source.
        return super.createPageSource(transactionHandle, session, split, columns);
    }
    HdfsEnvironment.HdfsContext context =
        new HdfsEnvironment.HdfsContext(session, hiveSplit.getDatabase(), hiveSplit.getTable());
    Path tablePath = new Path(hiveSplit.getSchema().getProperty("tablePath"));
    Configuration conf = this.hdfsEnvironment.getConfiguration(context, tablePath);
    conf = carbonTableReader.updateS3Properties(conf);
    // Expose every schema property to the carbon reader via the configuration.
    for (Map.Entry<Object, Object> entry : hiveSplit.getSchema().entrySet()) {
        conf.set(entry.getKey().toString(), entry.getValue().toString());
    }
    CarbonTable carbonTable = getCarbonTable(hiveSplit, conf);
    String pushRowFilter = carbonTableReader.config.getPushRowFilter();
    // Direct vector fill is enabled unless row-level filter pushdown is on.
    boolean directVectorFill = pushRowFilter == null || pushRowFilter.equalsIgnoreCase("false");
    return new CarbondataPageSource(carbonTable, queryId, hiveSplit, columns, conf, directVectorFill);
}
Also used : Path(org.apache.hadoop.fs.Path) CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) HiveSplit(com.facebook.presto.hive.HiveSplit) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) HdfsEnvironment(com.facebook.presto.hive.HdfsEnvironment)

Aggregations

CarbonTable (org.apache.carbondata.core.metadata.schema.table.CarbonTable)101 ArrayList (java.util.ArrayList)36 IOException (java.io.IOException)31 LoadMetadataDetails (org.apache.carbondata.core.statusmanager.LoadMetadataDetails)19 AbsoluteTableIdentifier (org.apache.carbondata.core.metadata.AbsoluteTableIdentifier)18 ColumnSchema (org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema)16 Configuration (org.apache.hadoop.conf.Configuration)15 TableInfo (org.apache.carbondata.core.metadata.schema.table.TableInfo)14 Map (java.util.Map)13 CarbonFile (org.apache.carbondata.core.datastore.filesystem.CarbonFile)13 List (java.util.List)12 CarbonDimension (org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension)12 HashMap (java.util.HashMap)11 CarbonTablePath (org.apache.carbondata.core.util.path.CarbonTablePath)11 File (java.io.File)9 Expression (org.apache.carbondata.core.scan.expression.Expression)9 PartitionSpec (org.apache.carbondata.core.indexstore.PartitionSpec)8 CarbonInputSplit (org.apache.carbondata.hadoop.CarbonInputSplit)8 InputSplit (org.apache.hadoop.mapreduce.InputSplit)8 Test (org.junit.Test)8