Example usage of org.apache.carbondata.core.metadata.schema.table.CarbonTable in the Apache CarbonData project:
class HiveCarbonUtil, method getCarbonTable.
/**
 * Builds a non-transactional {@link CarbonTable} from Hive table properties.
 *
 * @param tableProperties Hive job configuration carrying the qualified table name,
 *                        table location, column names/types and optional partition columns
 * @return the constructed CarbonTable, marked non-transactional
 * @throws SQLException if the table name is absent or not in {@code database.table} form,
 *                      or if column/type validation fails downstream
 */
public static CarbonTable getCarbonTable(Configuration tableProperties) throws SQLException {
String fullTableName = tableProperties.get("name");
// Fail with a meaningful SQLException instead of an NPE/ArrayIndexOutOfBoundsException
// when the "name" property is missing or not qualified as database.table.
if (fullTableName == null || fullTableName.split("\\.").length != 2) {
throw new SQLException(
"Table name is expected in the form 'database.table' but was: " + fullTableName);
}
String[] tableUniqueName = fullTableName.split("\\.");
String databaseName = tableUniqueName[0];
String tableName = tableUniqueName[1];
String tablePath = tableProperties.get(hive_metastoreConstants.META_TABLE_LOCATION);
String columns = tableProperties.get(hive_metastoreConstants.META_TABLE_COLUMNS);
String sortColumns = tableProperties.get("sort_columns");
String columnTypes = tableProperties.get(hive_metastoreConstants.META_TABLE_COLUMN_TYPES);
String partitionColumns = tableProperties.get(hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
String partitionColumnTypes = tableProperties.get(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
// Partition columns are appended to the regular schema; Hive separates column
// names with ',' and column types with ':'.
if (partitionColumns != null) {
columns = columns + "," + partitionColumns;
columnTypes = columnTypes + ":" + partitionColumnTypes;
}
String[][] validatedColumnsAndTypes = validateColumnsAndTypes(columns, columnTypes);
CarbonTable carbonTable = CarbonTable.buildFromTableInfo(HiveCarbonUtil.getTableInfo(tableName, databaseName, tablePath, sortColumns, validatedColumnsAndTypes[0], validatedColumnsAndTypes[1], new ArrayList<>()));
// Hive-written tables are treated as non-transactional CarbonData tables.
carbonTable.setTransactionalTable(false);
return carbonTable;
}
Example usage of org.apache.carbondata.core.metadata.schema.table.CarbonTable in the Apache CarbonData project:
class MapredCarbonOutputCommitter, method commitJob.
/**
 * Finalizes a Carbon load job: merges the segment's index files, writes the
 * segment file, then delegates to the wrapped output committer.
 *
 * @param jobContext the Hadoop job context whose configuration carries the load model
 * @throws IOException if committing the job fails
 */
@Override
public void commitJob(JobContext jobContext) throws IOException {
try {
Configuration configuration = jobContext.getConfiguration();
// Recover the load model that the output format stored in the job configuration.
CarbonLoadModel carbonLoadModel = MapredCarbonOutputFormat.getLoadModel(configuration);
// Clear any session info bound to this thread before finalizing the segment.
ThreadLocalSessionInfo.unsetAll();
CarbonTable carbonTable = carbonLoadModel.getCarbonDataLoadSchema().getCarbonTable();
// Merge the per-task carbonindex files of this segment into a single merge index,
// stamped with the load's fact timestamp.
new CarbonIndexFileMergeWriter(carbonTable).mergeCarbonIndexFilesOfSegment(carbonLoadModel.getSegmentId(), carbonTable.getTablePath(), false, String.valueOf(carbonLoadModel.getFactTimeStamp()));
// Persist the segment file describing the data/index files of this segment.
SegmentFileStore.writeSegmentFile(carbonLoadModel.getCarbonDataLoadSchema().getCarbonTable(), carbonLoadModel.getSegmentId(), String.valueOf(carbonLoadModel.getFactTimeStamp()));
// Write the (possibly updated) load model back so downstream steps see it.
CarbonTableOutputFormat.setLoadModel(configuration, carbonLoadModel);
carbonOutputCommitter.commitJob(jobContext);
} catch (Exception e) {
// Log and rethrow (Java 7+ precise rethrow keeps the declared IOException);
// NOTE(review): this relies on the try block only throwing IOException or
// unchecked exceptions — confirm the throws-clauses of the called methods.
LOGGER.error(e);
throw e;
}
}
Example usage of org.apache.carbondata.core.metadata.schema.table.CarbonTable in the Apache CarbonData project:
class CarbonHiveSerDe, method inferSchema.
/**
 * Infers column names and types for an external Carbon table when Hive supplied none,
 * reading the Carbon schema file if present or inferring it from the data files otherwise.
 * Results are appended to the supplied (mutable) lists; failures are logged and ignored
 * so SerDe initialization stays best-effort.
 *
 * @param tbl         Hive table properties (EXTERNAL flag, location, qualified name)
 * @param columnNames output list to receive inferred column names
 * @param columnTypes output list to receive the matching Hive type infos
 */
private void inferSchema(Properties tbl, List<String> columnNames, List<TypeInfo> columnTypes) {
// Only infer when Hive provided no schema at all.
if (columnNames.isEmpty() && columnTypes.isEmpty()) {
String external = tbl.getProperty("EXTERNAL");
String location = CarbonUtil.checkAndAppendFileSystemURIScheme(tbl.getProperty(hive_metastoreConstants.META_TABLE_LOCATION));
// Inference only applies to external tables with a known location.
// NOTE(review): the "TRUE" comparison is case-sensitive — confirm Hive always
// upper-cases the EXTERNAL property before relaxing this.
if (external != null && "TRUE".equals(external) && location != null) {
String[] names = tbl.getProperty(hive_metastoreConstants.META_TABLE_NAME).split("\\.");
if (names.length == 2) {
AbsoluteTableIdentifier identifier = AbsoluteTableIdentifier.from(location, names[0], names[1]);
String schemaPath = CarbonTablePath.getSchemaFilePath(identifier.getTablePath());
try {
TableInfo tableInfo = null;
// Prefer the persisted schema file; fall back to inferring from data files.
if (!FileFactory.isFileExist(schemaPath)) {
tableInfo = SchemaReader.inferSchema(identifier, false);
} else {
tableInfo = SchemaReader.getTableInfo(identifier);
}
if (tableInfo != null) {
CarbonTable carbonTable = CarbonTable.buildFromTableInfo(tableInfo);
List<CarbonColumn> columns = carbonTable.getCreateOrderColumn();
// Map each Carbon column to its Hive name/type in create order.
for (CarbonColumn column : columns) {
columnNames.add(column.getColName());
columnTypes.add(HiveDataTypeUtils.convertCarbonDataTypeToHive(column));
}
}
} catch (Exception ex) {
// Best-effort: keep SerDe init alive, but log the full stack trace
// (the original logged only ex.getMessage(), losing the cause).
LOGGER.warn("Failed to infer schema: " + ex.getMessage(), ex);
}
}
}
}
}
Example usage of org.apache.carbondata.core.metadata.schema.table.CarbonTable in the Apache CarbonData project:
class MapredCarbonInputFormat, method getQueryModel.
/**
 * Builds the query model for a scan over the Carbon table at the given path,
 * restricted to the configured projection columns and filter predicates.
 *
 * @param configuration job configuration carrying projection and filter settings
 * @param path          table path used to resolve the CarbonTable
 * @return the fully configured query model
 * @throws IOException                   if the table cannot be read
 * @throws InvalidConfigurationException if the configuration is invalid
 * @throws SQLException                  if table resolution fails
 */
private QueryModel getQueryModel(Configuration configuration, String path) throws IOException, InvalidConfigurationException, SQLException {
CarbonTable table = getCarbonTable(configuration, path);
// The projection is a comma-separated column list.
String[] projection = getProjection(configuration, table).split(",");
QueryModelBuilder builder = new QueryModelBuilder(table);
builder.projectColumns(projection);
builder.filterExpression(getFilterPredicates(configuration));
builder.dataConverter(new DataTypeConverterImpl());
return builder.build();
}
Example usage of org.apache.carbondata.core.metadata.schema.table.CarbonTable in the Apache CarbonData project:
class CarbondataPageSourceProvider, method createPageSource.
/**
 * Creates a page source for the given split: Carbon splits (identified by a
 * "queryId" schema property) get a CarbondataPageSource; anything else falls
 * back to the Hive page source.
 *
 * @param transactionHandle the connector transaction handle
 * @param session           the connector session
 * @param split             the split to read; must be a HiveSplit
 * @param columns           the columns to project
 * @return a page source for the split
 */
@Override
public ConnectorPageSource createPageSource(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorSplit split, List<ColumnHandle> columns) {
HiveSplit carbonSplit = checkType(split, HiveSplit.class, "split is not class HiveSplit");
// NOTE(review): assigning the per-split queryId into an instance field looks
// racy if createPageSource can run concurrently — confirm this provider is
// not shared across queries, or make queryId a local.
this.queryId = carbonSplit.getSchema().getProperty("queryId");
if (this.queryId == null) {
// Fall back to hive pagesource.
return super.createPageSource(transactionHandle, session, split, columns);
}
// Resolve the HDFS configuration for this table's path and overlay S3 settings.
Configuration configuration = this.hdfsEnvironment.getConfiguration(new HdfsEnvironment.HdfsContext(session, carbonSplit.getDatabase(), carbonSplit.getTable()), new Path(carbonSplit.getSchema().getProperty("tablePath")));
configuration = carbonTableReader.updateS3Properties(configuration);
// Copy every split schema property into the configuration so downstream
// Carbon readers can see them.
for (Map.Entry<Object, Object> entry : carbonSplit.getSchema().entrySet()) {
configuration.set(entry.getKey().toString(), entry.getValue().toString());
}
CarbonTable carbonTable = getCarbonTable(carbonSplit, configuration);
// Direct vector fill is enabled unless a row-level pushed filter is configured.
boolean isDirectVectorFill = carbonTableReader.config.getPushRowFilter() == null || carbonTableReader.config.getPushRowFilter().equalsIgnoreCase("false");
return new CarbondataPageSource(carbonTable, queryId, carbonSplit, columns, configuration, isDirectVectorFill);
}
Aggregations