Use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by Apache.
Class CarbonInputFormat, method getQueryModel.
/**
 * Builds the {@link QueryModel} for the given split: projection, filter
 * resolver tree, and any invalid-segment information carried by the split.
 *
 * @param inputSplit the split being processed; invalid-segment data is read
 *                   from it when it is a {@link CarbonMultiBlockSplit}
 * @param taskAttemptContext task context supplying the job configuration
 * @return the fully configured query model
 * @throws IOException if the table or filter cannot be read from the configuration
 */
public QueryModel getQueryModel(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException {
  Configuration conf = taskAttemptContext.getConfiguration();
  CarbonTable table = getCarbonTable(conf);
  // Reuse the identifier the table already carries instead of deserializing a new one.
  AbsoluteTableIdentifier tableIdentifier = table.getAbsoluteTableIdentifier();
  // The query plan includes the projection columns configured on the job.
  CarbonQueryPlan plan = CarbonInputFormatUtil.createQueryPlan(table, getColumnProjection(conf));
  QueryModel model = QueryModel.createModel(tableIdentifier, plan, table);
  // Attach the resolved filter so blocklets can be pruned before scanning.
  Expression filterExpression = getFilterPredicates(conf);
  CarbonInputFormatUtil.processFilterExpression(filterExpression, table);
  model.setFilterExpressionResolverTree(CarbonInputFormatUtil.resolveFilter(filterExpression, tableIdentifier));
  // Propagate invalid-segment info so the file-level index store is refreshed.
  if (inputSplit instanceof CarbonMultiBlockSplit) {
    CarbonMultiBlockSplit multiBlockSplit = (CarbonMultiBlockSplit) inputSplit;
    List<String> invalidSegmentIds = multiBlockSplit.getAllSplits().get(0).getInvalidSegments();
    if (!invalidSegmentIds.isEmpty()) {
      model.setInvalidSegmentIds(invalidSegmentIds);
    }
    List<UpdateVO> invalidRanges = multiBlockSplit.getAllSplits().get(0).getInvalidTimestampRange();
    if (null != invalidRanges && !invalidRanges.isEmpty()) {
      model.setInvalidBlockForSegmentId(invalidRanges);
    }
  }
  return model;
}
Use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by Apache.
Class IncrementalColumnDictionaryGeneratorTest, method writeDictionaryData.
@Test
public void writeDictionaryData() throws Exception {
  // Column schema for the dictionary column under test.
  ColumnSchema columnSchema = new ColumnSchema();
  columnSchema.setColumnName("empNameCol");
  columnSchema.setColumnUniqueId("empNameCol");
  CarbonDimension carbonDimension = new CarbonDimension(columnSchema, 0, 0, 0, 0, 0);
  // Generator that will be asked to flush its dictionary to disk.
  IncrementalColumnDictionaryGenerator generator = new IncrementalColumnDictionaryGenerator(carbonDimension, 10);
  // Table schema/info so the generator can resolve the dictionary store location.
  TableSchema tableSchema = new TableSchema();
  tableSchema.setTableName("TestTable");
  tableSchema.setListOfColumns(Arrays.asList(columnSchema));
  CarbonMetadata metadata = CarbonMetadata.getInstance();
  TableInfo tableInfo = new TableInfo();
  tableInfo.setFactTable(tableSchema);
  tableInfo.setTableUniqueName("TestTable");
  tableInfo.setDatabaseName("test");
  String storePath = System.getProperty("java.io.tmpdir") + "/tmp";
  File dictPath = new File(storePath + "/test/TestTable/Metadata/");
  // mkdirs() may return false when the directory already exists, so its
  // result is intentionally ignored (previously it was only printed).
  dictPath.mkdirs();
  tableInfo.setStorePath(storePath);
  CarbonTable carbonTable = new CarbonTable();
  carbonTable.loadCarbonTable(tableInfo);
  // Register the table with the metadata singleton so the generator can find it.
  metadata.addCarbonTable(carbonTable);
  try {
    // Write the dictionary and verify the file was created.
    generator.writeDictionaryData("TestTable");
    File dictionaryFile = new File(dictPath, "empNameCol.dict");
    assertTrue(dictionaryFile.exists());
    dictionaryFile.delete();
  } finally {
    // FIX: always unregister the table and remove the temp directory, even
    // when the assertion fails, so shared singleton state and temp files do
    // not leak into subsequent tests.
    metadata.removeTable(carbonTable.getTableUniqueName());
    cleanUpDirectory(new File(storePath));
  }
}
Use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by Apache.
Class CarbonCompactionUtil, method getNextTableToCompact.
/**
 * Scans the given tables and returns the first one with a pending compaction
 * request whose identifier is not in the skip list.
 *
 * @param tableMetas candidate tables to inspect
 * @param skipList table identifiers to be ignored in this round
 * @return the first compactable table, or {@code null} when none qualifies
 */
public static TableMeta getNextTableToCompact(TableMeta[] tableMetas, List<CarbonTableIdentifier> skipList) {
  for (TableMeta tableMeta : tableMetas) {
    String metadataPath = tableMeta.carbonTable.getMetaDataFilepath();
    // A table qualifies only when a request exists and it is not skipped.
    boolean compactionRequested = CarbonCompactionUtil.isCompactionRequiredForTable(metadataPath);
    if (compactionRequested && !skipList.contains(tableMeta.carbonTableIdentifier)) {
      return tableMeta;
    }
  }
  return null;
}
Use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by Apache.
Class CarbondataRecordSetProvider, method getRecordSet.
@Override
public RecordSet getRecordSet(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorSplit split, List<? extends ColumnHandle> columns) {
  requireNonNull(split, "split is null");
  requireNonNull(columns, "columns is null");
  CarbondataSplit carbondataSplit = checkType(split, CarbondataSplit.class, "split is not class CarbondataSplit");
  checkArgument(carbondataSplit.getConnectorId().equals(connectorId), "split is not for this connector");
  // Convert all column handles and collect the projected column names in one
  // pass. FIX: use a StringBuilder instead of O(n^2) String concatenation,
  // and reuse the handle returned by checkType instead of re-casting.
  ImmutableList.Builder<CarbondataColumnHandle> handles = ImmutableList.builder();
  StringBuilder colsBuilder = new StringBuilder();
  for (ColumnHandle handle : columns) {
    CarbondataColumnHandle carbondataHandle = checkType(handle, CarbondataColumnHandle.class, "handle");
    handles.add(carbondataHandle);
    if (colsBuilder.length() > 0) {
      colsBuilder.append(',');
    }
    colsBuilder.append(carbondataHandle.getColumnName());
  }
  // Build the column projection; null means "all columns" downstream.
  String targetCols = colsBuilder.length() > 0 ? colsBuilder.toString() : null;
  CarbonTableCacheModel tableCacheModel = carbonTableReader.getCarbonCache(carbondataSplit.getSchemaTableName());
  checkNotNull(tableCacheModel, "tableCacheModel should not be null");
  checkNotNull(tableCacheModel.carbonTable, "tableCacheModel.carbonTable should not be null");
  checkNotNull(tableCacheModel.tableInfo, "tableCacheModel.tableInfo should not be null");
  // Build the query model for the cached table.
  CarbonTable targetTable = tableCacheModel.carbonTable;
  CarbonQueryPlan queryPlan = CarbonInputFormatUtil.createQueryPlan(targetTable, targetCols);
  QueryModel queryModel = QueryModel.createModel(targetTable.getAbsoluteTableIdentifier(), queryPlan, targetTable);
  // Push down the split's constraints as a filter on the query model.
  fillFilter2QueryModel(queryModel, carbondataSplit.getConstraints(), targetTable);
  // Return a record set backed by the configured query model.
  return new CarbondataRecordSet(targetTable, session, carbondataSplit, handles.build(), queryModel);
}
Use of org.apache.carbondata.core.metadata.schema.table.CarbonTable in project carbondata by Apache.
Class CarbondataMetadata, method getColumnHandles.
@Override
public Map<String, ColumnHandle> getColumnHandles(ConnectorSession session, ConnectorTableHandle tableHandle) {
  CarbondataTableHandle handle = checkType(tableHandle, CarbondataTableHandle.class, "tableHandle");
  checkArgument(handle.getConnectorId().equals(connectorId), "tableHandle is not for this connector");
  String schemaName = handle.getSchemaTableName().getSchemaName();
  if (!listSchemaNamesInternal().contains(schemaName)) {
    throw new SchemaNotFoundException(schemaName);
  }
  // CarbonTable (official struct) is stored in CarbonMetadata (official struct).
  CarbonTable cb = carbonTableReader.getTable(handle.getSchemaTableName());
  if (cb == null) {
    throw new TableNotFoundException(handle.getSchemaTableName());
  }
  ImmutableMap.Builder<String, ColumnHandle> columnHandles = ImmutableMap.builder();
  String tableName = handle.getSchemaTableName().getTableName();
  // Map every dimension column to a connector column handle.
  // FIX: removed the unused local `complex` and the discarded calls to
  // getNumberOfChild() / getListOfChildDimensions() — they were dead
  // statements whose results were never used.
  for (CarbonDimension column : cb.getDimensionByTableName(tableName)) {
    ColumnSchema cs = column.getColumnSchema();
    Type spiType = CarbondataType2SpiMapper(cs);
    columnHandles.put(cs.getColumnName(), new CarbondataColumnHandle(connectorId, cs.getColumnName(), spiType, column.getSchemaOrdinal(), column.getKeyOrdinal(), column.getColumnGroupOrdinal(), false, cs.getColumnGroupId(), cs.getColumnUniqueId(), cs.isUseInvertedIndex(), cs.getPrecision(), cs.getScale()));
  }
  // Map every measure column to a connector column handle.
  for (CarbonMeasure measure : cb.getMeasureByTableName(tableName)) {
    ColumnSchema cs = measure.getColumnSchema();
    Type spiType = CarbondataType2SpiMapper(cs);
    columnHandles.put(cs.getColumnName(), new CarbondataColumnHandle(connectorId, cs.getColumnName(), spiType, cs.getSchemaOrdinal(), measure.getOrdinal(), cs.getColumnGroupId(), true, cs.getColumnGroupId(), cs.getColumnUniqueId(), cs.isUseInvertedIndex(), cs.getPrecision(), cs.getScale()));
  }
  // NOTE(review): rebuilt on every call; consider caching if this is hot.
  columnHandleMap = columnHandles.build();
  return columnHandleMap;
}
Aggregations