Usage example of org.apache.carbondata.core.datastore.page.ColumnPage in the Apache CarbonData project: the encodeChildColumn method of the ColumnPageEncoder class.
/**
 * Encodes the flattened data of a complex-type child column. The raw byte
 * arrays are wrapped into a ColumnPage and run through the direct-compress
 * codec, since complex children are always stored as BYTE_ARRAY.
 *
 * @param data one byte array per row of the child column
 * @return the encoded (compressed) column page
 * @throws IOException if encoding fails
 * @throws MemoryException if page memory cannot be acquired
 */
private static EncodedColumnPage encodeChildColumn(byte[][] data)
    throws IOException, MemoryException {
  TableSpec.ColumnSpec childSpec = TableSpec.ColumnSpec.newInstance(
      "complex_inner_column", DataTypes.BYTE_ARRAY, ColumnType.COMPLEX);
  ColumnPage childPage = ColumnPage.wrapByteArrayPage(childSpec, data);
  // Direct compression needs no codec parameters, hence the null argument.
  return new DirectCompressCodec(DataTypes.BYTE_ARRAY).createEncoder(null).encode(childPage);
}
Usage example of org.apache.carbondata.core.datastore.page.ColumnPage in the Apache CarbonData project: the createEncoder method of the AdaptiveFloatingCodec class.
/**
 * Creates an encoder that converts the input page to {@code targetDataType}
 * via {@code converter} and compresses the converted page.
 *
 * @param parameter encoder parameters (unused by this codec)
 * @return a single-use page encoder
 */
@Override
public ColumnPageEncoder createEncoder(Map<String, String> parameter) {
  final Compressor compressor = CompressorFactory.getInstance().getCompressor();
  return new ColumnPageEncoder() {
    @Override
    protected byte[] encodeData(ColumnPage input) throws MemoryException, IOException {
      // This encoder is single-use; a second call indicates a programming error.
      if (encodedPage != null) {
        throw new IllegalStateException("already encoded");
      }
      encodedPage = ColumnPage.newPage(input.getColumnSpec(), targetDataType,
          input.getPageSize());
      // Converts each value of the input page into encodedPage via the codec's converter.
      input.convertValue(converter);
      try {
        return encodedPage.compress(compressor);
      } finally {
        // Release the page memory even if compression throws, avoiding a leak.
        encodedPage.freeMemory();
      }
    }
    @Override
    protected List<Encoding> getEncodingList() {
      List<Encoding> encodings = new ArrayList<>();
      encodings.add(Encoding.ADAPTIVE_FLOATING);
      return encodings;
    }
    @Override
    protected ColumnPageEncoderMeta getEncoderMeta(ColumnPage inputPage) {
      return new ColumnPageEncoderMeta(inputPage.getColumnSpec(), targetDataType, stats,
          compressor.getName());
    }
  };
}
Usage example of org.apache.carbondata.core.datastore.page.ColumnPage in the Apache CarbonData project: the createEncoder method of the DirectDictDimensionIndexCodec class.
/**
 * Creates an index-storage encoder for direct-dictionary dimension columns.
 * Rows are optionally inverted-index sorted, flattened, and compressed.
 *
 * @param parameter encoder parameters (unused by this codec)
 * @return an encoder that fills the superclass's compressedDataPage and indexStorage
 */
@Override
public ColumnPageEncoder createEncoder(Map<String, String> parameter) {
  return new IndexStorageEncoder() {
    // NOTE(review): widened from package-private to protected for consistency with
    // the sibling HighCardDictDimensionIndexCodec override; widening is always legal.
    @Override
    protected void encodeIndexStorage(ColumnPage inputPage) {
      IndexStorage indexStorage;
      byte[][] data = inputPage.getByteArrayPage();
      if (isInvertedIndex) {
        indexStorage = new BlockIndexerStorageForShort(data, false, false, isSort);
      } else {
        indexStorage = new BlockIndexerStorageForNoInvertedIndexForShort(data, false);
      }
      // Flatten the (possibly reordered) rows into one buffer before compressing.
      byte[] flattened = ByteUtil.flatten(indexStorage.getDataPage());
      super.compressedDataPage = compressor.compressByte(flattened);
      super.indexStorage = indexStorage;
    }
    @Override
    protected List<Encoding> getEncodingList() {
      List<Encoding> encodings = new ArrayList<>();
      encodings.add(Encoding.DICTIONARY);
      encodings.add(Encoding.RLE);
      if (isInvertedIndex) {
        encodings.add(Encoding.INVERTED_INDEX);
      }
      return encodings;
    }
  };
}
Usage example of org.apache.carbondata.core.datastore.page.ColumnPage in the Apache CarbonData project: the createEncoder method of the HighCardDictDimensionIndexCodec class.
/**
 * Creates an index-storage encoder for high-cardinality (no-dictionary)
 * dimension columns: rows are optionally inverted-index sorted, flattened,
 * and compressed into the superclass's fields.
 *
 * @param parameter encoder parameters (unused by this codec)
 * @return an encoder that fills compressedDataPage and indexStorage
 */
@Override
public ColumnPageEncoder createEncoder(Map<String, String> parameter) {
  return new IndexStorageEncoder() {
    @Override
    protected void encodeIndexStorage(ColumnPage input) {
      byte[][] rawRows = input.getByteArrayPage();
      IndexStorage storage = isInvertedIndex
          ? new BlockIndexerStorageForShort(rawRows, false, true, isSort)
          : new BlockIndexerStorageForNoInvertedIndexForShort(rawRows, true);
      super.compressedDataPage = compressor.compressByte(ByteUtil.flatten(storage.getDataPage()));
      super.indexStorage = storage;
    }
    @Override
    protected List<Encoding> getEncodingList() {
      List<Encoding> encodings = new ArrayList<>();
      // Only advertise the inverted index when row-id data was actually produced.
      if (indexStorage.getRowIdPageLengthInBytes() > 0) {
        encodings.add(Encoding.INVERTED_INDEX);
      }
      return encodings;
    }
  };
}
Usage example of org.apache.carbondata.core.datastore.page.ColumnPage in the Apache CarbonData project: the onPageAdded method of the DataMapWriterListener class.
/**
 * Notifies every registered datamap writer that a page has been added,
 * handing each writer only the column pages it indexes.
 *
 * @param blockletId id of the blocklet the page belongs to
 * @param pageId sequence number of the page, starting from 0
 * @param tablePage the page data to pick column pages from
 * @throws IOException if a writer fails to process the page
 */
public void onPageAdded(int blockletId, int pageId, TablePage tablePage) throws IOException {
  for (Map.Entry<List<String>, List<DataMapWriter>> registration : registry.entrySet()) {
    List<String> indexedColumns = registration.getKey();
    ColumnPage[] selectedPages = new ColumnPage[indexedColumns.size()];
    int position = 0;
    for (String column : indexedColumns) {
      selectedPages[position++] = tablePage.getColumnPage(column);
    }
    for (DataMapWriter writer : registration.getValue()) {
      writer.onPageAdded(blockletId, pageId, selectedPages);
    }
  }
}
Aggregations