Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project hbase by apache.
The class SparkSQLPushDownFilter, method parseFrom.
/**
 * @param pbBytes A pb serialized instance
 * @return An instance of SparkSQLPushDownFilter
 * @throws org.apache.hadoop.hbase.exceptions.DeserializationException if the bytes cannot be parsed
 */
@SuppressWarnings("unused")
public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes)
    throws DeserializationException {
  SparkFilterProtos.SQLPredicatePushDownFilter proto;
  try {
    proto = SparkFilterProtos.SQLPredicatePushDownFilter.parseFrom(pbBytes);
  } catch (InvalidProtocolBufferException e) {
    throw new DeserializationException(e);
  }
  String encoder = proto.getEncoderClassName();
  BytesEncoder enc = JavaBytesEncoder.create(encoder);

  // Load the DynamicLogicExpression
  DynamicLogicExpression dynamicLogicExpression =
      DynamicLogicExpressionBuilder.build(proto.getDynamicLogicExpression(), enc);

  // Load valuesFromQuery: unwrap each ByteString into a raw byte array
  final List<ByteString> valueFromQueryArrayList = proto.getValueFromQueryArrayList();
  byte[][] valueFromQueryArray = new byte[valueFromQueryArrayList.size()][];
  for (int i = 0; i < valueFromQueryArrayList.size(); i++) {
    valueFromQueryArray[i] = valueFromQueryArrayList.get(i).toByteArray();
  }

  // Load the mapping from HBase family/qualifier to Spark SQL column name
  HashMap<ByteArrayComparable, HashMap<ByteArrayComparable, String>> currentCellToColumnIndexMap =
      new HashMap<>();
  for (SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping mapping :
      proto.getCellToColumnMappingList()) {
    byte[] familyArray = mapping.getColumnFamily().toByteArray();
    ByteArrayComparable familyByteComparable =
        new ByteArrayComparable(familyArray, 0, familyArray.length);
    HashMap<ByteArrayComparable, String> qualifierMap =
        currentCellToColumnIndexMap.get(familyByteComparable);
    if (qualifierMap == null) {
      qualifierMap = new HashMap<>();
      currentCellToColumnIndexMap.put(familyByteComparable, qualifierMap);
    }
    byte[] qualifierArray = mapping.getQualifier().toByteArray();
    ByteArrayComparable qualifierByteComparable =
        new ByteArrayComparable(qualifierArray, 0, qualifierArray.length);
    qualifierMap.put(qualifierByteComparable, mapping.getColumnName());
  }
  return new SparkSQLPushDownFilter(dynamicLogicExpression, valueFromQueryArray,
      currentCellToColumnIndexMap, encoder);
}
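Going the other direction, each raw query value is wrapped back into a ByteString before the message is serialized. A minimal sketch of that serialization step, assuming the conventional protobuf-java builder names implied by the getters above (setEncoderClassName, addValueFromQueryArray, and so on) and omitting the cell-to-column mappings for brevity; this is an illustration, not the project's actual serializer:

public static byte[] toPbBytes(String encoderClassName, String dynamicLogicExpression,
    byte[][] valueFromQueryArray) {
  // Sketch only: builder method names are assumed from the generated getters above.
  SparkFilterProtos.SQLPredicatePushDownFilter.Builder builder =
      SparkFilterProtos.SQLPredicatePushDownFilter.newBuilder();
  builder.setEncoderClassName(encoderClassName);
  builder.setDynamicLogicExpression(dynamicLogicExpression);
  for (byte[] value : valueFromQueryArray) {
    // ByteString.copyFrom copies the bytes, so the caller's array can be reused safely
    builder.addValueFromQueryArray(ByteString.copyFrom(value));
  }
  return builder.build().toByteArray();
}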
Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project bitcask-java by krestenkrab.
The class BitCaskTest, method wrapTest.
@Test
public void wrapTest() throws Exception {
  BitCaskOptions opts = new BitCaskOptions();
  // Force a tiny file size so each write rolls over to a new data file
  opts.max_file_size = 1;
  initDataset("/tmp/bc.test.wrap", opts, defaultDataset()).close();

  BitCask b = BitCask.open(new File("/tmp/bc.test.wrap"), new BitCaskOptions());
  for (Map.Entry<ByteString, ByteString> ent : defaultDataset().entrySet()) {
    assertEquals(ent.getValue(), b.get(ent.getKey()));
  }
  assertEquals(3, b.readable_files().length);
  b.close();
}
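The assertions above work because ByteString implements content-based equals() and hashCode(), so keys and values compare by bytes rather than by reference. The excerpt omits defaultDataset(); a hypothetical reconstruction with placeholder entries (three puts would be consistent with the three readable files asserted above, though the project's real test data may differ):

private static Map<ByteString, ByteString> defaultDataset() {
  // Hypothetical contents: the project's actual test data may differ.
  Map<ByteString, ByteString> ds = new HashMap<>();
  ds.put(ByteString.copyFromUtf8("k1"), ByteString.copyFromUtf8("v1"));
  ds.put(ByteString.copyFromUtf8("k2"), ByteString.copyFromUtf8("v2"));
  ds.put(ByteString.copyFromUtf8("k3"), ByteString.copyFromUtf8("v3"));
  return ds;
}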
Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project bitcask-java by krestenkrab.
The class BitCaskTest, method initDataset.
private BitCask initDataset(String path, BitCaskOptions opts, Map<ByteString, ByteString> ds)
    throws Exception {
  // Start from a clean directory, then write the dataset with a writable cask
  File dir = new File(path);
  rmdir(dir);
  opts.read_write = true;
  BitCask b = BitCask.open(dir, opts);
  for (Map.Entry<ByteString, ByteString> ent : ds.entrySet()) {
    b.put(ent.getKey(), ent.getValue());
  }
  return b;
}
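openTest below calls a two-argument initDataset overload that this excerpt does not show; presumably it just delegates with default options, along these lines:

private BitCask initDataset(String path, Map<ByteString, ByteString> ds) throws Exception {
  // Presumed overload (not shown in the excerpt): delegate with default options.
  return initDataset(path, new BitCaskOptions(), ds);
}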
Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project bitcask-java by krestenkrab.
The class BitCaskTest, method openTest.
@Test
public void openTest() throws Exception {
  initDataset("/tmp/bc.test.open", defaultDataset()).close();

  BitCask b = BitCask.open(new File("/tmp/bc.test.open"), new BitCaskOptions());
  final Map<ByteString, ByteString> l = contents(b);
  assertEquals(defaultDataset(), l);
  b.close();
}
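The contents(BitCask) helper is also omitted from the excerpt. Its role is clear from the assertion: collect every live key/value pair into a map. A sketch under the assumption that BitCask exposes key iteration; listKeys() here is an assumed accessor, not a confirmed method of the project:

private static Map<ByteString, ByteString> contents(BitCask b) throws Exception {
  // listKeys() is an assumption; the project may expose iteration differently (e.g. a fold).
  Map<ByteString, ByteString> out = new HashMap<>();
  for (ByteString key : b.listKeys()) {
    out.put(key, b.get(key));
  }
  return out;
}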
Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project hbase by apache.
The class ProtobufUtil, method toScan.
/**
 * Convert a protocol buffer Scan to a client Scan.
 *
 * @param proto the protocol buffer Scan to convert
 * @return the converted client Scan
 * @throws IOException if the protocol buffer cannot be converted
 */
public static Scan toScan(final ClientProtos.Scan proto) throws IOException {
  byte[] startRow = HConstants.EMPTY_START_ROW;
  byte[] stopRow = HConstants.EMPTY_END_ROW;
  boolean includeStartRow = true;
  boolean includeStopRow = false;
  if (proto.hasStartRow()) {
    startRow = proto.getStartRow().toByteArray();
  }
  if (proto.hasStopRow()) {
    stopRow = proto.getStopRow().toByteArray();
  }
  if (proto.hasIncludeStartRow()) {
    includeStartRow = proto.getIncludeStartRow();
  }
  if (proto.hasIncludeStopRow()) {
    includeStopRow = proto.getIncludeStopRow();
  }
  Scan scan =
      new Scan().withStartRow(startRow, includeStartRow).withStopRow(stopRow, includeStopRow);
  if (proto.hasCacheBlocks()) {
    scan.setCacheBlocks(proto.getCacheBlocks());
  }
  if (proto.hasMaxVersions()) {
    scan.setMaxVersions(proto.getMaxVersions());
  }
  if (proto.hasStoreLimit()) {
    scan.setMaxResultsPerColumnFamily(proto.getStoreLimit());
  }
  if (proto.hasStoreOffset()) {
    scan.setRowOffsetPerColumnFamily(proto.getStoreOffset());
  }
  if (proto.hasLoadColumnFamiliesOnDemand()) {
    scan.setLoadColumnFamiliesOnDemand(proto.getLoadColumnFamiliesOnDemand());
  }
  if (proto.getCfTimeRangeCount() > 0) {
    for (HBaseProtos.ColumnFamilyTimeRange cftr : proto.getCfTimeRangeList()) {
      TimeRange timeRange = protoToTimeRange(cftr.getTimeRange());
      scan.setColumnFamilyTimeRange(cftr.getColumnFamily().toByteArray(), timeRange);
    }
  }
  if (proto.hasTimeRange()) {
    TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
    scan.setTimeRange(timeRange);
  }
  if (proto.hasFilter()) {
    FilterProtos.Filter filter = proto.getFilter();
    scan.setFilter(ProtobufUtil.toFilter(filter));
  }
  if (proto.hasBatchSize()) {
    scan.setBatch(proto.getBatchSize());
  }
  if (proto.hasMaxResultSize()) {
    scan.setMaxResultSize(proto.getMaxResultSize());
  }
  if (proto.hasSmall()) {
    scan.setSmall(proto.getSmall());
  }
  if (proto.hasAllowPartialResults()) {
    scan.setAllowPartialResults(proto.getAllowPartialResults());
  }
  for (NameBytesPair attribute : proto.getAttributeList()) {
    scan.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
  }
  if (proto.getColumnCount() > 0) {
    for (Column column : proto.getColumnList()) {
      byte[] family = column.getFamily().toByteArray();
      if (column.getQualifierCount() > 0) {
        for (ByteString qualifier : column.getQualifierList()) {
          scan.addColumn(family, qualifier.toByteArray());
        }
      } else {
        scan.addFamily(family);
      }
    }
  }
  if (proto.hasReversed()) {
    scan.setReversed(proto.getReversed());
  }
  if (proto.hasConsistency()) {
    scan.setConsistency(toConsistency(proto.getConsistency()));
  }
  if (proto.hasCaching()) {
    scan.setCaching(proto.getCaching());
  }
  if (proto.hasMvccReadPoint()) {
    PackagePrivateFieldAccessor.setMvccReadPoint(scan, proto.getMvccReadPoint());
  }
  // A "small" scan always uses pread; otherwise honor an explicitly requested read type
  if (scan.isSmall()) {
    scan.setReadType(Scan.ReadType.PREAD);
  } else if (proto.hasReadType()) {
    scan.setReadType(toReadType(proto.getReadType()));
  }
  return scan;
}
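ByteString enters this conversion wherever raw bytes cross the wire: start/stop rows, attribute values, column families, and qualifiers all arrive as ByteString and are unwrapped with toByteArray(). For contrast, a sketch of the column portion going the other way, from a client Scan into the protobuf builder; the builder names mirror the getters used above, and this is an illustration rather than HBase's actual serializer:

// Sketch: populating the repeated Column field that toScan() reads back.
ClientProtos.Scan.Builder scanBuilder = ClientProtos.Scan.newBuilder();
for (Map.Entry<byte[], NavigableSet<byte[]>> family : scan.getFamilyMap().entrySet()) {
  Column.Builder columnBuilder = Column.newBuilder();
  // ByteString.copyFrom wraps the family bytes in an immutable ByteString
  columnBuilder.setFamily(ByteString.copyFrom(family.getKey()));
  if (family.getValue() != null) {
    for (byte[] qualifier : family.getValue()) {
      columnBuilder.addQualifier(ByteString.copyFrom(qualifier));
    }
  }
  scanBuilder.addColumn(columnBuilder.build());
}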