Search in sources:

Example 1 with ByteString

Use of com.google.protobuf.ByteString in the Apache HBase project.

From the class SparkSQLPushDownFilter, the method parseFrom:

/**
 * Deserializes a protobuf-encoded byte array back into a {@link SparkSQLPushDownFilter}.
 *
 * @param pbBytes A pb serialized instance
 * @return An instance of SparkSQLPushDownFilter
 * @throws org.apache.hadoop.hbase.exceptions.DeserializationException if the bytes are not a
 *         valid serialized SQLPredicatePushDownFilter
 */
@SuppressWarnings("unused")
public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
    final SparkFilterProtos.SQLPredicatePushDownFilter proto;
    try {
        proto = SparkFilterProtos.SQLPredicatePushDownFilter.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
        throw new DeserializationException(e);
    }
    final String encoderClassName = proto.getEncoderClassName();
    final BytesEncoder bytesEncoder = JavaBytesEncoder.create(encoderClassName);

    // Rebuild the dynamic logic expression tree from its serialized string form.
    final DynamicLogicExpression expression =
            DynamicLogicExpressionBuilder.build(proto.getDynamicLogicExpression(), bytesEncoder);

    // Materialize the serialized query values as raw byte arrays.
    final List<ByteString> serializedValues = proto.getValueFromQueryArrayList();
    final byte[][] queryValues = new byte[serializedValues.size()][];
    int idx = 0;
    for (ByteString value : serializedValues) {
        queryValues[idx++] = value.toByteArray();
    }

    // Rebuild the family -> (qualifier -> Spark SQL column name) lookup table.
    final HashMap<ByteArrayComparable, HashMap<ByteArrayComparable, String>> cellToColumnMap = new HashMap<>();
    for (SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping mapping : proto.getCellToColumnMappingList()) {
        byte[] family = mapping.getColumnFamily().toByteArray();
        ByteArrayComparable familyKey = new ByteArrayComparable(family, 0, family.length);
        HashMap<ByteArrayComparable, String> qualifierMap = cellToColumnMap.get(familyKey);
        if (qualifierMap == null) {
            qualifierMap = new HashMap<>();
            cellToColumnMap.put(familyKey, qualifierMap);
        }
        byte[] qualifier = mapping.getQualifier().toByteArray();
        ByteArrayComparable qualifierKey = new ByteArrayComparable(qualifier, 0, qualifier.length);
        qualifierMap.put(qualifierKey, mapping.getColumnName());
    }
    return new SparkSQLPushDownFilter(expression, queryValues, cellToColumnMap, encoderClassName);
}
Also used : HashMap(java.util.HashMap) ByteString(com.google.protobuf.ByteString) SparkFilterProtos(org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) ByteString(com.google.protobuf.ByteString) BytesEncoder(org.apache.hadoop.hbase.spark.datasources.BytesEncoder) JavaBytesEncoder(org.apache.hadoop.hbase.spark.datasources.JavaBytesEncoder) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)

Example 2 with ByteString

Use of com.google.protobuf.ByteString in the bitcask-java project by krestenkrab.

From the class BitCaskTest, the method wrapTest:

/** Verifies entries stay readable after the dataset wraps across multiple data files. */
@Test
public void wrapTest() throws Exception {
    BitCaskOptions writeOpts = new BitCaskOptions();
    // A 1-byte cap forces a file rollover on every write, so the data spans several files.
    writeOpts.max_file_size = 1;
    initDataset("/tmp/bc.test.wrap", writeOpts, defaultDataset()).close();

    BitCask reopened = BitCask.open(new File("/tmp/bc.test.wrap"), new BitCaskOptions());
    for (Map.Entry<ByteString, ByteString> entry : defaultDataset().entrySet()) {
        assertEquals(entry.getValue(), reopened.get(entry.getKey()));
    }
    assertEquals(3, reopened.readable_files().length);
    reopened.close();
}
Also used : ByteString(com.google.protobuf.ByteString) File(java.io.File) Map(java.util.Map) HashMap(java.util.HashMap) Test(org.junit.Test)

Example 3 with ByteString

Use of com.google.protobuf.ByteString in the bitcask-java project by krestenkrab.

From the class BitCaskTest, the method initDataset:

/**
 * Creates a fresh BitCask directory at {@code path}, writes every entry of {@code ds} into
 * it, and returns the still-open, read-write BitCask handle (the caller must close it).
 *
 * @param path directory to (re)create for the cask
 * @param opts options to open with; {@code read_write} is forced on
 * @param ds   key/value pairs to seed the cask with
 * @return the open BitCask containing exactly the entries of {@code ds}
 * @throws Exception if the cask cannot be opened or a write fails
 */
private BitCask initDataset(String path, BitCaskOptions opts, Map<ByteString, ByteString> ds) throws Exception {
    File dir = new File(path);
    // Start from an empty directory so the cask contains exactly ds afterwards.
    rmdir(dir);
    opts.read_write = true;
    BitCask b = BitCask.open(dir, opts);
    try {
        for (Map.Entry<ByteString, ByteString> ent : ds.entrySet()) {
            b.put(ent.getKey(), ent.getValue());
        }
    } catch (Exception e) {
        // Don't leak the open read-write cask if seeding fails part-way through.
        b.close();
        throw e;
    }
    return b;
}
Also used : ByteString(com.google.protobuf.ByteString) File(java.io.File) Map(java.util.Map) HashMap(java.util.HashMap)

Example 4 with ByteString

Use of com.google.protobuf.ByteString in the bitcask-java project by krestenkrab.

From the class BitCaskTest, the method openTest:

/** Verifies that a dataset written to disk can be re-opened and read back intact. */
@Test
public void openTest() throws Exception {
    initDataset("/tmp/bc.test.open", defaultDataset()).close();
    BitCask reopened = BitCask.open(new File("/tmp/bc.test.open"), new BitCaskOptions());
    final Map<ByteString, ByteString> actual = contents(reopened);
    assertEquals(actual, defaultDataset());
    reopened.close();
}
Also used : ByteString(com.google.protobuf.ByteString) File(java.io.File) Test(org.junit.Test)

Example 5 with ByteString

Use of com.google.protobuf.ByteString in the Apache HBase project.

From the class ProtobufUtil, the method toScan:

/**
   * Convert a protocol buffer Scan to a client Scan
   *
   * @param proto the protocol buffer Scan to convert
   * @return the converted client Scan
   * @throws IOException if the embedded filter cannot be converted
   */
public static Scan toScan(final ClientProtos.Scan proto) throws IOException {
    // Defaults: unbounded row range, start row inclusive, stop row exclusive.
    byte[] startRow = HConstants.EMPTY_START_ROW;
    byte[] stopRow = HConstants.EMPTY_END_ROW;
    boolean includeStartRow = true;
    boolean includeStopRow = false;
    if (proto.hasStartRow()) {
        startRow = proto.getStartRow().toByteArray();
    }
    if (proto.hasStopRow()) {
        stopRow = proto.getStopRow().toByteArray();
    }
    if (proto.hasIncludeStartRow()) {
        includeStartRow = proto.getIncludeStartRow();
    }
    if (proto.hasIncludeStopRow()) {
        includeStopRow = proto.getIncludeStopRow();
    }
    Scan scan = new Scan().withStartRow(startRow, includeStartRow).withStopRow(stopRow, includeStopRow);
    // Every field below is optional in the proto; only copy values that were explicitly set,
    // leaving the client Scan's own defaults otherwise.
    if (proto.hasCacheBlocks()) {
        scan.setCacheBlocks(proto.getCacheBlocks());
    }
    if (proto.hasMaxVersions()) {
        scan.setMaxVersions(proto.getMaxVersions());
    }
    if (proto.hasStoreLimit()) {
        scan.setMaxResultsPerColumnFamily(proto.getStoreLimit());
    }
    if (proto.hasStoreOffset()) {
        scan.setRowOffsetPerColumnFamily(proto.getStoreOffset());
    }
    if (proto.hasLoadColumnFamiliesOnDemand()) {
        scan.setLoadColumnFamiliesOnDemand(proto.getLoadColumnFamiliesOnDemand());
    }
    // Per-column-family time ranges, applied in addition to any scan-wide time range below.
    if (proto.getCfTimeRangeCount() > 0) {
        for (HBaseProtos.ColumnFamilyTimeRange cftr : proto.getCfTimeRangeList()) {
            TimeRange timeRange = protoToTimeRange(cftr.getTimeRange());
            scan.setColumnFamilyTimeRange(cftr.getColumnFamily().toByteArray(), timeRange);
        }
    }
    if (proto.hasTimeRange()) {
        TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
        scan.setTimeRange(timeRange);
    }
    // Deserializing the filter is the only step that can throw IOException here.
    if (proto.hasFilter()) {
        FilterProtos.Filter filter = proto.getFilter();
        scan.setFilter(ProtobufUtil.toFilter(filter));
    }
    if (proto.hasBatchSize()) {
        scan.setBatch(proto.getBatchSize());
    }
    if (proto.hasMaxResultSize()) {
        scan.setMaxResultSize(proto.getMaxResultSize());
    }
    if (proto.hasSmall()) {
        scan.setSmall(proto.getSmall());
    }
    if (proto.hasAllowPartialResults()) {
        scan.setAllowPartialResults(proto.getAllowPartialResults());
    }
    // Arbitrary named client attributes carried alongside the scan.
    for (NameBytesPair attribute : proto.getAttributeList()) {
        scan.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    // Column selection: a column with no qualifiers selects the entire family.
    if (proto.getColumnCount() > 0) {
        for (Column column : proto.getColumnList()) {
            byte[] family = column.getFamily().toByteArray();
            if (column.getQualifierCount() > 0) {
                for (ByteString qualifier : column.getQualifierList()) {
                    scan.addColumn(family, qualifier.toByteArray());
                }
            } else {
                scan.addFamily(family);
            }
        }
    }
    if (proto.hasReversed()) {
        scan.setReversed(proto.getReversed());
    }
    if (proto.hasConsistency()) {
        scan.setConsistency(toConsistency(proto.getConsistency()));
    }
    if (proto.hasCaching()) {
        scan.setCaching(proto.getCaching());
    }
    if (proto.hasMvccReadPoint()) {
        PackagePrivateFieldAccessor.setMvccReadPoint(scan, proto.getMvccReadPoint());
    }
    // A "small" scan forces PREAD and takes precedence over any explicit read type in the proto.
    if (scan.isSmall()) {
        scan.setReadType(Scan.ReadType.PREAD);
    } else if (proto.hasReadType()) {
        scan.setReadType(toReadType(proto.getReadType()));
    }
    return scan;
}
Also used : TimeRange(org.apache.hadoop.hbase.io.TimeRange) NameBytesPair(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) Column(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) ByteString(com.google.protobuf.ByteString) FilterProtos(org.apache.hadoop.hbase.protobuf.generated.FilterProtos) Scan(org.apache.hadoop.hbase.client.Scan) HBaseProtos(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos)

Aggregations

ByteString (com.google.protobuf.ByteString)406 Test (org.junit.Test)143 ArrayList (java.util.ArrayList)65 ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString)63 HashMap (java.util.HashMap)41 InvalidProtocolBufferException (com.google.protobuf.InvalidProtocolBufferException)40 IOException (java.io.IOException)37 List (java.util.List)33 Map (java.util.Map)33 ServerRequest (com.pokegoapi.main.ServerRequest)17 ExecutionException (java.util.concurrent.ExecutionException)16 AnnotateImageRequest (com.google.cloud.vision.v1.AnnotateImageRequest)14 AnnotateImageResponse (com.google.cloud.vision.v1.AnnotateImageResponse)14 BatchAnnotateImagesResponse (com.google.cloud.vision.v1.BatchAnnotateImagesResponse)14 Feature (com.google.cloud.vision.v1.Feature)14 Image (com.google.cloud.vision.v1.Image)14 ImageAnnotatorClient (com.google.cloud.vision.v1.ImageAnnotatorClient)14 FileInputStream (java.io.FileInputStream)13 ByteBuffer (java.nio.ByteBuffer)13 WebImage (com.google.cloud.vision.v1.WebDetection.WebImage)12