
Example 16 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project phoenix by apache.

Class ScanRanges, method getDescTimeRange.

public static TimeRange getDescTimeRange(KeyRange lowestKeyRange, KeyRange highestKeyRange, Field f) throws IOException {
    boolean lowerUnbound = lowestKeyRange.lowerUnbound();
    boolean lowerInclusive = lowestKeyRange.isLowerInclusive();
    boolean upperUnbound = highestKeyRange.upperUnbound();
    boolean upperInclusive = highestKeyRange.isUpperInclusive();
    PDataCodec codec = PLong.INSTANCE.getCodec();
    // The key ranges are DESC-encoded, so the byte-wise lowest key holds the
    // largest timestamp and the byte-wise highest key holds the smallest.
    long low = lowerUnbound ? -1 : codec.decodeLong(lowestKeyRange.getLowerRange(), 0, SortOrder.DESC);
    long high = upperUnbound ? -1 : codec.decodeLong(highestKeyRange.getUpperRange(), 0, SortOrder.DESC);
    long newHigh;
    long newLow;
    if (!lowerUnbound && !upperUnbound) {
        // Both bounds present: swap them. TimeRange's min is inclusive and
        // its max exclusive, hence the increments.
        newHigh = lowerInclusive ? safelyIncrement(low) : low;
        newLow = upperInclusive ? high : safelyIncrement(high);
        return new TimeRange(newLow, newHigh);
    } else if (!lowerUnbound && upperUnbound) {
        // Only a lower key bound: it caps the time range from above.
        newHigh = lowerInclusive ? safelyIncrement(low) : low;
        newLow = 0;
        return new TimeRange(newLow, newHigh);
    } else if (lowerUnbound && !upperUnbound) {
        // Only an upper key bound: it floors the time range from below.
        newLow = upperInclusive ? high : safelyIncrement(high);
        newHigh = HConstants.LATEST_TIMESTAMP;
        return new TimeRange(newLow, newHigh);
    } else {
        // Unbounded on both sides: the full time range.
        newLow = 0;
        newHigh = HConstants.LATEST_TIMESTAMP;
        return new TimeRange(newLow, newHigh);
    }
}
Also used: TimeRange(org.apache.hadoop.hbase.io.TimeRange), PDataCodec(org.apache.phoenix.schema.types.PDataType.PDataCodec)
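
A minimal sketch of the inversion this method performs, using plain longs in place of Phoenix's DESC-encoded KeyRange decoding. The class name, values, and bounds here are hypothetical, and TimeRange.between is the HBase 2.x factory rather than the constructor used above:

import org.apache.hadoop.hbase.io.TimeRange;

public class DescTimeRangeSketch {
    // Mirrors the bounded branch of getDescTimeRange: because the keys are
    // DESC-encoded, the decoded value of the lowest key is the largest
    // timestamp and becomes the range's max; the decoded value of the highest
    // key becomes its min. TimeRange's min is inclusive and its max exclusive,
    // hence the +1 adjustments (safelyIncrement in the original).
    static TimeRange invert(long low, boolean lowerInclusive, long high, boolean upperInclusive) {
        long newHigh = lowerInclusive ? low + 1 : low;
        long newLow = upperInclusive ? high : high + 1;
        return TimeRange.between(newLow, newHigh);
    }

    public static void main(String[] args) {
        // A DESC key range whose bounds decode to 20 and 10, both inclusive,
        // becomes the time range [10, 21).
        TimeRange tr = invert(20, true, 10, true);
        System.out.println(tr.getMin() + ".." + tr.getMax()); // prints 10..21
    }
}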

Example 17 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.

Class StoreFileScanner, method shouldUseScanner.

@Override
public boolean shouldUseScanner(Scan scan, HStore store, long oldestUnexpiredTS) {
    // Prefer a column-family-specific time range if the scan carries one;
    // otherwise fall back to the scan-wide time range.
    byte[] cf = store.getColumnFamilyDescriptor().getName();
    TimeRange timeRange = scan.getColumnFamilyTimeRange().get(cf);
    if (timeRange == null) {
        timeRange = scan.getTimeRange();
    }
    // The store file is worth scanning only if it passes the time range,
    // key range, and bloom filter checks.
    return reader.passesTimerangeFilter(timeRange, oldestUnexpiredTS)
        && reader.passesKeyRangeFilter(scan)
        && reader.passesBloomFilter(scan, scan.getFamilyMap().get(cf));
}
Also used: TimeRange(org.apache.hadoop.hbase.io.TimeRange)
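
A minimal sketch of the client side of this lookup, assuming hypothetical family names and timestamp bounds; the per-family range set here is exactly what shouldUseScanner retrieves:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class CfTimeRangeSketch {
    static Scan buildScan() throws IOException {
        Scan scan = new Scan();
        // Scan-wide default: consider cells with timestamps in [0, 1000).
        scan.setTimeRange(0L, 1000L);
        // Narrower range for cf1 only; other families keep the default.
        scan.setColumnFamilyTimeRange(Bytes.toBytes("cf1"), 500L, 800L);
        return scan;
    }
}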

Example 18 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.

Class ProtobufUtil, method toAppend.

/**
 * Convert a protocol buffer Mutate to an Append
 * @param cellScanner the cells of the mutation, when they are sent alongside the proto
 * @param proto the protocol buffer Mutate to convert
 * @return the converted client Append
 * @throws IOException if the protocol buffer cannot be converted
 */
public static Append toAppend(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    MutationType type = proto.getMutateType();
    assert type == MutationType.APPEND : type.name();
    Append append = toDelta((Bytes row) -> new Append(row.get(), row.getOffset(), row.getLength()),
        Append::add, proto, cellScanner);
    if (proto.hasTimeRange()) {
        TimeRange timeRange = toTimeRange(proto.getTimeRange());
        append.setTimeRange(timeRange.getMin(), timeRange.getMax());
    }
    return append;
}
Also used: Bytes(org.apache.hadoop.hbase.util.Bytes), TimeRange(org.apache.hadoop.hbase.io.TimeRange), MutationType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType), Append(org.apache.hadoop.hbase.client.Append)
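
A minimal sketch of the client-side Append this method reconstructs; the row, family, qualifier, value, and timestamp bounds are hypothetical:

import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.util.Bytes;

public class AppendTimeRangeSketch {
    static Append buildAppend() {
        Append append = new Append(Bytes.toBytes("row-1"));
        append.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("-suffix"));
        // Only read the current cell for the append if its timestamp falls in
        // [100, 200); this is the range proto.getTimeRange() carries.
        append.setTimeRange(100L, 200L);
        return append;
    }
}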

Example 19 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.

Class ProtobufUtil, method toScan.

/**
 * Convert a protocol buffer Scan to a client Scan
 *
 * @param proto the protocol buffer Scan to convert
 * @return the converted client Scan
 * @throws IOException if the protocol buffer cannot be converted
 */
public static Scan toScan(final ClientProtos.Scan proto) throws IOException {
    byte[] startRow = HConstants.EMPTY_START_ROW;
    byte[] stopRow = HConstants.EMPTY_END_ROW;
    boolean includeStartRow = true;
    boolean includeStopRow = false;
    if (proto.hasStartRow()) {
        startRow = proto.getStartRow().toByteArray();
    }
    if (proto.hasStopRow()) {
        stopRow = proto.getStopRow().toByteArray();
    }
    if (proto.hasIncludeStartRow()) {
        includeStartRow = proto.getIncludeStartRow();
    }
    if (proto.hasIncludeStopRow()) {
        includeStopRow = proto.getIncludeStopRow();
    } else {
        // Old client without this flag: treat start == stop as a Get.
        if (ClientUtil.areScanStartRowAndStopRowEqual(startRow, stopRow)) {
            includeStopRow = true;
        }
    }
    Scan scan = new Scan()
        .withStartRow(startRow, includeStartRow)
        .withStopRow(stopRow, includeStopRow);
    if (proto.hasCacheBlocks()) {
        scan.setCacheBlocks(proto.getCacheBlocks());
    }
    if (proto.hasMaxVersions()) {
        scan.readVersions(proto.getMaxVersions());
    }
    if (proto.hasStoreLimit()) {
        scan.setMaxResultsPerColumnFamily(proto.getStoreLimit());
    }
    if (proto.hasStoreOffset()) {
        scan.setRowOffsetPerColumnFamily(proto.getStoreOffset());
    }
    if (proto.hasLoadColumnFamiliesOnDemand()) {
        scan.setLoadColumnFamiliesOnDemand(proto.getLoadColumnFamiliesOnDemand());
    }
    if (proto.getCfTimeRangeCount() > 0) {
        for (HBaseProtos.ColumnFamilyTimeRange cftr : proto.getCfTimeRangeList()) {
            TimeRange timeRange = toTimeRange(cftr.getTimeRange());
            scan.setColumnFamilyTimeRange(cftr.getColumnFamily().toByteArray(), timeRange.getMin(), timeRange.getMax());
        }
    }
    if (proto.hasTimeRange()) {
        TimeRange timeRange = toTimeRange(proto.getTimeRange());
        scan.setTimeRange(timeRange.getMin(), timeRange.getMax());
    }
    if (proto.hasFilter()) {
        FilterProtos.Filter filter = proto.getFilter();
        scan.setFilter(ProtobufUtil.toFilter(filter));
    }
    if (proto.hasBatchSize()) {
        scan.setBatch(proto.getBatchSize());
    }
    if (proto.hasMaxResultSize()) {
        scan.setMaxResultSize(proto.getMaxResultSize());
    }
    if (proto.hasAllowPartialResults()) {
        scan.setAllowPartialResults(proto.getAllowPartialResults());
    }
    for (NameBytesPair attribute : proto.getAttributeList()) {
        scan.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    if (proto.getColumnCount() > 0) {
        for (Column column : proto.getColumnList()) {
            byte[] family = column.getFamily().toByteArray();
            if (column.getQualifierCount() > 0) {
                for (ByteString qualifier : column.getQualifierList()) {
                    scan.addColumn(family, qualifier.toByteArray());
                }
            } else {
                scan.addFamily(family);
            }
        }
    }
    if (proto.hasReversed()) {
        scan.setReversed(proto.getReversed());
    }
    if (proto.hasConsistency()) {
        scan.setConsistency(toConsistency(proto.getConsistency()));
    }
    if (proto.hasCaching()) {
        scan.setCaching(proto.getCaching());
    }
    if (proto.hasMvccReadPoint()) {
        PackagePrivateFieldAccessor.setMvccReadPoint(scan, proto.getMvccReadPoint());
    }
    if (proto.hasReadType()) {
        scan.setReadType(toReadType(proto.getReadType()));
    }
    if (proto.getNeedCursorResult()) {
        scan.setNeedCursorResult(true);
    }
    return scan;
}
Also used: TimeRange(org.apache.hadoop.hbase.io.TimeRange), NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair), Column(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column), ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString), FilterProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos), Scan(org.apache.hadoop.hbase.client.Scan), HBaseProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos)
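
A brief round-trip sketch, assuming the companion ProtobufUtil.toScan(Scan) serializer from the same class; the row keys and timestamp bounds are hypothetical:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanRoundTripSketch {
    // Serialize a client Scan to its protobuf form and convert it back;
    // the time range should survive the round trip.
    static Scan roundTrip() throws IOException {
        Scan scan = new Scan()
            .withStartRow(Bytes.toBytes("a"), true)
            .withStopRow(Bytes.toBytes("z"), false);
        scan.setTimeRange(100L, 200L);
        ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
        return ProtobufUtil.toScan(proto);
    }
}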

Example 20 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.

Class ProtobufUtil, method toCheckAndMutate.

public static CheckAndMutate toCheckAndMutate(ClientProtos.Condition condition, List<Mutation> mutations) throws IOException {
    assert mutations.size() > 0;
    byte[] row = condition.getRow().toByteArray();
    CheckAndMutate.Builder builder = CheckAndMutate.newBuilder(row);
    Filter filter = condition.hasFilter() ? ProtobufUtil.toFilter(condition.getFilter()) : null;
    if (filter != null) {
        builder.ifMatches(filter);
    } else {
        builder.ifMatches(condition.getFamily().toByteArray(),
            condition.getQualifier().toByteArray(),
            CompareOperator.valueOf(condition.getCompareType().name()),
            ProtobufUtil.toComparator(condition.getComparator()).getValue());
    }
    TimeRange timeRange = condition.hasTimeRange()
        ? ProtobufUtil.toTimeRange(condition.getTimeRange())
        : TimeRange.allTime();
    builder.timeRange(timeRange);
    try {
        if (mutations.size() == 1) {
            Mutation m = mutations.get(0);
            if (m instanceof Put) {
                return builder.build((Put) m);
            } else if (m instanceof Delete) {
                return builder.build((Delete) m);
            } else if (m instanceof Increment) {
                return builder.build((Increment) m);
            } else if (m instanceof Append) {
                return builder.build((Append) m);
            } else {
                throw new DoNotRetryIOException("Unsupported mutate type: " + m.getClass().getSimpleName().toUpperCase());
            }
        } else {
            return builder.build(new RowMutations(mutations.get(0).getRow()).add(mutations));
        }
    } catch (IllegalArgumentException e) {
        throw new DoNotRetryIOException(e.getMessage());
    }
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete), DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException), CheckAndMutate(org.apache.hadoop.hbase.client.CheckAndMutate), Put(org.apache.hadoop.hbase.client.Put), RowMutations(org.apache.hadoop.hbase.client.RowMutations), TimeRange(org.apache.hadoop.hbase.io.TimeRange), Append(org.apache.hadoop.hbase.client.Append), Filter(org.apache.hadoop.hbase.filter.Filter), Increment(org.apache.hadoop.hbase.client.Increment), Mutation(org.apache.hadoop.hbase.client.Mutation)
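
A minimal sketch of the client-side CheckAndMutate that this method reconstructs from a protobuf Condition; the row, family, qualifier, values, and time range bounds are hypothetical:

import org.apache.hadoop.hbase.client.CheckAndMutate;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndMutateSketch {
    static CheckAndMutate buildCheckAndMutate() {
        byte[] row = Bytes.toBytes("row-1");
        byte[] family = Bytes.toBytes("cf");
        byte[] qualifier = Bytes.toBytes("q");
        Put put = new Put(row).addColumn(family, qualifier, Bytes.toBytes("new-value"));
        return CheckAndMutate.newBuilder(row)
            // Mutate only if the current value equals "expected"...
            .ifEquals(family, qualifier, Bytes.toBytes("expected"))
            // ...considering only cells with timestamps in [100, 200).
            .timeRange(TimeRange.between(100L, 200L))
            .build(put);
    }
}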

Aggregations

TimeRange (org.apache.hadoop.hbase.io.TimeRange): 45
Test (org.junit.Test): 11
Map (java.util.Map): 10
Get (org.apache.hadoop.hbase.client.Get): 10
Scan (org.apache.hadoop.hbase.client.Scan): 10
Cell (org.apache.hadoop.hbase.Cell): 8
NavigableSet (java.util.NavigableSet): 7
NameBytesPair (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair): 7
HashMap (java.util.HashMap): 6
Filter (org.apache.hadoop.hbase.filter.Filter): 6
NameBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair): 6
ByteString (com.google.protobuf.ByteString): 5
ArrayList (java.util.ArrayList): 5
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 5
Put (org.apache.hadoop.hbase.client.Put): 5
List (java.util.List): 4
Increment (org.apache.hadoop.hbase.client.Increment): 4
Result (org.apache.hadoop.hbase.client.Result): 4
Column (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column): 4
Column (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column): 4