Search in sources :

Example 1 with EncodedColumnQualiferCellsList

Use of org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList in the Apache Phoenix project.

From the class GroupedAggregateRegionObserver, method scanOrdered.

/**
 * Used for an aggregate query in which the key order matches the group by key order. In this
 * case, we can do the aggregation as we scan, by detecting when the group by key changes.
 *
 * @param c observer context providing access to the region environment
 * @param scan the scan being served; also carries custom log annotations
 * @param scanner the underlying scanner producing raw cells in key order
 * @param expressions the group-by key expressions, concatenated per row to form the group key
 * @param aggregators server-side aggregators evaluated once per input row
 * @param limit maximum number of rows to emit before stopping
 * @throws IOException if the underlying scanner fails
 */
private RegionScanner scanOrdered(final ObserverContext<RegionCoprocessorEnvironment> c, final Scan scan, final RegionScanner scanner, final List<Expression> expressions, final ServerAggregators aggregators, final long limit) throws IOException {
    if (logger.isDebugEnabled()) {
        logger.debug(LogUtil.addCustomAnnotations("Grouped aggregation over ordered rows with scan " + scan + ", group by " + expressions + ", aggregators " + aggregators, ScanUtil.getCustomAnnotations(scan)));
    }
    final Pair<Integer, Integer> minMaxQualifiers = EncodedColumnsUtil.getMinMaxQualifiersFromScan(scan);
    final boolean useQualifierAsIndex = EncodedColumnsUtil.useQualifierAsIndex(minMaxQualifiers);
    return new BaseRegionScanner(scanner) {

        // Number of aggregated rows emitted so far; compared against limit.
        private long rowCount = 0;

        // Concatenated group-by key of the group currently being aggregated;
        // null before the first row and after the scan is exhausted.
        private ImmutableBytesPtr currentKey = null;

        @Override
        public boolean next(List<Cell> results) throws IOException {
            boolean hasMore;
            boolean atLimit;
            boolean aggBoundary = false;
            Tuple result = useQualifierAsIndex ? new PositionBasedMultiKeyValueTuple() : new MultiKeyValueTuple();
            ImmutableBytesPtr key = null;
            Aggregator[] rowAggregators = aggregators.getAggregators();
            // If we're calculating no aggregate functions, we can exit at the
            // start of a new row. Otherwise, we have to wait until an agg
            // boundary is detected.
            int countOffset = rowAggregators.length == 0 ? 1 : 0;
            Region region = c.getEnvironment().getRegion();
            boolean acquiredLock = false;
            try {
                region.startRegionOperation();
                acquiredLock = true;
                synchronized (scanner) {
                    do {
                        // Choose a cell container matching the tuple implementation chosen above.
                        List<Cell> kvs = useQualifierAsIndex ? new EncodedColumnQualiferCellsList(minMaxQualifiers.getFirst(), minMaxQualifiers.getSecond(), encodingScheme) : new ArrayList<Cell>();
                        // Results are potentially returned even when the return
                        // value of s.next is false
                        // since this is an indication of whether or not there
                        // are more values after the
                        // ones returned
                        hasMore = scanner.nextRaw(kvs);
                        if (!kvs.isEmpty()) {
                            result.setKeyValues(kvs);
                            key = TupleUtil.getConcatenatedValue(result, expressions);
                            // A group boundary is hit when the new row's group-by key
                            // differs from the key of the group being aggregated.
                            aggBoundary = currentKey != null && currentKey.compareTo(key) != 0;
                            if (!aggBoundary) {
                                aggregators.aggregate(rowAggregators, result);
                                if (logger.isDebugEnabled()) {
                                    logger.debug(LogUtil.addCustomAnnotations("Row passed filters: " + kvs + ", aggregated values: " + Arrays.asList(rowAggregators), ScanUtil.getCustomAnnotations(scan)));
                                }
                                currentKey = key;
                            }
                        }
                        atLimit = rowCount + countOffset >= limit;
                    // Do rowCount + 1 b/c we don't have to wait for a complete
                    // row in the case of a DISTINCT with a LIMIT
                    } while (hasMore && !aggBoundary && !atLimit);
                }
            } finally {
                // Only release the region operation if it was successfully acquired.
                if (acquiredLock)
                    region.closeRegionOperation();
            }
            if (currentKey != null) {
                // Emit one KeyValue carrying the serialized aggregate values for the
                // group that just completed (or the final group at end of scan).
                byte[] value = aggregators.toBytes(rowAggregators);
                KeyValue keyValue = KeyValueUtil.newKeyValue(currentKey.get(), currentKey.getOffset(), currentKey.getLength(), SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value, 0, value.length);
                results.add(keyValue);
                if (logger.isDebugEnabled()) {
                    logger.debug(LogUtil.addCustomAnnotations("Adding new aggregate row: " + keyValue + ",for current key " + Bytes.toStringBinary(currentKey.get(), currentKey.getOffset(), currentKey.getLength()) + ", aggregated values: " + Arrays.asList(rowAggregators), ScanUtil.getCustomAnnotations(scan)));
                }
                // If we stopped on a group boundary, start aggregating the next group
                // with the row that triggered the boundary (that row is not part of
                // the returned result).
                if (aggBoundary) {
                    aggregators.reset(rowAggregators);
                    aggregators.aggregate(rowAggregators, result);
                    currentKey = key;
                    rowCount++;
                    atLimit |= rowCount >= limit;
                }
            }
            // Continue if there are more
            if (!atLimit && (hasMore || aggBoundary)) {
                return true;
            }
            currentKey = null;
            return false;
        }
    };
}
Also used : EncodedColumnQualiferCellsList(org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList) KeyValue(org.apache.hadoop.hbase.KeyValue) ImmutableBytesPtr(org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) Aggregator(org.apache.phoenix.expression.aggregator.Aggregator) PInteger(org.apache.phoenix.schema.types.PInteger) PositionBasedMultiKeyValueTuple(org.apache.phoenix.schema.tuple.PositionBasedMultiKeyValueTuple) MultiKeyValueTuple(org.apache.phoenix.schema.tuple.MultiKeyValueTuple) PositionBasedMultiKeyValueTuple(org.apache.phoenix.schema.tuple.PositionBasedMultiKeyValueTuple) Region(org.apache.hadoop.hbase.regionserver.Region) EncodedColumnQualiferCellsList(org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList) List(java.util.List) ArrayList(java.util.ArrayList) Cell(org.apache.hadoop.hbase.Cell) MultiKeyValueTuple(org.apache.phoenix.schema.tuple.MultiKeyValueTuple) Tuple(org.apache.phoenix.schema.tuple.Tuple) PositionBasedMultiKeyValueTuple(org.apache.phoenix.schema.tuple.PositionBasedMultiKeyValueTuple)

Example 2 with EncodedColumnQualiferCellsList

use of org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList in project phoenix by apache.

From the class RegionScannerResultIterator, method next.

@Override
public Tuple next() throws SQLException {
    // stopRegionOperation — serialize raw access to the shared scanner
    synchronized (scanner) {
        try {
            // TODO: size
            List<Cell> cells;
            if (useQualifierAsIndex) {
                cells = new EncodedColumnQualiferCellsList(minMaxQualifiers.getFirst(), minMaxQualifiers.getSecond(), encodingScheme);
            } else {
                cells = new ArrayList<Cell>();
            }
            // nextRaw may hand back cells even when it returns false: the return
            // value only signals whether more rows exist after the ones returned.
            boolean moreRows = scanner.nextRaw(cells);
            if (!moreRows && cells.isEmpty()) {
                return null;
            }
            // We instantiate a new tuple because in all cases currently we hang on to it
            // (i.e. to compute and hold onto the TopN).
            Tuple tuple;
            if (useQualifierAsIndex) {
                tuple = new PositionBasedMultiKeyValueTuple();
            } else {
                tuple = new MultiKeyValueTuple();
            }
            tuple.setKeyValues(cells);
            return tuple;
        } catch (IOException e) {
            throw ServerUtil.parseServerException(e);
        }
    }
}
Also used : EncodedColumnQualiferCellsList(org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList) PositionBasedMultiKeyValueTuple(org.apache.phoenix.schema.tuple.PositionBasedMultiKeyValueTuple) PositionBasedMultiKeyValueTuple(org.apache.phoenix.schema.tuple.PositionBasedMultiKeyValueTuple) MultiKeyValueTuple(org.apache.phoenix.schema.tuple.MultiKeyValueTuple) IOException(java.io.IOException) Cell(org.apache.hadoop.hbase.Cell) PositionBasedMultiKeyValueTuple(org.apache.phoenix.schema.tuple.PositionBasedMultiKeyValueTuple) MultiKeyValueTuple(org.apache.phoenix.schema.tuple.MultiKeyValueTuple) Tuple(org.apache.phoenix.schema.tuple.Tuple)

Example 3 with EncodedColumnQualiferCellsList

use of org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList in project phoenix by apache.

From the class EncodedColumnQualifierCellsListTest, method testListIteratorSet.

@Test
public void testListIteratorSet() {
    EncodedColumnQualiferCellsList list = new EncodedColumnQualiferCellsList(11, 16, FOUR_BYTE_QUALIFIERS);
    Cell[] array = new Cell[7];
    populateListAndArray(list, array);
    // This cell is KeyValue.createFirstOnRow(row, cf, getEncodedColumnQualifier(12))
    final Cell validCell = array[4];
    // This cell is KeyValue.createFirstOnRow(row, cf, getEncodedColumnQualifier(14))
    final Cell invalidCell = array[5];
    String validCellName = "Valid Cell";
    String invalidCellName = "Invalid Cell";
    Cell validReplacementCell = new DelegateCell(validCell, validCellName);
    Cell invalidReplacementCell = new DelegateCell(invalidCell, invalidCellName);
    ListIterator<Cell> itr = list.listIterator();
    for (int pos = 0; itr.hasNext(); pos++) {
        Cell c = itr.next();
        if (pos == 4) {
            // Replacing a cell at a position its qualifier maps to is allowed.
            itr.set(validReplacementCell);
        } else if (pos == 6) {
            // Replacing with a cell whose qualifier maps elsewhere must be rejected.
            try {
                itr.set(invalidReplacementCell);
                fail("This should have failed since " + invalidReplacementCell + " cannot be added where " + c + " is.");
            } catch (IllegalArgumentException expected) {
            }
        }
    }
    // Assert that the valid cell was added and invalid cell wasn't.
    itr = list.listIterator();
    for (int pos = 0; itr.hasNext(); pos++) {
        Cell c = itr.next();
        if (pos == 4) {
            assertEquals(validCellName, c.toString());
        } else if (pos == 6) {
            assertNotEquals(invalidCellName, c.toString());
        }
    }
}
Also used : EncodedColumnQualiferCellsList(org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 4 with EncodedColumnQualiferCellsList

use of org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList in project phoenix by apache.

From the class EncodedColumnQualifierCellsListTest, method testFailFastListIterator.

@Test
public void testFailFastListIterator() throws Exception {
    // Out-of-band add() must invalidate a live iterator.
    EncodedColumnQualiferCellsList list = new EncodedColumnQualiferCellsList(11, 16, FOUR_BYTE_QUALIFIERS);
    populateList(list);
    ListIterator<Cell> itr = list.listIterator();
    itr.next();
    list.add(KeyValue.createFirstOnRow(row, cf, FOUR_BYTE_QUALIFIERS.encode(0)));
    expectFailFastOnNext(itr);
    // Out-of-band remove() must also invalidate a live iterator, while removal
    // through the iterator itself must not.
    list = new EncodedColumnQualiferCellsList(11, 16, FOUR_BYTE_QUALIFIERS);
    populateList(list);
    itr = list.listIterator();
    itr.next();
    itr.next();
    itr.remove();
    itr.next();
    list.remove(KeyValue.createFirstOnRow(row, cf, FOUR_BYTE_QUALIFIERS.encode(0)));
    expectFailFastOnNext(itr);
}

// Asserts that next() on an iterator whose backing list was structurally
// modified without going through the iterator fails fast with a CME.
private void expectFailFastOnNext(ListIterator<Cell> itr) {
    try {
        itr.next();
        fail("ConcurrentModificationException should have been thrown as the list was modified without using iterator");
    } catch (ConcurrentModificationException expected) {
    }
}
Also used : EncodedColumnQualiferCellsList(org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList) ConcurrentModificationException(java.util.ConcurrentModificationException) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 5 with EncodedColumnQualiferCellsList

use of org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList in project phoenix by apache.

From the class EncodedColumnQualifierCellsListTest, method testListIterator.

@Test
public void testListIterator() throws Exception {
    // Verifies ListIterator traversal order and remove() semantics over an
    // EncodedColumnQualiferCellsList populated with 7 cells.
    EncodedColumnQualiferCellsList list = new EncodedColumnQualiferCellsList(11, 16, FOUR_BYTE_QUALIFIERS);
    Cell[] cells = new Cell[7];
    populateListAndArray(list, cells);
    ListIterator<Cell> itr = list.listIterator();
    assertTrue(itr.hasNext());
    // test itr.next(): iteration order must match the populated array order.
    // (i is declared here at first use; the original dead-store initialization
    // before populateListAndArray was removed.)
    int i = 0;
    while (itr.hasNext()) {
        assertEquals(cells[i++], itr.next());
    }
    assertEquals(7, list.size());
    // test itr.remove()
    itr = list.listIterator();
    i = 0;
    int numRemoved = 0;
    // remove() before any next() must be rejected per the ListIterator contract.
    try {
        itr.remove();
        fail("Remove not allowed till next() is called");
    } catch (IllegalStateException expected) {
    }
    while (itr.hasNext()) {
        assertEquals(cells[i++], itr.next());
        itr.remove();
        numRemoved++;
    }
    assertEquals("Number of elements removed should have been the size of the list", 7, numRemoved);
    assertTrue(list.isEmpty());
}
Also used : EncodedColumnQualiferCellsList(org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Aggregations

EncodedColumnQualiferCellsList (org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList)26 Test (org.junit.Test)22 Cell (org.apache.hadoop.hbase.Cell)21 ArrayList (java.util.ArrayList)4 MultiKeyValueTuple (org.apache.phoenix.schema.tuple.MultiKeyValueTuple)4 PositionBasedMultiKeyValueTuple (org.apache.phoenix.schema.tuple.PositionBasedMultiKeyValueTuple)4 Tuple (org.apache.phoenix.schema.tuple.Tuple)4 Iterator (java.util.Iterator)3 ListIterator (java.util.ListIterator)3 Region (org.apache.hadoop.hbase.regionserver.Region)3 Aggregator (org.apache.phoenix.expression.aggregator.Aggregator)3 ConcurrentModificationException (java.util.ConcurrentModificationException)2 List (java.util.List)2 Configuration (org.apache.hadoop.conf.Configuration)2 KeyValue (org.apache.hadoop.hbase.KeyValue)2 RegionCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment)2 RegionScanner (org.apache.hadoop.hbase.regionserver.RegionScanner)2 ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr)2 IOException (java.io.IOException)1 SQLException (java.sql.SQLException)1