Example 41 with Tuple

use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

the class HashCacheClient method serialize.

private void serialize(ImmutableBytesWritable ptr, ResultIterator iterator, long estimatedSize, List<Expression> onExpressions, boolean singleValueOnly, Expression keyRangeRhsExpression, List<Expression> keyRangeRhsValues) throws SQLException {
    long maxSize = serverCache.getConnection().getQueryServices().getProps().getLong(QueryServices.MAX_SERVER_CACHE_SIZE_ATTRIB, QueryServicesOptions.DEFAULT_MAX_SERVER_CACHE_SIZE);
    estimatedSize = Math.min(estimatedSize, maxSize);
    if (estimatedSize > Integer.MAX_VALUE) {
        throw new IllegalStateException("Estimated size(" + estimatedSize + ") must not be greater than Integer.MAX_VALUE(" + Integer.MAX_VALUE + ")");
    }
    try {
        TrustedByteArrayOutputStream baOut = new TrustedByteArrayOutputStream((int) estimatedSize);
        DataOutputStream out = new DataOutputStream(baOut);
        // Write onExpressions first, for hash key evaluation along with deserialization
        out.writeInt(onExpressions.size());
        for (Expression expression : onExpressions) {
            WritableUtils.writeVInt(out, ExpressionType.valueOf(expression).ordinal());
            expression.write(out);
        }
        int exprSize = baOut.size() + Bytes.SIZEOF_INT;
        out.writeInt(exprSize * (singleValueOnly ? -1 : 1));
        int nRows = 0;
        // Written as a placeholder; overwritten with the total row count once iteration finishes.
        out.writeInt(nRows);
        ImmutableBytesWritable tempPtr = new ImmutableBytesWritable();
        for (Tuple result = iterator.next(); result != null; result = iterator.next()) {
            TupleUtil.write(result, out);
            if (baOut.size() > maxSize) {
                throw new MaxServerCacheSizeExceededException("Size of hash cache (" + baOut.size() + " bytes) exceeds the maximum allowed size (" + maxSize + " bytes)");
            }
            // Evaluate key expressions for hash join key range optimization.
            if (keyRangeRhsExpression != null) {
                keyRangeRhsValues.add(evaluateKeyExpression(keyRangeRhsExpression, result, tempPtr));
            }
            nRows++;
        }
        TrustedByteArrayOutputStream sizeOut = new TrustedByteArrayOutputStream(Bytes.SIZEOF_INT);
        DataOutputStream dataOut = new DataOutputStream(sizeOut);
        try {
            dataOut.writeInt(nRows);
            dataOut.flush();
            byte[] cache = baOut.getBuffer();
            // Replace number of rows written above with the correct value.
            System.arraycopy(sizeOut.getBuffer(), 0, cache, exprSize, sizeOut.size());
            // Compress the serialized cache with Snappy before handing it to ptr.
            int maxCompressedSize = Snappy.maxCompressedLength(baOut.size());
            // Allocate for the worst case; only the compressedSize prefix is used.
            byte[] compressed = new byte[maxCompressedSize];
            int compressedSize = Snappy.compress(baOut.getBuffer(), 0, baOut.size(), compressed, 0);
            // Point ptr at just the compressed bytes.
            ptr.set(compressed, 0, compressedSize);
        } finally {
            dataOut.close();
        }
    } catch (IOException e) {
        throw ServerUtil.parseServerException(e);
    } finally {
        iterator.close();
    }
}
Also used : ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) Expression(org.apache.phoenix.expression.Expression) RowValueConstructorExpression(org.apache.phoenix.expression.RowValueConstructorExpression) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) DataOutputStream(java.io.DataOutputStream) TrustedByteArrayOutputStream(org.apache.phoenix.util.TrustedByteArrayOutputStream) IOException(java.io.IOException) Tuple(org.apache.phoenix.schema.tuple.Tuple)
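Two details in serialize are worth calling out. First, nRows is written as a placeholder before iteration and backpatched once the true count is known; Phoenix writes the count into a second TrustedByteArrayOutputStream and copies it over the placeholder with System.arraycopy, which works because getBuffer() exposes the stream's internal array without copying. A minimal sketch of the same backpatch pattern using only JDK classes (the class and method names here are hypothetical, not Phoenix APIs):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class BackpatchSketch {
    static byte[] serializeWithCount(int[] rows) throws IOException {
        ByteArrayOutputStream baOut = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(baOut);
        int countOffset = baOut.size();   // remember where the placeholder starts
        out.writeInt(0);                  // placeholder, like out.writeInt(nRows) above
        int nRows = 0;
        for (int row : rows) {
            out.writeInt(row);
            nRows++;
        }
        out.flush();
        byte[] buffer = baOut.toByteArray();
        // Overwrite the placeholder with the real count; ByteBuffer is big-endian
        // by default, matching what DataOutputStream wrote.
        ByteBuffer.wrap(buffer, countOffset, Integer.BYTES).putInt(nRows);
        return buffer;
    }
}

Second, the whole buffer is Snappy-compressed before ptr is set, so the server cache receives the compressed payload and the worst-case compressed length is allocated up front.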

Example 42 with Tuple

use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

the class NonAggregateRegionScannerFactory method getTopNScanner.

/**
 *  Returns a region scanner that does TopN.
 *  We only need to call startRegionOperation and closeRegionOperation when
 *  getting the first Tuple (which forces running through the entire region),
 *  since after this everything is held in memory.
 */
private RegionScanner getTopNScanner(RegionCoprocessorEnvironment env, final RegionScanner s, final OrderedResultIterator iterator, ImmutableBytesPtr tenantId) throws Throwable {
    final Tuple firstTuple;
    TenantCache tenantCache = GlobalCache.getTenantCache(env, tenantId);
    long estSize = iterator.getEstimatedByteSize();
    final MemoryManager.MemoryChunk chunk = tenantCache.getMemoryManager().allocate(estSize);
    final Region region = getRegion();
    region.startRegionOperation();
    try {
        // Once we return from the first call to next, we've run through and cached
        // the topN rows, so we no longer need to start/stop a region operation.
        firstTuple = iterator.next();
        // Now that the topN are cached, we can resize based on the real size
        long actualSize = iterator.getByteSize();
        chunk.resize(actualSize);
    } catch (Throwable t) {
        ServerUtil.throwIOException(region.getRegionInfo().getRegionNameAsString(), t);
        return null;
    } finally {
        region.closeRegionOperation();
    }
    return new BaseRegionScanner(s) {

        private Tuple tuple = firstTuple;

        @Override
        public boolean isFilterDone() {
            return tuple == null;
        }

        @Override
        public boolean next(List<Cell> results) throws IOException {
            try {
                if (isFilterDone()) {
                    return false;
                }
                for (int i = 0; i < tuple.size(); i++) {
                    results.add(tuple.getValue(i));
                }
                tuple = iterator.next();
                return !isFilterDone();
            } catch (Throwable t) {
                ServerUtil.throwIOException(region.getRegionInfo().getRegionNameAsString(), t);
                return false;
            }
        }

        @Override
        public void close() throws IOException {
            try {
                s.close();
            } finally {
                try {
                    if (iterator != null) {
                        iterator.close();
                    }
                } catch (SQLException e) {
                    ServerUtil.throwIOException(region.getRegionInfo().getRegionNameAsString(), e);
                } finally {
                    chunk.close();
                }
            }
        }
    };
}
Also used : TenantCache(org.apache.phoenix.cache.TenantCache) SQLException(java.sql.SQLException) Region(org.apache.hadoop.hbase.regionserver.Region) BaseRegionScanner(org.apache.phoenix.coprocessor.BaseRegionScanner) List(java.util.List) ArrayList(java.util.ArrayList) MemoryManager(org.apache.phoenix.memory.MemoryManager) Tuple(org.apache.phoenix.schema.tuple.Tuple) ResultTuple(org.apache.phoenix.schema.tuple.ResultTuple)
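The materialize-then-serve pattern above is why the region operation only brackets the first iterator.next() call: that call runs the OrderedResultIterator over the whole region and caches the TopN rows, after which the returned scanner reads purely from memory. A compact sketch of the same idea with plain JDK types (hypothetical names, no HBase or Phoenix APIs):

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

final class TopNSketch {
    // Drain the source eagerly (the "first next()" step), sort, keep the top n;
    // every later call only reads the in-memory list, as in getTopNScanner.
    static Iterator<Integer> topN(Iterator<Integer> source, int n) {
        List<Integer> buffered = new ArrayList<>();
        while (source.hasNext()) {
            buffered.add(source.next());
        }
        Collections.sort(buffered);
        return buffered.subList(0, Math.min(n, buffered.size())).iterator();
    }
}

The chunk.resize(actualSize) call follows the same logic: memory is reserved against the estimate before the work, then trimmed to the real footprint once the rows are cached.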

Example 43 with Tuple

use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

the class FilterResultIterator method advance.

@Override
protected Tuple advance() throws SQLException {
    Tuple next;
    do {
        next = delegate.next();
        expression.reset();
    } while (next != null && (!expression.evaluate(next, ptr) || ptr.getLength() == 0 || !Boolean.TRUE.equals(expression.getDataType().toObject(ptr))));
    return next;
}
Also used : Tuple(org.apache.phoenix.schema.tuple.Tuple)
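advance() pulls from the delegate until the filter expression evaluates to TRUE, treating a failed evaluation or an empty result as a non-match; returning null signals exhaustion. The same loop in generic form, as a sketch over JDK types rather than Phoenix's Expression machinery:

import java.util.Iterator;
import java.util.function.Predicate;

final class FilteringIterator<T> {
    private final Iterator<T> delegate;
    private final Predicate<T> filter;

    FilteringIterator(Iterator<T> delegate, Predicate<T> filter) {
        this.delegate = delegate;
        this.filter = filter;
    }

    // Mirrors advance(): skip rows until one passes the filter or input is exhausted.
    T advance() {
        while (delegate.hasNext()) {
            T next = delegate.next();
            if (filter.test(next)) {
                return next;
            }
        }
        return null; // like the null Tuple that ends iteration above
    }
}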

Example 44 with Tuple

use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

the class ConcatResultIteratorTest method testConcat.

@Test
public void testConcat() throws Throwable {
    Tuple[] results1 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    Tuple[] results2 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(2))) };
    Tuple[] results3 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(3))), new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(4))) };
    final List<PeekingResultIterator> results = Arrays.asList(new PeekingResultIterator[] { new MaterializedResultIterator(Arrays.asList(results1)), new MaterializedResultIterator(Arrays.asList(results2)), new MaterializedResultIterator(Arrays.asList(results3)) });
    ResultIterators iterators = new MaterializedResultIterators(results);
    Tuple[] expectedResults = new Tuple[] { new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))), new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(2))), new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(3))), new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(4))) };
    ResultIterator scanner = new ConcatResultIterator(iterators);
    AssertResults.assertResults(scanner, expectedResults);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) SingleKeyValueTuple(org.apache.phoenix.schema.tuple.SingleKeyValueTuple) Tuple(org.apache.phoenix.schema.tuple.Tuple) Test(org.junit.Test)
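Note what the expected results assert: the sources are drained strictly in sequence, so key A reappears after B in the output. ConcatResultIterator preserves each source's internal order but does no merge-sorting across sources. A minimal sketch of that concatenation behavior over JDK iterators (a hypothetical helper, not the Phoenix class):

import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

final class ConcatSketch {
    // Chain iterators end to end, draining each one fully before moving on.
    static <T> Iterator<T> concat(List<Iterator<T>> sources) {
        Iterator<Iterator<T>> outer = sources.iterator();
        return new Iterator<T>() {
            private Iterator<T> current = outer.hasNext() ? outer.next() : null;

            @Override
            public boolean hasNext() {
                while (current != null && !current.hasNext()) {
                    current = outer.hasNext() ? outer.next() : null;
                }
                return current != null;
            }

            @Override
            public T next() {
                if (!hasNext()) {
                    throw new NoSuchElementException();
                }
                return current.next();
            }
        };
    }
}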

Example 45 with Tuple

use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

the class RowKeyOrderedAggregateResultIteratorTest method testNoSpan.

@Test
public void testNoSpan() throws Exception {
    Tuple[] results1 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1L))) };
    Tuple[] results2 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(2L))) };
    Tuple[] results3 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(C, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(3L))), new SingleKeyValueTuple(new KeyValue(D, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(4L))) };
    final List<PeekingResultIterator> results = Arrays.asList(new PeekingResultIterator[] { new MaterializedResultIterator(Arrays.asList(results1)), new MaterializedResultIterator(Arrays.asList(results2)), new MaterializedResultIterator(Arrays.asList(results3)) });
    ResultIterators iterators = new MaterializedResultIterators(results);
    Tuple[] expectedResults = new Tuple[] { new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1L))), new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(2L))), new SingleKeyValueTuple(new KeyValue(C, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(3L))), new SingleKeyValueTuple(new KeyValue(D, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(4L))) };
    ClientAggregators aggregators = TestUtil.getSingleSumAggregator(getUrl(), PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES));
    ResultIterator scanner = new RowKeyOrderedAggregateResultIterator(iterators, aggregators);
    AssertResults.assertResults(scanner, expectedResults);
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ClientAggregators(org.apache.phoenix.expression.aggregator.ClientAggregators) SingleKeyValueTuple(org.apache.phoenix.schema.tuple.SingleKeyValueTuple) Tuple(org.apache.phoenix.schema.tuple.Tuple) Test(org.junit.Test) BaseConnectionlessQueryTest(org.apache.phoenix.query.BaseConnectionlessQueryTest)
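The "no span" case means no row key crosses an iterator boundary: A, B, C, and D each live in exactly one source, so the SUM aggregator has nothing to combine and every row passes through with its value (1L through 4L) unchanged, which is why the expected output mirrors the input. The spanning case, where adjacent iterators end and begin with the same row key and must be re-aggregated, is presumably covered by companion tests in the same class.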

Aggregations

Tuple (org.apache.phoenix.schema.tuple.Tuple): 48
SingleKeyValueTuple (org.apache.phoenix.schema.tuple.SingleKeyValueTuple): 22
KeyValue (org.apache.hadoop.hbase.KeyValue): 16
List (java.util.List): 10
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable): 10
ArrayList (java.util.ArrayList): 9
Test (org.junit.Test): 9
Expression (org.apache.phoenix.expression.Expression): 8
SQLException (java.sql.SQLException): 7
Cell (org.apache.hadoop.hbase.Cell): 6
LiteralExpression (org.apache.phoenix.expression.LiteralExpression): 6
IOException (java.io.IOException): 5
Region (org.apache.hadoop.hbase.regionserver.Region): 5
ProjectedColumnExpression (org.apache.phoenix.expression.ProjectedColumnExpression): 5
Aggregator (org.apache.phoenix.expression.aggregator.Aggregator): 5
ResultIterator (org.apache.phoenix.iterate.ResultIterator): 5
PColumn (org.apache.phoenix.schema.PColumn): 5
ResultTuple (org.apache.phoenix.schema.tuple.ResultTuple): 5
ClientAggregators (org.apache.phoenix.expression.aggregator.ClientAggregators): 4
ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr): 4