
Example 1 with Tuple

Use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

The class UngroupedAggregatingResultIterator, method next.

@Override
public Tuple next() throws SQLException {
    Tuple result = super.next();
    // Ensure ungrouped aggregation always returns a row, even if the underlying iterator doesn't.
    if (result == null && !hasRows) {
        // We should reset ClientAggregators here in case they are being reused in a new ResultIterator.
        aggregators.reset(aggregators.getAggregators());
        byte[] value = aggregators.toBytes(aggregators.getAggregators());
        result = new SingleKeyValueTuple(KeyValueUtil.newKeyValue(UNGROUPED_AGG_ROW_KEY, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value));
    }
    hasRows = true;
    return result;
}
Also used: SingleKeyValueTuple (org.apache.phoenix.schema.tuple.SingleKeyValueTuple), Tuple (org.apache.phoenix.schema.tuple.Tuple)
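
A minimal sketch of the single-cell pattern used above (not Phoenix source; the row key, column family, qualifier, and value are made-up placeholders): the aggregated row is just one KeyValue wrapped in a SingleKeyValueTuple, and its bytes can be read back through the Tuple API.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.schema.tuple.SingleKeyValueTuple;
import org.apache.phoenix.schema.tuple.Tuple;

public class SingleCellTupleSketch {
    public static void main(String[] args) {
        byte[] rowKey = Bytes.toBytes("row");        // placeholder row key
        byte[] family = Bytes.toBytes("0");          // placeholder column family
        byte[] qualifier = Bytes.toBytes("agg");     // placeholder qualifier
        byte[] value = Bytes.toBytes(42L);           // stand-in for the aggregator bytes

        // Wrap a single KeyValue in a Tuple, as the aggregating iterator does.
        Tuple tuple = new SingleKeyValueTuple(new KeyValue(rowKey, family, qualifier, value));

        // Read the single cell back out and decode its value.
        Cell cell = tuple.getValue(0);
        System.out.println(Bytes.toLong(CellUtil.cloneValue(cell))); // prints 42
    }
}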

Example 2 with Tuple

Use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

The class CorrelatePlanTest, method testCorrelatePlan.

private void testCorrelatePlan(Object[][] leftRelation, Object[][] rightRelation, int leftCorrelColumn, int rightCorrelColumn, JoinType type, Object[][] expectedResult, Integer offset) throws SQLException {
    TableRef leftTable = createProjectedTableFromLiterals(leftRelation[0]);
    TableRef rightTable = createProjectedTableFromLiterals(rightRelation[0]);
    String varName = "$cor0";
    RuntimeContext runtimeContext = new RuntimeContextImpl();
    runtimeContext.defineCorrelateVariable(varName, leftTable);
    QueryPlan leftPlan = newLiteralResultIterationPlan(leftRelation, offset);
    QueryPlan rightPlan = newLiteralResultIterationPlan(rightRelation, offset);
    Expression columnExpr = new ColumnRef(rightTable, rightCorrelColumn).newColumnExpression();
    Expression fieldAccess = new CorrelateVariableFieldAccessExpression(runtimeContext, varName, new ColumnRef(leftTable, leftCorrelColumn).newColumnExpression());
    Expression filter = ComparisonExpression.create(CompareOp.EQUAL, Arrays.asList(columnExpr, fieldAccess), CONTEXT.getTempPtr(), false);
    rightPlan = new ClientScanPlan(CONTEXT, SelectStatement.SELECT_ONE, rightTable, RowProjector.EMPTY_PROJECTOR, null, null, filter, OrderBy.EMPTY_ORDER_BY, rightPlan);
    PTable joinedTable = JoinCompiler.joinProjectedTables(leftTable.getTable(), rightTable.getTable(), type);
    CorrelatePlan correlatePlan = new CorrelatePlan(leftPlan, rightPlan, varName, type, false, runtimeContext, joinedTable, leftTable.getTable(), rightTable.getTable(), leftTable.getTable().getColumns().size());
    ResultIterator iter = correlatePlan.iterator();
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    for (Object[] row : expectedResult) {
        Tuple next = iter.next();
        assertNotNull(next);
        for (int i = 0; i < row.length; i++) {
            PColumn column = joinedTable.getColumns().get(i);
            boolean eval = new ProjectedColumnExpression(column, joinedTable, column.getName().getString()).evaluate(next, ptr);
            Object o = eval ? column.getDataType().toObject(ptr) : null;
            assertEquals(row[i], o);
        }
    }
}
Also used: ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable), CorrelateVariableFieldAccessExpression (org.apache.phoenix.expression.CorrelateVariableFieldAccessExpression), ResultIterator (org.apache.phoenix.iterate.ResultIterator), ProjectedColumnExpression (org.apache.phoenix.expression.ProjectedColumnExpression), QueryPlan (org.apache.phoenix.compile.QueryPlan), PTable (org.apache.phoenix.schema.PTable), PColumn (org.apache.phoenix.schema.PColumn), Expression (org.apache.phoenix.expression.Expression), LiteralExpression (org.apache.phoenix.expression.LiteralExpression), ComparisonExpression (org.apache.phoenix.expression.ComparisonExpression), ColumnRef (org.apache.phoenix.schema.ColumnRef), TableRef (org.apache.phoenix.schema.TableRef), Tuple (org.apache.phoenix.schema.tuple.Tuple), SingleKeyValueTuple (org.apache.phoenix.schema.tuple.SingleKeyValueTuple)
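
In the test above, the right-hand plan is wrapped in a filter that references the correlate variable $cor0, so the right side is conceptually re-evaluated once per left-hand row with that row bound to the variable. A plain-Java sketch of that nested-loop shape (illustrative only, inner join, hypothetical column indexes, no Phoenix APIs) might look like:

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

public class CorrelateSketch {
    // For each left row, bind the "correlate variable" to its join column and
    // keep only the right rows whose join column matches that binding.
    static List<Object[]> correlateInner(List<Object[]> left, List<Object[]> right,
                                         int leftCorrelColumn, int rightCorrelColumn) {
        List<Object[]> joined = new ArrayList<>();
        for (Object[] l : left) {
            Object binding = l[leftCorrelColumn];            // value of "$cor0" for this left row
            for (Object[] r : right) {
                if (Objects.equals(r[rightCorrelColumn], binding)) {
                    Object[] row = new Object[l.length + r.length];
                    System.arraycopy(l, 0, row, 0, l.length);
                    System.arraycopy(r, 0, row, l.length, r.length);
                    joined.add(row);                         // left columns followed by right columns
                }
            }
        }
        return joined;
    }
}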

Example 3 with Tuple

Use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

The class UnnestArrayPlanTest, method testUnnestArrays.

private void testUnnestArrays(PArrayDataType arrayType, List<Object[]> arrays, boolean withOrdinality) throws Exception {
    PDataType baseType = PDataType.fromTypeId(arrayType.getSqlType() - PDataType.ARRAY_TYPE_BASE);
    List<Tuple> tuples = toTuples(arrayType, arrays);
    LiteralResultIterationPlan subPlan = new LiteralResultIterationPlan(tuples, CONTEXT, SelectStatement.SELECT_ONE, TableRef.EMPTY_TABLE_REF, RowProjector.EMPTY_PROJECTOR, null, null, OrderBy.EMPTY_ORDER_BY, null);
    LiteralExpression dummy = LiteralExpression.newConstant(null, arrayType);
    RowKeyValueAccessor accessor = new RowKeyValueAccessor(Arrays.asList(dummy), 0);
    UnnestArrayPlan plan = new UnnestArrayPlan(subPlan, new RowKeyColumnExpression(dummy, accessor), withOrdinality);
    PName colName = PNameFactory.newName("ELEM");
    PColumn elemColumn = new PColumnImpl(colName, PNameFactory.newName(VALUE_COLUMN_FAMILY), baseType,
            null, null, true, 0, SortOrder.getDefault(), null, null, false, "", false, false, colName.getBytes());
    colName = PNameFactory.newName("IDX");
    PColumn indexColumn = withOrdinality
            ? new PColumnImpl(colName, PNameFactory.newName(VALUE_COLUMN_FAMILY), PInteger.INSTANCE,
                    null, null, true, 0, SortOrder.getDefault(), null, null, false, "", false, false, colName.getBytes())
            : null;
    List<PColumn> columns = withOrdinality ? Arrays.asList(elemColumn, indexColumn) : Arrays.asList(elemColumn);
    ProjectedColumnExpression elemExpr = new ProjectedColumnExpression(elemColumn, columns, 0, elemColumn.getName().getString());
    ProjectedColumnExpression indexExpr = withOrdinality ? new ProjectedColumnExpression(indexColumn, columns, 1, indexColumn.getName().getString()) : null;
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    ResultIterator iterator = plan.iterator();
    for (Object[] o : flatten(arrays)) {
        Tuple tuple = iterator.next();
        assertNotNull(tuple);
        assertTrue(elemExpr.evaluate(tuple, ptr));
        Object elem = baseType.toObject(ptr);
        assertEquals(o[0], elem);
        if (withOrdinality) {
            assertTrue(indexExpr.evaluate(tuple, ptr));
            Object index = PInteger.INSTANCE.toObject(ptr);
            assertEquals(o[1], index);
        }
    }
    assertNull(iterator.next());
}
Also used: PColumnImpl (org.apache.phoenix.schema.PColumnImpl), ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable), RowKeyValueAccessor (org.apache.phoenix.schema.RowKeyValueAccessor), LiteralExpression (org.apache.phoenix.expression.LiteralExpression), ResultIterator (org.apache.phoenix.iterate.ResultIterator), ProjectedColumnExpression (org.apache.phoenix.expression.ProjectedColumnExpression), RowKeyColumnExpression (org.apache.phoenix.expression.RowKeyColumnExpression), PColumn (org.apache.phoenix.schema.PColumn), PDataType (org.apache.phoenix.schema.types.PDataType), PName (org.apache.phoenix.schema.PName), Tuple (org.apache.phoenix.schema.tuple.Tuple), SingleKeyValueTuple (org.apache.phoenix.schema.tuple.SingleKeyValueTuple)
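
The expected rows come from the test's flatten(...) helper, which is not shown here. A plausible equivalent, for illustration only (assuming a 1-based ordinality column), is:

import java.util.ArrayList;
import java.util.List;

public class FlattenSketch {
    // Each input array contributes one output row per element: { element, ordinality }.
    static List<Object[]> flatten(List<Object[]> arrays) {
        List<Object[]> rows = new ArrayList<>();
        for (Object[] array : arrays) {
            for (int i = 0; i < array.length; i++) {
                rows.add(new Object[] { array[i], i + 1 });
            }
        }
        return rows;
    }
}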

Example 4 with Tuple

Use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

The class ConcatResultIteratorTest, method testMergeSort.

@Test
public void testMergeSort() throws Throwable {
    Tuple[] results1 = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(C, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    Tuple[] results2 = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(2))) };
    Tuple[] results3 = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(3))),
            new SingleKeyValueTuple(new KeyValue(D, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(4))) };
    final List<PeekingResultIterator> results = new ArrayList<PeekingResultIterator>(Arrays.asList(new PeekingResultIterator[] {
            new MaterializedResultIterator(Arrays.asList(results1)),
            new MaterializedResultIterator(Arrays.asList(results2)),
            new MaterializedResultIterator(Arrays.asList(results3)) }));
    Tuple[] expectedResults = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(3))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(2))),
            new SingleKeyValueTuple(new KeyValue(C, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(D, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(4))) };
    ResultIterators iterators = new ResultIterators() {

        @Override
        public List<PeekingResultIterator> getIterators() throws SQLException {
            return results;
        }

        @Override
        public int size() {
            return results.size();
        }

        @Override
        public void explain(List<String> planSteps) {
        }

        @Override
        public List<KeyRange> getSplits() {
            return Collections.emptyList();
        }

        @Override
        public List<List<Scan>> getScans() {
            return Collections.emptyList();
        }

        @Override
        public void close() throws SQLException {
        }
    };
    ResultIterator scanner = new MergeSortRowKeyResultIterator(iterators);
    AssertResults.assertResults(scanner, expectedResults);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), KeyRange (org.apache.phoenix.query.KeyRange), SingleKeyValueTuple (org.apache.phoenix.schema.tuple.SingleKeyValueTuple), ArrayList (java.util.ArrayList), List (java.util.List), Tuple (org.apache.phoenix.schema.tuple.Tuple), Test (org.junit.Test)
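
MergeSortRowKeyResultIterator interleaves the child iterators by row key, which is why the expected order here is A, B, C, D regardless of which child produced each tuple. A minimal sketch of comparing two tuples by row key (illustrative only, not the iterator's actual implementation) using Tuple.getKey(...) and HBase's byte comparison:

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.schema.tuple.Tuple;

public class RowKeyComparisonSketch {
    // Negative if a sorts before b by row key, positive if after, zero if equal.
    static int compareByRowKey(Tuple a, Tuple b) {
        ImmutableBytesWritable ptrA = new ImmutableBytesWritable();
        ImmutableBytesWritable ptrB = new ImmutableBytesWritable();
        a.getKey(ptrA); // point ptrA at a's row key bytes
        b.getKey(ptrB);
        return Bytes.compareTo(ptrA.get(), ptrA.getOffset(), ptrA.getLength(),
                ptrB.get(), ptrB.getOffset(), ptrB.getLength());
    }
}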

Example 5 with Tuple

Use of org.apache.phoenix.schema.tuple.Tuple in project phoenix by apache.

The class MergeSortResultIteratorTest, method testReverseMergeSort.

@Test
public void testReverseMergeSort() throws Throwable {
    Tuple[] results1 = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    Tuple[] results2 = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    Tuple[] results3 = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    final List<PeekingResultIterator> results = new ArrayList<PeekingResultIterator>(Arrays.asList(new PeekingResultIterator[] {
            new MaterializedResultIterator(Arrays.asList(results1)),
            new MaterializedResultIterator(Arrays.asList(results2)),
            new MaterializedResultIterator(Arrays.asList(results3)) }));
    Tuple[] expectedResults = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    ResultIterators iterators = new ResultIterators() {

        @Override
        public List<PeekingResultIterator> getIterators() throws SQLException {
            return results;
        }

        @Override
        public int size() {
            return results.size();
        }

        @Override
        public void explain(List<String> planSteps) {
        }

        @Override
        public List<KeyRange> getSplits() {
            return Collections.emptyList();
        }

        @Override
        public List<List<Scan>> getScans() {
            return Collections.emptyList();
        }

        @Override
        public void close() throws SQLException {
        }
    };
    ResultIterator scanner = new MergeSortRowKeyResultIterator(iterators, 0, true);
    AssertResults.assertResults(scanner, expectedResults);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), KeyRange (org.apache.phoenix.query.KeyRange), SingleKeyValueTuple (org.apache.phoenix.schema.tuple.SingleKeyValueTuple), ArrayList (java.util.ArrayList), List (java.util.List), Tuple (org.apache.phoenix.schema.tuple.Tuple), Test (org.junit.Test)
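
The setup is the same as the previous example; the difference is the extra arguments passed to the MergeSortRowKeyResultIterator constructor (a key offset of 0 and a final flag which, judging by the expected output B, B, A, A, reverses the comparison so rows come back in descending row-key order).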

Aggregations

Tuple (org.apache.phoenix.schema.tuple.Tuple): 48
SingleKeyValueTuple (org.apache.phoenix.schema.tuple.SingleKeyValueTuple): 22
KeyValue (org.apache.hadoop.hbase.KeyValue): 16
List (java.util.List): 10
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable): 10
ArrayList (java.util.ArrayList): 9
Test (org.junit.Test): 9
Expression (org.apache.phoenix.expression.Expression): 8
SQLException (java.sql.SQLException): 7
Cell (org.apache.hadoop.hbase.Cell): 6
LiteralExpression (org.apache.phoenix.expression.LiteralExpression): 6
IOException (java.io.IOException): 5
Region (org.apache.hadoop.hbase.regionserver.Region): 5
ProjectedColumnExpression (org.apache.phoenix.expression.ProjectedColumnExpression): 5
Aggregator (org.apache.phoenix.expression.aggregator.Aggregator): 5
ResultIterator (org.apache.phoenix.iterate.ResultIterator): 5
PColumn (org.apache.phoenix.schema.PColumn): 5
ResultTuple (org.apache.phoenix.schema.tuple.ResultTuple): 5
ClientAggregators (org.apache.phoenix.expression.aggregator.ClientAggregators): 4
ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr): 4