Use of org.apache.phoenix.schema.tuple.SingleKeyValueTuple in project phoenix by apache.
From the class LiteralResultIteratorPlanTest, method newLiteralResultIterationPlan:
private QueryPlan newLiteralResultIterationPlan(Integer offset, Integer limit) throws SQLException {
    List<Tuple> tuples = Lists.newArrayList();
    Tuple baseTuple = new SingleKeyValueTuple(KeyValue.LOWESTKEY);
    for (Object[] row : RELATION) {
        Expression[] exprs = new Expression[row.length];
        for (int i = 0; i < row.length; i++) {
            exprs[i] = LiteralExpression.newConstant(row[i]);
        }
        TupleProjector projector = new TupleProjector(exprs);
        tuples.add(projector.projectResults(baseTuple));
    }
    return new LiteralResultIterationPlan(tuples, CONTEXT, SelectStatement.SELECT_ONE,
            TableRef.EMPTY_TABLE_REF, RowProjector.EMPTY_PROJECTOR, limit, offset,
            OrderBy.EMPTY_ORDER_BY, null);
}
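The helper above materializes each row of RELATION as a projected Tuple anchored on a placeholder SingleKeyValueTuple, then wraps the list in a LiteralResultIterationPlan. The following is a minimal sketch of how a test might consume such a plan; it is not part of the original test and assumes the usual Phoenix QueryPlan.iterator()/ResultIterator.next() contract, where next() returns null once the tuples are exhausted.

// Hypothetical consumption sketch (not from the original test class).
private int countRows(Integer offset, Integer limit) throws SQLException {
    QueryPlan plan = newLiteralResultIterationPlan(offset, limit);
    ResultIterator iterator = plan.iterator();
    try {
        int rowCount = 0;
        // next() is assumed to return null when the literal tuples are exhausted
        for (Tuple tuple = iterator.next(); tuple != null; tuple = iterator.next()) {
            rowCount++;
        }
        return rowCount; // with no limit/offset this should equal RELATION.length
    } finally {
        iterator.close();
    }
}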
Use of org.apache.phoenix.schema.tuple.SingleKeyValueTuple in project phoenix by apache.
From the class DistinctValueWithCountClientAggregator, method aggregate:
@Override
public void aggregate(Tuple tuple, ImmutableBytesWritable ptr) {
    if (tuple instanceof SingleKeyValueTuple) {
        // Case when scanners do look ahead and re-aggregate the result row. The result is
        // already available in the ptr.
        PDataType resultDataType = getResultDataType();
        cachedResult = resultDataType.toObject(ptr, resultDataType, sortOrder);
    } else {
        InputStream is;
        try {
            if (Bytes.equals(ptr.get(), ptr.getOffset(), 1,
                    DistinctValueWithCountServerAggregator.COMPRESS_MARKER, 0, 1)) {
                // This reads the uncompressed length from the front of the compressed input
                int uncompressedLength = Snappy.getUncompressedLength(ptr.get(), ptr.getOffset() + 1);
                byte[] uncompressed = new byte[uncompressedLength];
                // This will throw CorruptionException, a RuntimeException, if the snappy data is
                // invalid. We're making a RuntimeException out of a checked IOException below, so
                // assume it's ok to let any CorruptionException escape.
                Snappy.uncompress(ptr.get(), ptr.getOffset() + 1, ptr.getLength() - 1, uncompressed, 0);
                is = new ByteArrayInputStream(uncompressed, 0, uncompressedLength);
            } else {
                is = new ByteArrayInputStream(ptr.get(), ptr.getOffset() + 1, ptr.getLength() - 1);
            }
            DataInputStream in = new DataInputStream(is);
            int mapSize = WritableUtils.readVInt(in);
            for (int i = 0; i < mapSize; i++) {
                int keyLen = WritableUtils.readVInt(in);
                byte[] keyBytes = new byte[keyLen];
                in.read(keyBytes, 0, keyLen);
                ImmutableBytesPtr key = new ImmutableBytesPtr(keyBytes);
                int value = WritableUtils.readVInt(in);
                Integer curCount = valueVsCount.get(key);
                if (curCount == null) {
                    valueVsCount.put(key, value);
                } else {
                    valueVsCount.put(key, curCount + value);
                }
                totalCount += value;
            }
        } catch (IOException ioe) {
            // Impossible as we're using a ByteArrayInputStream
            throw new RuntimeException(ioe);
        }
    }
    if (buffer == null) {
        initBuffer();
    }
}
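The deserialization loop implies a simple layout for the uncompressed case: one marker byte (anything other than DistinctValueWithCountServerAggregator.COMPRESS_MARKER), a vint map size, and then, per entry, a vint key length, the key bytes, and a vint count. Below is a minimal sketch of a matching encoder for illustration only; the marker value 0 and the helper name encodeValueVsCount are assumptions, not the server aggregator's actual code, and the sketch assumes it sits next to the aggregator so ImmutableBytesPtr, WritableUtils, and the java.io types are already imported.

// Hypothetical encoder mirroring the reader above (illustration only).
// Layout: [1 marker byte][vint mapSize]{ [vint keyLen][key bytes][vint count] }*
static byte[] encodeValueVsCount(Map<ImmutableBytesPtr, Integer> valueVsCount) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bos);
    out.writeByte(0); // assumed "not compressed" marker byte; must differ from COMPRESS_MARKER
    WritableUtils.writeVInt(out, valueVsCount.size());
    for (Map.Entry<ImmutableBytesPtr, Integer> entry : valueVsCount.entrySet()) {
        byte[] key = entry.getKey().copyBytes();
        WritableUtils.writeVInt(out, key.length);
        out.write(key, 0, key.length);
        WritableUtils.writeVInt(out, entry.getValue());
    }
    out.flush();
    return bos.toByteArray();
}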
Use of org.apache.phoenix.schema.tuple.SingleKeyValueTuple in project phoenix by apache.
From the class AggregateResultScannerTest, method testAggregatingMergeSort:
@Test
public void testAggregatingMergeSort() throws Throwable {
    Tuple[] results1 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, PLong.INSTANCE.toBytes(1L))) };
    Tuple[] results2 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, PLong.INSTANCE.toBytes(1L))) };
    Tuple[] results3 = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, PLong.INSTANCE.toBytes(1L))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, PLong.INSTANCE.toBytes(1L))) };
    Tuple[] results4 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, PLong.INSTANCE.toBytes(1L))) };
    final List<PeekingResultIterator> results = new ArrayList<PeekingResultIterator>(Arrays.asList(new PeekingResultIterator[] {
            new MaterializedResultIterator(Arrays.asList(results1)),
            new MaterializedResultIterator(Arrays.asList(results2)),
            new MaterializedResultIterator(Arrays.asList(results3)),
            new MaterializedResultIterator(Arrays.asList(results4)) }));
    Tuple[] expectedResults = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, PLong.INSTANCE.toBytes(3L))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, PLong.INSTANCE.toBytes(2L))) };
    ResultIterators iterators = new MaterializedResultIterators(results);
    ClientAggregators aggregators = TestUtil.getSingleSumAggregator(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES));
    ResultIterator scanner = new GroupedAggregatingResultIterator(new MergeSortRowKeyResultIterator(iterators), aggregators);
    AssertResults.assertResults(scanner, expectedResults);
}
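The expected tuples follow directly from the inputs: row key A carries the value 1 in three of the four iterators (results1, results3, results4), so the grouped SUM is 3; B appears twice (results2, results3), so 2. A minimal plain-Java sketch of the same grouping, useful only as a sanity check and not part of the test, is shown below; the hard-coded input table and class name are illustrative.

// Illustration only: reproduce the expected sums with a sorted map keyed by row key.
import java.util.Map;
import java.util.TreeMap;

public class MergeSortSumSketch {
    public static void main(String[] args) {
        // (rowKey, value) pairs as they appear across results1..results4 in the test above;
        // "A" and "B" stand in for the test's A and B row-key byte arrays.
        Object[][] inputs = { { "A", 1L }, { "B", 1L }, { "A", 1L }, { "B", 1L }, { "A", 1L } };
        Map<String, Long> sums = new TreeMap<String, Long>();
        for (Object[] input : inputs) {
            String rowKey = (String) input[0];
            long value = (Long) input[1];
            Long current = sums.get(rowKey);
            sums.put(rowKey, current == null ? value : current + value);
        }
        // Prints {A=3, B=2}, matching the 3L and 2L in expectedResults.
        System.out.println(sums);
    }
}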
Use of org.apache.phoenix.schema.tuple.SingleKeyValueTuple in project phoenix by apache.
From the class ConcatResultIteratorTest, method testConcat:
@Test
public void testConcat() throws Throwable {
    Tuple[] results1 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    Tuple[] results2 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(2))) };
    Tuple[] results3 = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(3))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(4))) };
    final List<PeekingResultIterator> results = Arrays.asList(new PeekingResultIterator[] {
            new MaterializedResultIterator(Arrays.asList(results1)),
            new MaterializedResultIterator(Arrays.asList(results2)),
            new MaterializedResultIterator(Arrays.asList(results3)) });
    ResultIterators iterators = new MaterializedResultIterators(results);
    Tuple[] expectedResults = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(2))),
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(3))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(4))) };
    ResultIterator scanner = new ConcatResultIterator(iterators);
    AssertResults.assertResults(scanner, expectedResults);
}
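Unlike the merge-sort tests, ConcatResultIterator simply chains its child iterators end to end, so the expected row keys come out as A, B, A, B in input order rather than re-sorted by row key. The sketch below illustrates that chaining behavior over plain Java lists; it is an illustration of the ordering, not Phoenix's implementation, and the class name and string rows are made up for the example.

// Illustration only: concatenation preserves per-source order instead of re-sorting.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ConcatOrderSketch {
    public static void main(String[] args) {
        List<List<String>> sources = Arrays.asList(
                Arrays.asList("A:1"),
                Arrays.asList("B:2"),
                Arrays.asList("A:3", "B:4"));
        List<String> concatenated = new ArrayList<String>();
        for (List<String> source : sources) {
            concatenated.addAll(source); // drain each source fully before moving to the next
        }
        // Prints [A:1, B:2, A:3, B:4] -- the same order as expectedResults in testConcat.
        System.out.println(concatenated);
    }
}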
Use of org.apache.phoenix.schema.tuple.SingleKeyValueTuple in project phoenix by apache.
From the class MergeSortResultIteratorTest, method testMergeSort:
@Test
public void testMergeSort() throws Throwable {
    Tuple[] results1 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    Tuple[] results2 = new Tuple[] { new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    Tuple[] results3 = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    final List<PeekingResultIterator> results = new ArrayList<PeekingResultIterator>(Arrays.asList(new PeekingResultIterator[] {
            new MaterializedResultIterator(Arrays.asList(results1)),
            new MaterializedResultIterator(Arrays.asList(results2)),
            new MaterializedResultIterator(Arrays.asList(results3)) }));
    Tuple[] expectedResults = new Tuple[] {
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(A, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))),
            new SingleKeyValueTuple(new KeyValue(B, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, Bytes.toBytes(1))) };
    ResultIterators iterators = new ResultIterators() {

        @Override
        public List<PeekingResultIterator> getIterators() throws SQLException {
            return results;
        }

        @Override
        public int size() {
            return results.size();
        }

        @Override
        public void explain(List<String> planSteps) {
        }

        @Override
        public List<KeyRange> getSplits() {
            return Collections.emptyList();
        }

        @Override
        public List<List<Scan>> getScans() {
            return Collections.emptyList();
        }

        @Override
        public void close() throws SQLException {
        }
    };
    ResultIterators reverseIterators = new ResultIterators() {

        @Override
        public List<PeekingResultIterator> getIterators() throws SQLException {
            return results;
        }

        @Override
        public int size() {
            return results.size();
        }

        @Override
        public void explain(List<String> planSteps) {
        }

        @Override
        public List<KeyRange> getSplits() {
            return Collections.emptyList();
        }

        @Override
        public List<List<Scan>> getScans() {
            return Collections.emptyList();
        }

        @Override
        public void close() throws SQLException {
        }
    };
    ResultIterator scanner = new MergeSortRowKeyResultIterator(iterators);
    AssertResults.assertResults(scanner, expectedResults);
}
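MergeSortRowKeyResultIterator assumes each child iterator is already sorted by row key and interleaves them into one globally sorted stream, which is why the expected output is A, A, B, B even though the inputs arrive as three separate iterators. The sketch below shows the underlying k-way-merge idea over plain Java iterators using a priority queue; it is a generic illustration of the technique, not Phoenix's implementation, and the class and field names are made up.

// Illustration only: k-way merge of already-sorted sources using a priority queue.
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

public class KWayMergeSketch {
    // One heap entry per source: the source's current head value plus the source itself.
    private static final class Entry implements Comparable<Entry> {
        final String value;
        final Iterator<String> source;
        Entry(String value, Iterator<String> source) { this.value = value; this.source = source; }
        @Override public int compareTo(Entry other) { return value.compareTo(other.value); }
    }

    public static void main(String[] args) {
        // Stand-ins for results1..results3: each source is individually sorted by row key.
        List<List<String>> sources = Arrays.asList(
                Arrays.asList("A"), Arrays.asList("B"), Arrays.asList("A", "B"));
        PriorityQueue<Entry> heap = new PriorityQueue<Entry>();
        for (List<String> source : sources) {
            Iterator<String> it = source.iterator();
            if (it.hasNext()) heap.add(new Entry(it.next(), it));
        }
        StringBuilder merged = new StringBuilder();
        while (!heap.isEmpty()) {
            Entry smallest = heap.poll();       // smallest head across all sources
            merged.append(smallest.value);
            if (smallest.source.hasNext()) {    // refill from the source just consumed
                heap.add(new Entry(smallest.source.next(), smallest.source));
            }
        }
        System.out.println(merged); // prints AABB, matching expectedResults in testMergeSort
    }
}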