Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in the Apache Phoenix project.
The class MultiHfileOutputFormat, method getRegionStartKeys.
/**
 * Return the start keys of all of the regions in this table,
 * as a set of TableRowkeyPair, each pairing the table name with the
 * region start key wrapped in an ImmutableBytesWritable.
 */
private static Set<TableRowkeyPair> getRegionStartKeys(String tableName, RegionLocator table) throws IOException {
    byte[][] byteKeys = table.getStartKeys();
    Set<TableRowkeyPair> ret = new TreeSet<TableRowkeyPair>();
    for (byte[] byteKey : byteKeys) {
        // PHOENIX-2216: pass the table name along with the start key
        ret.add(new TableRowkeyPair(tableName, new ImmutableBytesWritable(byteKey)));
    }
    return ret;
}
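The RegionLocator argument is the standard HBase handle for region metadata. Below is a minimal caller-side sketch, assuming a table named MY_TABLE and a default configuration; since the method above is private, the equivalent steps are shown inline:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;

// Hypothetical caller: obtain a RegionLocator for the target table and wrap each
// region start key, mirroring what getRegionStartKeys does with TableRowkeyPair.
Configuration conf = HBaseConfiguration.create();
try (Connection conn = ConnectionFactory.createConnection(conf);
     RegionLocator locator = conn.getRegionLocator(TableName.valueOf("MY_TABLE"))) {
    byte[][] startKeys = locator.getStartKeys(); // one entry per region
    for (byte[] startKey : startKeys) {
        ImmutableBytesWritable wrapped = new ImmutableBytesWritable(startKey);
        // wrapped would then be paired with the table name, as in TableRowkeyPair above
    }
}

In an HFile bulk-load setup, such start keys are typically what drives the partitioning so that reducer output aligns with region boundaries.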
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in the Apache Phoenix project.
The class StatisticsWriter, method addStats.
/**
 * Update a list of statistics for a given region. If the UPDATE STATISTICS <tablename> query is issued,
 * we use UPSERT queries to update the table. If the region is split or a major compaction happens,
 * we update using HTable.put().
 *
 * @param tracker
 *            - the statistics tracker
 * @param cfKey
 *            - the column family for which the stats are being collected
 * @param mutations
 *            - list of mutations that collects all the mutations to commit in a batch
 * @throws IOException
 *             if we fail to do any of the puts. Any single failure will prevent any future attempts for the
 *             remaining list of stats to update
 */
@SuppressWarnings("deprecation")
public void addStats(StatisticsCollector tracker, ImmutableBytesPtr cfKey, List<Mutation> mutations) throws IOException {
    if (tracker == null) {
        return;
    }
    boolean useMaxTimeStamp = clientTimeStamp == DefaultStatisticsCollector.NO_TIMESTAMP;
    long timeStamp = clientTimeStamp;
    if (useMaxTimeStamp) {
        // When using max timestamp, we write the update time later because we only know the ts now
        timeStamp = tracker.getMaxTimeStamp();
        mutations.add(getLastStatsUpdatedTimePut(timeStamp));
    }
    GuidePostsInfo gps = tracker.getGuidePosts(cfKey);
    if (gps != null) {
        long[] byteCounts = gps.getByteCounts();
        long[] rowCounts = gps.getRowCounts();
        ImmutableBytesWritable keys = gps.getGuidePosts();
        boolean hasGuidePosts = keys.getLength() > 0;
        if (hasGuidePosts) {
            int guidePostCount = 0;
            try (ByteArrayInputStream stream = new ByteArrayInputStream(keys.get(), keys.getOffset(), keys.getLength())) {
                DataInput input = new DataInputStream(stream);
                PrefixByteDecoder decoder = new PrefixByteDecoder(gps.getMaxLength());
                do {
                    ImmutableBytesWritable ptr = decoder.decode(input);
                    addGuidepost(cfKey, mutations, ptr, byteCounts[guidePostCount], rowCounts[guidePostCount], timeStamp);
                    guidePostCount++;
                } while (decoder != null); // loop is terminated by the EOFException below, not by this condition
            } catch (EOFException e) {
                // Ignore as this signifies we're done
            }
            // If we've written guideposts with a guidepost key, then delete the
            // empty guidepost indicator that may have been written by other
            // regions.
            byte[] rowKey = StatisticsUtil.getRowKey(tableName, cfKey, ByteUtil.EMPTY_IMMUTABLE_BYTE_ARRAY);
            Delete delete = new Delete(rowKey, timeStamp);
            mutations.add(delete);
        } else {
            addGuidepost(cfKey, mutations, ByteUtil.EMPTY_IMMUTABLE_BYTE_ARRAY, 0, 0, timeStamp);
        }
    }
}
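addStats only assembles mutations; committing them happens in a separate step. Below is a minimal sketch of the call pattern, assuming statsWriter is the surrounding StatisticsWriter instance, tracker and cfKey are as in the method above, and statsTable is an HBase Table handle for the statistics table; these names are illustrative, not the confirmed Phoenix commit path:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Table;

// Collect the stats mutations for one column family, then commit them in a single batch.
List<Mutation> mutations = new ArrayList<Mutation>();
statsWriter.addStats(tracker, cfKey, mutations); // may add puts and/or a delete
Object[] results = new Object[mutations.size()];
statsTable.batch(mutations, results); // stock HBase Table.batch; throws if any action fails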
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in the Apache Phoenix project.
The class ArrayAppendFunctionTest, method testForCorrectSeparatorBytes3.
@Test
public void testForCorrectSeparatorBytes3() throws Exception {
    Object[] o = new Object[] { "a", null, null, "c" };
    Object element = "d";
    PDataType baseType = PVarchar.INSTANCE;
    PhoenixArray arr = new PhoenixArray(baseType, o);
    LiteralExpression arrayLiteral, elementLiteral;
    arrayLiteral = LiteralExpression.newConstant(arr, PVarcharArray.INSTANCE, null, null, SortOrder.DESC, Determinism.ALWAYS);
    elementLiteral = LiteralExpression.newConstant(element, baseType, null, null, SortOrder.ASC, Determinism.ALWAYS);
    List<Expression> expressions = Lists.newArrayList((Expression) arrayLiteral);
    expressions.add(elementLiteral);
    Expression arrayAppendFunction = new ArrayAppendFunction(expressions);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    arrayAppendFunction.evaluate(null, ptr);
    // Expected serialized form of the appended array; the array literal is DESC, so element bytes are bit-inverted.
    byte[] expected = new byte[] { -98, -1, 0, -2, -100, -1, -101, -1, -1, -1, -128, 1, -128, 3, -128, 3, -128, 5, -128, 7, 0, 0, 0, 10, 0, 0, 0, 5, 1 };
    assertArrayEquals(expected, ptr.get());
}
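To make the expected byte array less opaque, the serialized pointer could also be decoded back into a PhoenixArray and compared element by element. A minimal sketch, assuming the PDataType.toObject overload that takes (bytes, offset, length, actualType, sortOrder) and that the appended result carries the array literal's DESC sort order:

// Hedged sketch: decode the serialized result and check the appended element.
PhoenixArray resultArr = (PhoenixArray) PVarcharArray.INSTANCE.toObject(
        ptr.get(), ptr.getOffset(), ptr.getLength(),
        PVarcharArray.INSTANCE, arrayLiteral.getSortOrder());
Object[] elements = (Object[]) resultArr.getArray();
assertEquals(5, elements.length); // {"a", null, null, "c"} plus the appended "d"
assertEquals("d", elements[4]);   // the appended element comes last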
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in the Apache Phoenix project.
The class ArrayAppendFunctionTest, method testForCorrectSeparatorBytes2.
@Test
public void testForCorrectSeparatorBytes2() throws Exception {
    Object[] o = new Object[] { "a", "b", "c" };
    Object element = "d";
    PDataType baseType = PVarchar.INSTANCE;
    PhoenixArray arr = new PhoenixArray(baseType, o);
    LiteralExpression arrayLiteral, elementLiteral;
    arrayLiteral = LiteralExpression.newConstant(arr, PVarcharArray.INSTANCE, null, null, SortOrder.DESC, Determinism.ALWAYS);
    elementLiteral = LiteralExpression.newConstant(element, baseType, null, null, SortOrder.ASC, Determinism.ALWAYS);
    List<Expression> expressions = Lists.newArrayList((Expression) arrayLiteral);
    expressions.add(elementLiteral);
    Expression arrayAppendFunction = new ArrayAppendFunction(expressions);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    arrayAppendFunction.evaluate(null, ptr);
    // Expected serialized form of the appended array; the array literal is DESC, so element bytes are bit-inverted.
    byte[] expected = new byte[] { -98, -1, -99, -1, -100, -1, -101, -1, -1, -1, -128, 1, -128, 3, -128, 5, -128, 7, 0, 0, 0, 10, 0, 0, 0, 4, 1 };
    assertArrayEquals(expected, ptr.get());
}
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in the Apache Phoenix project.
The class ArrayConcatFunctionTest, method testForCorrectSeparatorBytes4.
@Test
public void testForCorrectSeparatorBytes4() throws Exception {
    Object[] o1 = new Object[] { "a", "b", null };
    Object[] o2 = new Object[] { null, "c", "d", "e" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    LiteralExpression array1Literal, array2Literal;
    array1Literal = LiteralExpression.newConstant(arr1, type, null, null, SortOrder.ASC, Determinism.ALWAYS);
    array2Literal = LiteralExpression.newConstant(arr2, type, null, null, SortOrder.DESC, Determinism.ALWAYS);
    List<Expression> expressions = Lists.newArrayList((Expression) array1Literal);
    expressions.add(array2Literal);
    Expression arrayConcatFunction = new ArrayConcatFunction(expressions);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    arrayConcatFunction.evaluate(null, ptr);
    // Expected serialized form of the concatenated array; here the element bytes appear un-inverted (ASC encoding).
    byte[] expected = new byte[] { 97, 0, 98, 0, 0, -2, 99, 0, 100, 0, 101, 0, 0, 0, -128, 1, -128, 3, -128, 5, -128, 5, -128, 7, -128, 9, -128, 11, 0, 0, 0, 14, 0, 0, 0, 7, 1 };
    assertArrayEquals(expected, ptr.get());
}
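The same round-trip idea applies to the concatenation result; under the same assumptions as the decode sketch above, one could check that elements from both inputs end up in order:

// Hedged sketch: decode the concatenated result and verify its length and last element.
PhoenixArray resultArr = (PhoenixArray) PVarcharArray.INSTANCE.toObject(
        ptr.get(), ptr.getOffset(), ptr.getLength(),
        PVarcharArray.INSTANCE, array1Literal.getSortOrder());
Object[] elements = (Object[]) resultArr.getArray();
assertEquals(7, elements.length); // {"a", "b", null} followed by {null, "c", "d", "e"}
assertEquals("e", elements[6]);   // indices 2 and 3 hold the nulls from the two inputs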