Use of org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode in project hbase by apache.
The class TestRowDataUrlsExample, method main.
/**
* Used for generating docs.
*/
public static void main(String... args) throws IOException {
  System.out.println("-- inputs --");
  System.out.println(KeyValueTestUtil.toStringWithPadding(kvs, true));
  ByteArrayOutputStream os = new ByteArrayOutputStream(1 << 20);
  PrefixTreeEncoder encoder = new PrefixTreeEncoder(os, false);
  for (KeyValue kv : kvs) {
    encoder.write(kv);
  }
  encoder.flush();
  System.out.println("-- qualifier SortedPtBuilderNodes --");
  for (TokenizerNode tokenizer : encoder.getQualifierWriter().getNonLeaves()) {
    System.out.println(tokenizer);
  }
  for (TokenizerNode tokenizerNode : encoder.getQualifierWriter().getLeaves()) {
    System.out.println(tokenizerNode);
  }
  System.out.println("-- qualifier PtColumnNodeWriters --");
  for (ColumnNodeWriter writer : encoder.getQualifierWriter().getColumnNodeWriters()) {
    System.out.println(writer);
  }
  System.out.println("-- rowKey SortedPtBuilderNodes --");
  for (TokenizerNode tokenizerNode : encoder.getRowWriter().getNonLeaves()) {
    System.out.println(tokenizerNode);
  }
  for (TokenizerNode tokenizerNode : encoder.getRowWriter().getLeaves()) {
    System.out.println(tokenizerNode);
  }
  System.out.println("-- row PtRowNodeWriters --");
  for (RowNodeWriter writer : encoder.getRowWriter().getNonLeafWriters()) {
    System.out.println(writer);
  }
  for (RowNodeWriter writer : encoder.getRowWriter().getLeafWriters()) {
    System.out.println(writer);
  }
  System.out.println("-- concatenated values --");
  System.out.println(Bytes.toStringBinary(encoder.getValueByteRange().deepCopyToNewArray()));
}
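The method above reads from a kvs list that the test class builds elsewhere from its URL-like test rows. A minimal sketch of such a setup, assuming the KeyValueTestUtil.create(row, family, qualifier, timestamp, value) test helper, might look like the following; the row keys and values are illustrative, not the ones TestRowDataUrlsExample actually uses.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;

// Hypothetical input setup; rows are kept in sorted order because the
// PrefixTreeEncoder expects cells to arrive sorted.
static List<KeyValue> kvs = new ArrayList<>();
static {
  kvs.add(KeyValueTestUtil.create("hbase.apache.org/book.html", "f", "q", 1L, "v1"));
  kvs.add(KeyValueTestUtil.create("hbase.apache.org/index.html", "f", "q", 1L, "v2"));
  kvs.add(KeyValueTestUtil.create("hbase.apache.org/team-list.html", "f", "q", 1L, "v3"));
}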
Use of org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode in project hbase by apache.
The class RowNodeWriter, method writeNextRowTrieNodeOffsets.
/**
 * If a branch or a nub, the last things we append are the UFInt offsets to the child row nodes.
 */
protected void writeNextRowTrieNodeOffsets(OutputStream os) throws IOException {
  ArrayList<TokenizerNode> children = tokenizerNode.getChildren();
  for (int i = 0; i < children.size(); ++i) {
    TokenizerNode child = children.get(i);
    int distanceToChild = tokenizerNode.getNegativeIndex() - child.getNegativeIndex();
    UFIntTool.writeBytes(blockMeta.getNextNodeOffsetWidth(), distanceToChild, os);
  }
}
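Each child offset is written as a fixed-width unsigned integer whose width (blockMeta.getNextNodeOffsetWidth()) is the same for every row node in the block, so a reader can locate any child's offset by position alone. A minimal, self-contained sketch of that style of encoding follows; it illustrates the idea only and is not the actual UFIntTool implementation.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class FixedWidthOffsetSketch {

  // Write `value` big-endian using exactly `width` bytes, mimicking the
  // fixed-width unsigned-int style used for next-node offsets.
  static void writeFixedWidthUInt(int width, long value, OutputStream os) throws IOException {
    for (int shift = (width - 1) * 8; shift >= 0; shift -= 8) {
      os.write((int) (value >>> shift) & 0xFF);
    }
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    writeFixedWidthUInt(2, 300, os); // 300 fits in 2 bytes: 0x01 0x2C
    writeFixedWidthUInt(2, 7, os);   // smaller values are zero-padded: 0x00 0x07
    StringBuilder hex = new StringBuilder();
    for (byte b : os.toByteArray()) {
      hex.append(String.format("0x%02X ", b));
    }
    System.out.println(hex.toString().trim()); // 0x01 0x2C 0x00 0x07
  }
}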