Use of org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser in project hbase by apache.
From the class TestImportTsvParser, method testTsvParserSpecParsing.
@Test
public void testTsvParserSpecParsing() {
  TsvParser parser;

  // Row key only: column 0 has no family/qualifier and there is no timestamp column.
  parser = new TsvParser("HBASE_ROW_KEY", "\t");
  assertNull(parser.getFamily(0));
  assertNull(parser.getQualifier(0));
  assertEquals(0, parser.getRowKeyColumnIndex());
  assertFalse(parser.hasTimestamp());

  // Row key plus one family:qualifier column.
  parser = new TsvParser("HBASE_ROW_KEY,col1:scol1", "\t");
  assertNull(parser.getFamily(0));
  assertNull(parser.getQualifier(0));
  assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(1));
  assertBytesEquals(Bytes.toBytes("scol1"), parser.getQualifier(1));
  assertEquals(0, parser.getRowKeyColumnIndex());
  assertFalse(parser.hasTimestamp());

  // Row key plus two columns in the same family.
  parser = new TsvParser("HBASE_ROW_KEY,col1:scol1,col1:scol2", "\t");
  assertNull(parser.getFamily(0));
  assertNull(parser.getQualifier(0));
  assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(1));
  assertBytesEquals(Bytes.toBytes("scol1"), parser.getQualifier(1));
  assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(2));
  assertBytesEquals(Bytes.toBytes("scol2"), parser.getQualifier(2));
  assertEquals(0, parser.getRowKeyColumnIndex());
  assertFalse(parser.hasTimestamp());

  // A HBASE_TS_KEY column in the middle of the spec.
  parser = new TsvParser("HBASE_ROW_KEY,col1:scol1,HBASE_TS_KEY,col1:scol2", "\t");
  assertNull(parser.getFamily(0));
  assertNull(parser.getQualifier(0));
  assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(1));
  assertBytesEquals(Bytes.toBytes("scol1"), parser.getQualifier(1));
  assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(3));
  assertBytesEquals(Bytes.toBytes("scol2"), parser.getQualifier(3));
  assertEquals(0, parser.getRowKeyColumnIndex());
  assertTrue(parser.hasTimestamp());
  assertEquals(2, parser.getTimestampKeyColumnIndex());

  // HBASE_ATTRIBUTES_KEY as the last column.
  parser = new TsvParser("HBASE_ROW_KEY,col1:scol1,HBASE_TS_KEY,col1:scol2,HBASE_ATTRIBUTES_KEY", "\t");
  assertNull(parser.getFamily(0));
  assertNull(parser.getQualifier(0));
  assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(1));
  assertBytesEquals(Bytes.toBytes("scol1"), parser.getQualifier(1));
  assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(3));
  assertBytesEquals(Bytes.toBytes("scol2"), parser.getQualifier(3));
  assertEquals(0, parser.getRowKeyColumnIndex());
  assertTrue(parser.hasTimestamp());
  assertEquals(2, parser.getTimestampKeyColumnIndex());
  assertEquals(4, parser.getAttributesKeyColumnIndex());

  // HBASE_ATTRIBUTES_KEY first and HBASE_ROW_KEY last: indices follow spec order.
  parser = new TsvParser("HBASE_ATTRIBUTES_KEY,col1:scol1,HBASE_TS_KEY,col1:scol2,HBASE_ROW_KEY", "\t");
  assertNull(parser.getFamily(0));
  assertNull(parser.getQualifier(0));
  assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(1));
  assertBytesEquals(Bytes.toBytes("scol1"), parser.getQualifier(1));
  assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(3));
  assertBytesEquals(Bytes.toBytes("scol2"), parser.getQualifier(3));
  assertEquals(4, parser.getRowKeyColumnIndex());
  assertTrue(parser.hasTimestamp());
  assertEquals(2, parser.getTimestampKeyColumnIndex());
  assertEquals(0, parser.getAttributesKeyColumnIndex());
}
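For context, here is a minimal sketch (not part of the test class) of how the indices verified above are consumed once an actual line is parsed: the row key and column values come back as offsets into the original line bytes, and the HBASE_TS_KEY column supplies the timestamp. It assumes it sits next to the tests in the org.apache.hadoop.hbase.mapreduce package; the column spec and the sample line are made up for illustration.

// Illustrative sketch, assuming the same package as the tests above.
// The column spec and the sample line are invented for demonstration.
static void parseOneLine() throws BadTsvLineException {
  TsvParser parser = new TsvParser("HBASE_ROW_KEY,d:col1,HBASE_TS_KEY", "\t");
  byte[] line = Bytes.toBytes("row1\tvalue1\t1234");
  ParsedLine parsed = parser.parse(line, line.length);
  // The row key is exposed as an offset/length pair into the original line bytes.
  byte[] rowKey = Bytes.copy(line, parsed.getRowKeyOffset(), parsed.getRowKeyLength());
  // Ordinary columns are addressed by the same indices getFamily()/getQualifier() use.
  byte[] value = Bytes.copy(line, parsed.getColumnOffset(1), parsed.getColumnLength(1));
  // The HBASE_TS_KEY column supplies the timestamp; the argument is the fallback value.
  long ts = parsed.getTimestamp(System.currentTimeMillis());
  System.out.println(Bytes.toString(rowKey) + " -> " + Bytes.toString(value) + " @ " + ts);
}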
Use of org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser in project hbase by apache.
From the class TestImportTsvParser, method testTsvParseAttributesKey.
@Test
public void testTsvParseAttributesKey() throws BadTsvLineException {
  // HBASE_ATTRIBUTES_KEY as the last column.
  TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a,HBASE_TS_KEY,HBASE_ATTRIBUTES_KEY", "\t");
  assertEquals(0, parser.getRowKeyColumnIndex());
  byte[] line = Bytes.toBytes("rowkey\tval_a\t1234\tkey=>value");
  ParsedLine parse = parser.parse(line, line.length);
  // "rowkey\tval_a\t1234\t" is 18 bytes, so the attributes column starts at offset 18.
  assertEquals(18, parse.getAttributeKeyOffset());
  assertEquals(3, parser.getAttributesKeyColumnIndex());
  String[] attributes = parse.getIndividualAttributes();
  assertEquals("key=>value", attributes[0]);
  try {
    // A line with fewer columns than the spec must be rejected.
    line = Bytes.toBytes("rowkey\tval_a\t1234");
    parser.parse(line, line.length);
    fail("Should get BadTsvLineException on a line with fewer columns than the spec.");
  } catch (BadTsvLineException b) {
    // expected
  }

  // HBASE_ATTRIBUTES_KEY as the first column.
  parser = new TsvParser("HBASE_ATTRIBUTES_KEY,col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
  assertEquals(2, parser.getRowKeyColumnIndex());
  line = Bytes.toBytes("key=>value\tval_a\trowkey\t1234");
  parse = parser.parse(line, line.length);
  assertEquals(0, parse.getAttributeKeyOffset());
  assertEquals(0, parser.getAttributesKeyColumnIndex());
  attributes = parse.getIndividualAttributes();
  assertEquals("key=>value", attributes[0]);
  try {
    // Too few columns to even reach the row key position.
    line = Bytes.toBytes("val_a");
    parser.parse(line, line.length);
    fail("Should get BadTsvLineException when number of columns less than rowkey position.");
  } catch (BadTsvLineException b) {
    // expected
  }

  // Multiple comma-separated attributes in a single HBASE_ATTRIBUTES_KEY column.
  parser = new TsvParser("col_a,HBASE_ATTRIBUTES_KEY,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
  assertEquals(3, parser.getRowKeyColumnIndex());
  line = Bytes.toBytes("val_a\tkey0=>value0,key1=>value1,key2=>value2\t1234\trowkey");
  parse = parser.parse(line, line.length);
  assertEquals(1, parser.getAttributesKeyColumnIndex());
  assertEquals(6, parse.getAttributeKeyOffset());
  String[] attr = parse.getIndividualAttributes();
  int i = 0;
  for (String str : attr) {
    assertEquals("key" + i + "=>" + "value" + i, str);
    i++;
  }
}
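As a follow-on (not part of the test), the "key=>value" strings returned by getIndividualAttributes() are what a bulk importer would attach to its mutations as operation attributes. A minimal, hedged sketch of that step, splitting on the "=>" separator used in the lines above; Put is org.apache.hadoop.hbase.client.Put and setAttribute() is its standard API, while the row key and attribute string are taken from the sample line:

// Illustrative sketch: attaching one parsed "key=>value" attribute to a Put.
String[] kv = "key=>value".split("=>");         // ["key", "value"]
Put put = new Put(Bytes.toBytes("rowkey"));
put.setAttribute(kv[0], Bytes.toBytes(kv[1]));  // carried on the mutation by the importer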
Use of org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser in project hbase by apache.
From the class TestImportTsvParser, method testTsvParserWithCellVisibilityCol.
@Test
public void testTsvParserWithCellVisibilityCol() throws BadTsvLineException {
  TsvParser parser = new TsvParser(
      "HBASE_ROW_KEY,col_a,HBASE_TS_KEY,HBASE_ATTRIBUTES_KEY,HBASE_CELL_VISIBILITY", "\t");
  assertEquals(0, parser.getRowKeyColumnIndex());
  assertEquals(4, parser.getCellVisibilityColumnIndex());
  byte[] line = Bytes.toBytes("rowkey\tval_a\t1234\tkey=>value\tPRIVATE&SECRET");
  ParsedLine parse = parser.parse(line, line.length);
  assertEquals(18, parse.getAttributeKeyOffset());
  assertEquals(3, parser.getAttributesKeyColumnIndex());
  String[] attributes = parse.getIndividualAttributes();
  assertEquals("key=>value", attributes[0]);
  // "PRIVATE&SECRET" starts right after "rowkey\tval_a\t1234\tkey=>value\t" (29 bytes).
  assertEquals(29, parse.getCellVisibilityColumnOffset());
}
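A short follow-on, a few lines one could append at the end of the test body (reusing its parser, line, and parse variables), shows how the visibility expression can be read back out with the column index and offsets the test just verified; Bytes.toString(byte[], int, int) is the standard HBase utility, everything else is illustration:

// Illustrative follow-on, reusing parser/line/parse from the test above.
int visIndex = parser.getCellVisibilityColumnIndex();  // 4
String visibility = Bytes.toString(line, parse.getColumnOffset(visIndex),
    parse.getColumnLength(visIndex));
// visibility is "PRIVATE&SECRET" for the sample line above.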
Use of org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser in project hbase by apache.
From the class TestImportTsvParser, method testTsvParserBadTsvLineOnlyKey.
@Test(expected = BadTsvLineException.class)
public void testTsvParserBadTsvLineOnlyKey() throws BadTsvLineException {
  TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t");
  // The line supplies only the row key; the col_a column is missing, so parse() must throw.
  byte[] line = Bytes.toBytes("key_only");
  parser.parse(line, line.length);
}
Use of org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser in project hbase by apache.
From the class TestImportTsvParser, method testTsvParserNoTimestampValue.
@Test(expected = BadTsvLineException.class)
public void testTsvParserNoTimestampValue() throws BadTsvLineException {
  TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a,HBASE_TS_KEY", "\t");
  assertEquals(2, parser.getTimestampKeyColumnIndex());
  // The line stops before the HBASE_TS_KEY column, so parse() must throw.
  byte[] line = Bytes.toBytes("rowkey\tval_a");
  parser.parse(line, line.length);
}
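The two expected-exception tests above cover exactly the malformed lines a bulk importer has to tolerate at runtime. Below is a minimal sketch of the usual handling pattern, offered as illustration rather than the actual TsvImporterMapper code: parse each line, and count and skip lines that raise BadTsvLineException instead of failing the whole import.

// Illustrative sketch of tolerant line handling; not the actual TsvImporterMapper code.
static int importLines(TsvParser parser, java.util.List<byte[]> lines) {
  int badLines = 0;
  for (byte[] line : lines) {
    try {
      ParsedLine parsed = parser.parse(line, line.length);
      // ... turn `parsed` into a Put/KeyValue here ...
    } catch (BadTsvLineException e) {
      // Malformed input (missing columns, missing timestamp value, ...) is counted and skipped.
      badLines++;
    }
  }
  return badLines;
}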