Use of org.apache.accumulo.core.data.Value in the Apache Hive project.
From the class PrimitiveComparisonFilter, method accept.
/**
 * Scans one row's parallel key/value collections for the column (family and
 * qualifier) this filter was configured with. When that column is found, the
 * cell's bytes are handed to {@code compOpt} to decide whether the row passes.
 *
 * @param keys the keys of a decoded row, paired positionally with {@code values}
 * @param values the values of the row, in the same order as {@code keys}
 * @return true if the configured column is present and its value satisfies the
 *         comparison; false if the column is absent or the comparison fails
 */
private boolean accept(Collection<Key> keys, Collection<Value> values) {
Iterator<Key> kIter = keys.iterator();
Iterator<Value> vIter = values.iterator();
// Guard BOTH iterators: the collections are expected to be the same size, but
// a mismatched pair must not throw NoSuchElementException from vIter.next().
while (kIter.hasNext() && vIter.hasNext()) {
Key k = kIter.next();
Value v = vIter.next();
if (matchQualAndFam(k)) {
return compOpt.accept(v.get());
}
}
return false;
}
Use of org.apache.accumulo.core.data.Value in the Apache Hive project.
From the class TestHiveAccumuloTableInputFormat, method testGetProtectedField.
/**
 * Verifies that a row protected by a ColumnVisibility ("foo") is only returned
 * once the user's authorizations and the scan configuration both include that
 * label, and that the three unprotected rows are still returned alongside it.
 */
@Test
public void testGetProtectedField() throws Exception {
FileInputFormat.addInputPath(conf, new Path("unused"));
BatchWriterConfig writerConf = new BatchWriterConfig();
BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
// Grant the user the "foo" label in addition to the labels it already holds.
Authorizations origAuths = con.securityOperations().getUserAuthorizations(USER);
con.securityOperations().changeUserAuthorizations(USER, new Authorizations(origAuths.toString() + ",foo"));
// Write a fourth row whose every cell is visible only with the "foo" label.
Mutation m = new Mutation("r4");
m.put(COLUMN_FAMILY, NAME, new ColumnVisibility("foo"), new Value("frank".getBytes()));
m.put(COLUMN_FAMILY, SID, new ColumnVisibility("foo"), new Value(parseIntBytes("4")));
m.put(COLUMN_FAMILY, DEGREES, new ColumnVisibility("foo"), new Value(parseDoubleBytes("60.6")));
m.put(COLUMN_FAMILY, MILLIS, new ColumnVisibility("foo"), new Value(parseLongBytes("777")));
writer.addMutation(m);
writer.close();
conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo");
InputSplit[] splits = inputformat.getSplits(conf, 0);
// JUnit convention: expected value first so failure messages read correctly.
assertEquals(1, splits.length);
RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
AccumuloHiveRow row = new AccumuloHiveRow();
assertNextRowHasName(reader, row, "r1", "brian");
assertNextRowHasName(reader, row, "r2", "mark");
assertNextRowHasName(reader, row, "r3", "dennis");
assertNextRowHasName(reader, row, "r4", "frank");
// The protected row was the last one; the reader must now be exhausted.
assertFalse(reader.next(new Text("r4"), row));
}

/**
 * Reads the next record from the reader and asserts that it carries the
 * expected row id and the expected value in the cf:name cell.
 */
private void assertNextRowHasName(RecordReader<Text, AccumuloHiveRow> reader, AccumuloHiveRow row, String expectedRowId, String expectedName) throws Exception {
Text rowId = new Text(expectedRowId);
assertTrue(reader.next(rowId, row));
assertEquals(rowId.toString(), row.getRowId());
assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
assertArrayEquals(expectedName.getBytes(), row.getValue(COLUMN_FAMILY, NAME));
}
Use of org.apache.accumulo.core.data.Value in the Apache Hive project.
From the class TestHiveAccumuloTableInputFormat, method testNameEqualBrian.
/**
 * Scans with a PrimitiveComparisonFilter configured as {@code cf:name == "brian"}
 * and verifies that exactly row r1 survives, with all four of its columns
 * (name, sid, dgrs, mills) present in the decoded result.
 */
@Test
public void testNameEqualBrian() throws Exception {
Connector con = mockInstance.getConnector(USER, new PasswordToken(PASS.getBytes()));
Scanner scan = con.createScanner(TEST_TABLE, new Authorizations("blah"));
IteratorSetting is = new IteratorSetting(1, PrimitiveComparisonFilter.FILTER_PREFIX + 1, PrimitiveComparisonFilter.class);
is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, StringCompare.class.getName());
is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, Equal.class.getName());
// The filter expects its constant operand Base64-encoded.
is.addOption(PrimitiveComparisonFilter.CONST_VAL, new String(Base64.encodeBase64("brian".getBytes())));
is.addOption(PrimitiveComparisonFilter.COLUMN, "cf:name");
scan.addScanIterator(is);
boolean foundName = false;
boolean foundSid = false;
boolean foundDegrees = false;
boolean foundMillis = false;
for (Map.Entry<Key, Value> kv : scan) {
// The filter collapses a whole row into one KV pair; decode it back into cells.
SortedMap<Key, Value> items = PrimitiveComparisonFilter.decodeRow(kv.getKey(), kv.getValue());
for (Map.Entry<Key, Value> item : items.entrySet()) {
// Only r1 ("brian") may pass the equality filter.
assertEquals("r1", item.getKey().getRow().toString());
if (item.getKey().getColumnQualifier().equals(NAME)) {
foundName = true;
assertArrayEquals("brian".getBytes(), item.getValue().get());
} else if (item.getKey().getColumnQualifier().equals(SID)) {
foundSid = true;
assertArrayEquals(parseIntBytes("1"), item.getValue().get());
} else if (item.getKey().getColumnQualifier().equals(DEGREES)) {
foundDegrees = true;
assertArrayEquals(parseDoubleBytes("44.5"), item.getValue().get());
} else if (item.getKey().getColumnQualifier().equals(MILLIS)) {
foundMillis = true;
assertArrayEquals(parseLongBytes("555"), item.getValue().get());
}
}
}
// Use short-circuit && rather than bitwise & for boolean logic.
assertTrue(foundDegrees && foundMillis && foundName && foundSid);
}
Use of org.apache.accumulo.core.data.Value in the Apache Hive project.
From the class TestHiveAccumuloTableInputFormat, method createMockKeyValues.
/**
 * Per-test setup: builds a MockInstance named after the running test, points the
 * input format's JobConf at it, creates the test table, and seeds it with three
 * rows (r1..r3) each carrying name, sid, dgrs and mills columns.
 */
@Before
public void createMockKeyValues() throws Exception {
// Make a MockInstance here; by setting the instance name to be the same as this
// mock instance we can "trick" the InputFormat into using a MockInstance.
mockInstance = new MockInstance(test.getMethodName());
inputformat = new HiveAccumuloTableInputFormat();
conf = new JobConf();
conf.set(AccumuloSerDeParameters.TABLE_NAME, TEST_TABLE);
conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
conf.set(AccumuloSerDeParameters.USER_NAME, USER);
conf.set(AccumuloSerDeParameters.USER_PASS, PASS);
// Not used for mock, but required by the input format.
conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181");
columnNames = Arrays.asList("name", "sid", "dgrs", "mills");
columnTypes = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.longTypeInfo);
conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:name,cf:sid,cf:dgrs,cf:mills");
conf.set(serdeConstants.LIST_COLUMNS, "name,sid,dgrs,mills");
conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,double,bigint");
con = mockInstance.getConnector(USER, new PasswordToken(PASS.getBytes()));
con.tableOperations().create(TEST_TABLE);
con.securityOperations().changeUserAuthorizations(USER, new Authorizations("blah"));
BatchWriterConfig writerConf = new BatchWriterConfig();
BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
writer.addMutation(buildRow("r1", "brian", "1", "44.5", "555"));
writer.addMutation(buildRow("r2", "mark", "2", "55.5", "666"));
writer.addMutation(buildRow("r3", "dennis", "3", "65.5", "777"));
writer.close();
}

/**
 * Builds one test-table mutation containing the four standard columns
 * (cf:name, cf:sid, cf:dgrs, cf:mills) with the given string-encoded values.
 */
private Mutation buildRow(String rowId, String name, String sid, String degrees, String millis) throws Exception {
Mutation m = new Mutation(new Text(rowId));
m.put(COLUMN_FAMILY, NAME, new Value(name.getBytes()));
m.put(COLUMN_FAMILY, SID, new Value(parseIntBytes(sid)));
m.put(COLUMN_FAMILY, DEGREES, new Value(parseDoubleBytes(degrees)));
m.put(COLUMN_FAMILY, MILLIS, new Value(parseLongBytes(millis)));
return m;
}
Use of org.apache.accumulo.core.data.Value in the Apache Hive project.
From the class TestHiveAccumuloTableInputFormat, method testDegreesAndMillis.
/**
 * Stacks two PrimitiveComparisonFilters ({@code dgrs >= 55.6} and
 * {@code mills < 778}) on a scan and verifies that exactly one row survives,
 * that it is r3 ("dennis"), and that all four columns come through the
 * double-decoded result.
 */
@Test
public void testDegreesAndMillis() throws Exception {
Connector con = mockInstance.getConnector(USER, new PasswordToken(PASS.getBytes()));
Scanner scan = con.createScanner(TEST_TABLE, new Authorizations("blah"));
IteratorSetting is = new IteratorSetting(1, PrimitiveComparisonFilter.FILTER_PREFIX + 1, PrimitiveComparisonFilter.class);
is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, DoubleCompare.class.getName());
is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, GreaterThanOrEqual.class.getName());
is.addOption(PrimitiveComparisonFilter.CONST_VAL, new String(Base64.encodeBase64(parseDoubleBytes("55.6"))));
is.addOption(PrimitiveComparisonFilter.COLUMN, "cf:dgrs");
scan.addScanIterator(is);
IteratorSetting is2 = new IteratorSetting(2, PrimitiveComparisonFilter.FILTER_PREFIX + 2, PrimitiveComparisonFilter.class);
is2.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, LongCompare.class.getName());
is2.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, LessThan.class.getName());
is2.addOption(PrimitiveComparisonFilter.CONST_VAL, new String(Base64.encodeBase64(parseLongBytes("778"))));
is2.addOption(PrimitiveComparisonFilter.COLUMN, "cf:mills");
scan.addScanIterator(is2);
boolean foundDennis = false;
int totalCount = 0;
for (Map.Entry<Key, Value> kv : scan) {
boolean foundName = false;
boolean foundSid = false;
boolean foundDegrees = false;
boolean foundMillis = false;
// Two stacked filters encode the row twice, so it is decoded twice here —
// presumably each filter wraps the previous output; TODO confirm against
// PrimitiveComparisonFilter.encodeRow/decodeRow.
SortedMap<Key, Value> items = PrimitiveComparisonFilter.decodeRow(kv.getKey(), kv.getValue());
for (Map.Entry<Key, Value> item : items.entrySet()) {
SortedMap<Key, Value> nestedItems = PrimitiveComparisonFilter.decodeRow(item.getKey(), item.getValue());
for (Map.Entry<Key, Value> nested : nestedItems.entrySet()) {
if (nested.getKey().getRow().toString().equals("r3")) {
foundDennis = true;
}
if (nested.getKey().getColumnQualifier().equals(NAME)) {
foundName = true;
} else if (nested.getKey().getColumnQualifier().equals(SID)) {
foundSid = true;
} else if (nested.getKey().getColumnQualifier().equals(DEGREES)) {
foundDegrees = true;
} else if (nested.getKey().getColumnQualifier().equals(MILLIS)) {
foundMillis = true;
}
}
}
totalCount++;
// Use short-circuit && rather than bitwise & for boolean logic.
assertTrue(foundDegrees && foundMillis && foundName && foundSid);
}
assertTrue(foundDennis);
// JUnit convention: expected value first so failure messages read correctly.
assertEquals(1, totalCount);
}
Aggregations