Use of org.apache.hadoop.fs.LocalFileSystem in the Apache Accumulo project: class RFileTest, method testBadVisIterable.
@Test(expected = IllegalArgumentException.class)
public void testBadVisIterable() throws Exception {
    // Exercise the Iterable-based append(...) overload: a key carrying the
    // malformed (unbalanced) visibility expression "(A&(B" must be rejected
    // with IllegalArgumentException.
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    String testFile = createTmpTestFile();
    try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
        writer.startDefaultLocalityGroup();
        Key badVisKey = new Key("r1", "f1", "q1", "(A&(B");
        Entry<Key, Value> badEntry = new AbstractMap.SimpleEntry<>(badVisKey, new Value("".getBytes()));
        writer.append(Collections.singletonList(badEntry));
    }
}
Use of org.apache.hadoop.fs.LocalFileSystem in the Apache Accumulo project: class RFileTest, method testWrongGroup.
@Test(expected = IllegalArgumentException.class)
public void testWrongGroup() throws Exception {
    // Once "fam1" is bound to a named locality group, appending that family
    // to the default locality group must fail with IllegalArgumentException.
    LocalFileSystem fs = FileSystem.getLocal(new Configuration());
    String file = createTmpTestFile();
    try (RFileWriter writer = RFile.newWriter().to(file).withFileSystem(fs).build()) {
        writer.startNewLocalityGroup("lg1", "fam1");
        Key inGroup = new Key("r1", "fam1", "q1");
        writer.append(inGroup, new Value("".getBytes()));
        writer.startDefaultLocalityGroup();
        // should not be able to append the column family fam1 to default locality group
        Key misplaced = new Key("r1", "fam1", "q2");
        writer.append(misplaced, new Value("".getBytes()));
    }
}
Use of org.apache.hadoop.fs.LocalFileSystem in the Apache Accumulo project: class RFileTest, method testIllegalColumn.
@Test(expected = IllegalArgumentException.class)
public void testIllegalColumn() throws Exception {
    // The current locality group only admits family "fam1"; appending a key
    // with family "f1" must be rejected with IllegalArgumentException.
    LocalFileSystem fs = FileSystem.getLocal(new Configuration());
    String file = createTmpTestFile();
    try (RFileWriter writer = RFile.newWriter().to(file).withFileSystem(fs).build()) {
        writer.startNewLocalityGroup("lg1", "fam1");
        // should not be able to append the column family f1
        Key outsideGroup = new Key("r1", "f1", "q1");
        writer.append(outsideGroup, new Value("".getBytes()));
    }
}
Use of org.apache.hadoop.fs.LocalFileSystem in the Apache Accumulo project: class RFileTest, method testLocalityGroups.
@Test
public void testLocalityGroups() throws Exception {
    // Write two named locality groups ("z" holds families 0-1, "h" holds
    // family 2) plus a default group (families 3-9), then verify that the
    // scanner returns exactly the data of each group and that the reader
    // reports the expected group -> column-family mapping.
    SortedMap<Key, Value> testData1 = createTestData(0, 10, 0, 2, 10);
    SortedMap<Key, Value> testData2 = createTestData(0, 10, 2, 1, 10);
    SortedMap<Key, Value> defaultData = createTestData(0, 10, 3, 7, 10);
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    String testFile = createTmpTestFile();
    // try-with-resources: the original leaked the writer if an append threw.
    try (RFileWriter writer = RFile.newWriter().to(testFile).withFileSystem(localFs).build()) {
        writer.startNewLocalityGroup("z", colStr(0), colStr(1));
        writer.append(testData1.entrySet());
        writer.startNewLocalityGroup("h", colStr(2));
        writer.append(testData2.entrySet());
        writer.startDefaultLocalityGroup();
        writer.append(defaultData.entrySet());
    }
    Scanner scanner = RFile.newScanner().from(testFile).withFileSystem(localFs).build();
    // try/finally: the original leaked the scanner on assertion failure.
    try {
        scanner.fetchColumnFamily(new Text(colStr(0)));
        scanner.fetchColumnFamily(new Text(colStr(1)));
        Assert.assertEquals(testData1, toMap(scanner));
        scanner.clearColumns();
        scanner.fetchColumnFamily(new Text(colStr(2)));
        Assert.assertEquals(testData2, toMap(scanner));
        scanner.clearColumns();
        for (int i = 3; i < 10; i++) {
            scanner.fetchColumnFamily(new Text(colStr(i)));
        }
        Assert.assertEquals(defaultData, toMap(scanner));
        scanner.clearColumns();
        // with no columns fetched, all data comes back
        Assert.assertEquals(createTestData(10, 10, 10), toMap(scanner));
    } finally {
        scanner.close();
    }
    Reader reader = getReader(localFs, testFile);
    try {
        Map<String, ArrayList<ByteSequence>> lGroups = reader.getLocalityGroupCF();
        Assert.assertTrue(lGroups.containsKey("z"));
        // assertEquals gives an informative message on failure, unlike assertTrue(size == 2)
        Assert.assertEquals(2, lGroups.get("z").size());
        Assert.assertTrue(lGroups.get("z").contains(new ArrayByteSequence(colStr(0))));
        Assert.assertTrue(lGroups.get("z").contains(new ArrayByteSequence(colStr(1))));
        Assert.assertTrue(lGroups.containsKey("h"));
        Assert.assertEquals(Arrays.asList(new ArrayByteSequence(colStr(2))), lGroups.get("h"));
    } finally {
        reader.close();
    }
}
Use of org.apache.hadoop.fs.LocalFileSystem in the Apache Accumulo project: class RFileTest, method testMultipleFilesAndCache.
@Test
public void testMultipleFilesAndCache() throws Exception {
    // Partition the test data across three RFiles (by key hash), then verify
    // that a multi-file scanner configured with index and data caches merges
    // them back into the full data set.
    SortedMap<Key, Value> testData = createTestData(100, 10, 10);
    List<String> files = Arrays.asList(createTmpTestFile(), createTmpTestFile(), createTmpTestFile());
    LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
    for (int i = 0; i < files.size(); i++) {
        try (RFileWriter writer = RFile.newWriter().to(files.get(i)).withFileSystem(localFs).build()) {
            for (Entry<Key, Value> entry : testData.entrySet()) {
                // each entry goes to exactly one file, chosen by key hash
                if (entry.getKey().hashCode() % files.size() == i) {
                    writer.append(entry.getKey(), entry.getValue());
                }
            }
        }
    }
    Scanner scanner = RFile.newScanner().from(files.toArray(new String[0])).withFileSystem(localFs).withIndexCache(1000000).withDataCache(10000000).build();
    // try/finally: the original leaked the scanner if the assertion failed
    try {
        Assert.assertEquals(testData, toMap(scanner));
    } finally {
        scanner.close();
    }
}
Aggregations