Example use of uk.gov.gchq.gaffer.accumulostore.SingleUseMiniAccumuloStore in the Gaffer project (gchq): the reInitialise method of the GetElementsInRangesHandlerTest class.
// Recreates the handler, the default view and both key-package stores before each test.
@BeforeEach
public void reInitialise() throws StoreException {
    handler = createHandler();

    // View covering both the test entity and edge groups.
    defaultView = new View.Builder()
            .entity(TestGroups.ENTITY)
            .edge(TestGroups.EDGE)
            .build();

    // Fresh single-use mini stores, one per key package.
    byteEntityStore = new SingleUseMiniAccumuloStore();
    gaffer1KeyStore = new SingleUseMiniAccumuloStore();
    byteEntityStore.initialise("byteEntityGraph", SCHEMA, PROPERTIES);
    gaffer1KeyStore.initialise("gaffer1Graph", SCHEMA, CLASSIC_PROPERTIES);

    // Populate each store with the standard test data.
    setupGraph(byteEntityStore, NUM_ENTRIES);
    setupGraph(gaffer1KeyStore, NUM_ENTRIES);
}
Example use of uk.gov.gchq.gaffer.accumulostore.SingleUseMiniAccumuloStore in the Gaffer project (gchq): the shouldFailTableValidationWhenMissingAggregatorIterator method of the TableUtilsTest class.
@Test
public void shouldFailTableValidationWhenMissingAggregatorIterator() throws Exception {
    final AccumuloStore store = new SingleUseMiniAccumuloStore();

    // Minimal schema: one directed edge whose string ID type aggregates via StringConcat.
    final TypeDefinition idType = new TypeDefinition.Builder()
            .clazz(String.class)
            .aggregateFunction(new StringConcat())
            .validateFunctions(new Exists())
            .build();
    final Schema schema = new Schema.Builder()
            .type(TestTypes.ID_STRING, idType)
            .type(TestTypes.DIRECTED_TRUE, Boolean.class)
            .edge(TestGroups.EDGE, new SchemaEdgeDefinition.Builder()
                    .source(TestTypes.ID_STRING)
                    .destination(TestTypes.ID_STRING)
                    .directed(TestTypes.DIRECTED_TRUE)
                    .build())
            .build();
    store.initialise(GRAPH_ID, schema, PROPERTIES);

    // Invalidate the table by stripping out its aggregator iterator.
    final Runnable invalidateTable = () -> {
        try {
            AddUpdateTableIterator.removeIterator(store, AccumuloStoreConstants.AGGREGATOR_ITERATOR_NAME);
        } catch (final StoreException e) {
            throw new RuntimeException(e);
        }
    };

    shouldFailTableValidationWhenTableInvalid(store, invalidateTable);
}
Example use of uk.gov.gchq.gaffer.accumulostore.SingleUseMiniAccumuloStore in the Gaffer project (gchq): the testBuildScanWithView method of the AccumuloStoreRelationTest class.
// Initialises a single-use store, runs a full scan through the Spark relation with the
// supplied view, and checks the returned rows against the elements matching returnElement.
private void testBuildScanWithView(final String name, final View view, final Predicate<Element> returnElement) throws OperationException, StoreException {
    // Given: a freshly initialised store populated with the test elements.
    final SparkSession sparkSession = SparkSessionProvider.getSparkSession();
    final Schema schema = getSchema();
    final AccumuloStore store = new SingleUseMiniAccumuloStore();
    store.initialise("graphId", schema, PROPERTIES);
    addElements(store);

    // When: build and collect a scan over the relation.
    final AccumuloStoreRelation relation = new AccumuloStoreRelation(
            SparkContextUtil.createContext(new User(), sparkSession),
            Collections.emptyList(), view, store, null);
    final RDD<Row> rdd = relation.buildScan();
    final Row[] returnedElements = (Row[]) rdd.collect();

    // Then: actual results.
    final Set<Row> results = new HashSet<>();
    Collections.addAll(results, returnedElements);

    // Expected results: convert every element accepted by the predicate into a Row.
    final SchemaToStructTypeConverter schemaConverter =
            new SchemaToStructTypeConverter(schema, view, new ArrayList<>());
    final ConvertElementToRow elementConverter = new ConvertElementToRow(
            schemaConverter.getUsedProperties(),
            schemaConverter.getPropertyNeedsConversion(),
            schemaConverter.getConverterByProperty());
    final Set<Row> expectedRows = new HashSet<>();
    Streams.toStream(getElements())
            .filter(returnElement)
            .map(elementConverter::apply)
            .forEach(expectedRows::add);

    assertEquals(expectedRows, results);
}
Example use of uk.gov.gchq.gaffer.accumulostore.SingleUseMiniAccumuloStore in the Gaffer project (gchq): the shouldReturnCorrectDataToMapReduceJob method of the InputFormatTest class.
/**
 * Initialises a store with the requested key package, loads {@code data}, runs the
 * MapReduce {@code Driver} locally against it, and asserts the job's output lines
 * equal {@code expectedResults}.
 *
 * Fixes over the previous version:
 * - the switch now has explicit {@code break}s and a {@code default} that fails fast,
 *   instead of silently initialising with a null graphId on an unhandled enum value;
 * - the SequenceFile reader is closed via try-with-resources even when reading or the
 *   assertion fails (previously it leaked on any exception).
 */
private void shouldReturnCorrectDataToMapReduceJob(final Schema schema, final KeyPackage kp, final List<Element> data, final GraphFilters graphFilters, final User user, final String instanceName, final Set<String> expectedResults, final java.nio.file.Path tempDir) throws Exception {
    final AccumuloProperties properties = PROPERTIES.clone();
    final SingleUseMiniAccumuloStore store = new SingleUseMiniAccumuloStore();

    // Select the key package under test and the matching graph id.
    final String graphId;
    switch (kp) {
        case BYTE_ENTITY_KEY_PACKAGE:
            properties.setKeyPackageClass(ByteEntityKeyPackage.class.getName());
            graphId = "byteEntityGraph";
            break;
        case CLASSIC_KEY_PACKAGE:
            properties.setKeyPackageClass(ClassicKeyPackage.class.getName());
            graphId = "gaffer1Graph";
            break;
        default:
            throw new IllegalArgumentException("Unsupported key package: " + kp);
    }

    try {
        store.initialise(graphId, schema, properties);
    } catch (final StoreException e) {
        fail("StoreException thrown: " + e);
    }
    setupGraph(store, data);

    // Set up a local (non-cluster) Hadoop configuration.
    final JobConf conf = new JobConf();
    conf.set("fs.default.name", "file:///");
    conf.set("mapred.job.tracker", "local");
    final FileSystem fs = FileSystem.getLocal(conf);

    // Update configuration with instance, table name, etc.
    store.updateConfiguration(conf, graphFilters, user);

    // Run the driver against a clean output folder.
    final File outputFolder = Files.createDirectories(tempDir).toFile();
    FileUtils.deleteDirectory(outputFolder);
    final Driver driver = new Driver(outputFolder.getAbsolutePath());
    driver.setConf(conf);
    driver.run(new String[] {});

    // Read the results; try-with-resources guarantees the reader is closed
    // even if reading or the assertion below fails.
    final Set<String> results = new HashSet<>();
    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, new Path(outputFolder + "/part-m-00000"), conf)) {
        final Text text = new Text();
        while (reader.next(text)) {
            results.add(text.toString());
        }
    }
    assertEquals(expectedResults, results);
    FileUtils.deleteDirectory(outputFolder);
}
Example use of uk.gov.gchq.gaffer.accumulostore.SingleUseMiniAccumuloStore in the Gaffer project (gchq): the testBuildScanSpecifyColumnsWithView method of the AccumuloStoreRelationTest class.
/**
 * Initialises a single-use store, runs a column-pruned scan through the Spark relation
 * with the supplied view, and checks the returned rows against the elements matching
 * {@code returnElement} converted using only {@code requiredColumns}.
 *
 * Consistency fix: the actual-results set is now built with
 * {@code new HashSet<>(Arrays.asList(...))}, matching the sibling
 * {@code testBuildScanWithView}, instead of a manual index loop.
 */
private void testBuildScanSpecifyColumnsWithView(final View view, final String[] requiredColumns, final Predicate<Element> returnElement) throws OperationException, StoreException {
    // Given: a freshly initialised store populated with the test elements.
    final SparkSession sparkSession = SparkSessionProvider.getSparkSession();
    final Schema schema = getSchema();
    final AccumuloStore store = new SingleUseMiniAccumuloStore();
    store.initialise("graphId", schema, PROPERTIES);
    addElements(store);

    // When: build and collect a scan restricted to the required columns.
    final AccumuloStoreRelation relation = new AccumuloStoreRelation(SparkContextUtil.createContext(new User(), sparkSession), Collections.emptyList(), view, store, null);
    final RDD<Row> rdd = relation.buildScan(requiredColumns);
    final Row[] returnedElements = (Row[]) rdd.collect();

    // Then: actual results.
    final Set<Row> results = new HashSet<>(Arrays.asList(returnedElements));

    // Expected results: convert matching elements using only the required columns.
    final SchemaToStructTypeConverter schemaConverter = new SchemaToStructTypeConverter(schema, view, new ArrayList<>());
    final ConvertElementToRow elementConverter = new ConvertElementToRow(new LinkedHashSet<>(Arrays.asList(requiredColumns)), schemaConverter.getPropertyNeedsConversion(), schemaConverter.getConverterByProperty());
    final Set<Row> expectedRows = new HashSet<>();
    Streams.toStream(getElements()).filter(returnElement).map(elementConverter::apply).forEach(expectedRows::add);
    assertEquals(expectedRows, results);
}
Aggregations