Use of org.apache.spark.SparkConf in project Gaffer by gchq.
From the class GetJavaRDDOfAllElementsHandlerTest, the method checkGetAllElementsInJavaRDD:
@Test
public void checkGetAllElementsInJavaRDD() throws OperationException, IOException {
final Graph graph1 = new Graph.Builder()
        .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
        .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
        .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
        .storeProperties(getClass().getResourceAsStream("/store.properties"))
        .build();
final List<Element> elements = new ArrayList<>();
final Set<Element> expectedElements = new HashSet<>();
for (int i = 0; i < 10; i++) {
final Entity entity = new Entity(TestGroups.ENTITY);
entity.setVertex("" + i);
final Edge edge1 = new Edge(TestGroups.EDGE);
edge1.setSource("" + i);
edge1.setDestination("B");
edge1.setDirected(false);
edge1.putProperty(TestPropertyNames.COUNT, 2);
final Edge edge2 = new Edge(TestGroups.EDGE);
edge2.setSource("" + i);
edge2.setDestination("C");
edge2.setDirected(false);
edge2.putProperty(TestPropertyNames.COUNT, 4);
elements.add(edge1);
elements.add(edge2);
elements.add(entity);
expectedElements.add(edge1);
expectedElements.add(edge2);
expectedElements.add(entity);
}
final User user = new User();
graph1.execute(new AddElements(elements), user);
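// A local Spark master is used for the test. Gaffer's Kryo Registrator is configured so Element
// instances serialise efficiently, and allowMultipleContexts permits each test to create its own
// SparkContext within the same JVM.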
final SparkConf sparkConf = new SparkConf()
        .setMaster("local")
        .setAppName("testCheckGetCorrectElementsInJavaRDDForEntitySeed")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
        .set("spark.driver.allowMultipleContexts", "true");
final JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);
// Create Hadoop configuration and serialise to a string
final Configuration configuration = new Configuration();
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
configuration.write(new DataOutputStream(baos));
final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
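// The serialised Hadoop configuration is passed to the RDD handler via the
// HADOOP_CONFIGURATION_KEY option added to the operation below.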
// Check that the RDD contains all of the added elements
final GetJavaRDDOfAllElements rddQuery = new GetJavaRDDOfAllElements.Builder().javaSparkContext(sparkContext).build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
final JavaRDD<Element> rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
final Set<Element> results = new HashSet<>(rdd.collect());
assertEquals(expectedElements, results);
sparkContext.stop();
}
Use of org.apache.spark.SparkConf in project Gaffer by gchq.
From the class GetJavaRDDOfElementsHandlerTest, the method checkGetCorrectElementsInRDDForEdgeSeed:
@Test
public void checkGetCorrectElementsInRDDForEdgeSeed() throws OperationException, IOException {
final Graph graph1 = new Graph.Builder()
        .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
        .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
        .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
        .storeProperties(getClass().getResourceAsStream("/store.properties"))
        .build();
final List<Element> elements = new ArrayList<>();
for (int i = 0; i < 10; i++) {
final Entity entity = new Entity(ENTITY_GROUP);
entity.setVertex("" + i);
final Edge edge1 = new Edge(EDGE_GROUP);
edge1.setSource("" + i);
edge1.setDestination("B");
edge1.setDirected(false);
edge1.putProperty("count", 2);
final Edge edge2 = new Edge(EDGE_GROUP);
edge2.setSource("" + i);
edge2.setDestination("C");
edge2.setDirected(false);
edge2.putProperty("count", 4);
elements.add(edge1);
elements.add(edge2);
elements.add(entity);
}
final User user = new User();
graph1.execute(new AddElements(elements), user);
final SparkConf sparkConf = new SparkConf()
        .setMaster("local")
        .setAppName("testCheckGetCorrectElementsInJavaRDDForEdgeSeed")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
        .set("spark.driver.allowMultipleContexts", "true");
final JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);
// Create Hadoop configuration and serialise to a string
final Configuration configuration = new Configuration();
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
configuration.write(new DataOutputStream(baos));
final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
// Check get correct edges for EdgeSeed 1 -> B
GetJavaRDDOfElements<EdgeSeed> rddQuery = new GetJavaRDDOfElements.Builder<EdgeSeed>()
        .javaSparkContext(sparkContext)
        .seeds(Collections.singleton(new EdgeSeed("1", "B", false)))
        .setIncludeEdges(GetOperation.IncludeEdgeType.ALL)
        .setIncludeEntities(false)
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
JavaRDD<Element> rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
final Set<Element> results = new HashSet<>();
results.addAll(rdd.collect());
final Set<Element> expectedElements = new HashSet<>();
final Edge edge1B = new Edge(EDGE_GROUP);
edge1B.setSource("1");
edge1B.setDestination("B");
edge1B.setDirected(false);
edge1B.putProperty("count", 2);
expectedElements.add(edge1B);
assertEquals(expectedElements, results);
// Check get entity for 1 when querying for 1 -> B and specifying entities only
rddQuery = new GetJavaRDDOfElements.Builder<EdgeSeed>()
        .javaSparkContext(sparkContext)
        .seeds(Collections.singleton(new EdgeSeed("1", "B", false)))
        .setIncludeEntities(true)
        .setIncludeEdges(GetOperation.IncludeEdgeType.NONE)
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
results.clear();
results.addAll(rdd.collect());
expectedElements.clear();
final Entity entity1 = new Entity(ENTITY_GROUP);
entity1.setVertex("1");
expectedElements.add(entity1);
assertEquals(expectedElements, results);
// Check get correct edges for 1 -> B when specifying edges only
rddQuery = new GetJavaRDDOfElements.Builder<EdgeSeed>()
        .javaSparkContext(sparkContext)
        .seeds(Collections.singleton(new EdgeSeed("1", "B", false)))
        .view(new View.Builder().edge(EDGE_GROUP).build())
        .setIncludeEntities(false)
        .setIncludeEdges(GetOperation.IncludeEdgeType.ALL)
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
results.clear();
results.addAll(rdd.collect());
expectedElements.clear();
expectedElements.add(edge1B);
assertEquals(expectedElements, results);
// Check get correct edges for 1 -> B and 5 -> C
Set<EdgeSeed> seeds = new HashSet<>();
seeds.add(new EdgeSeed("1", "B", false));
seeds.add(new EdgeSeed("5", "C", false));
rddQuery = new GetJavaRDDOfElements.Builder<EdgeSeed>()
        .javaSparkContext(sparkContext)
        .setIncludeEntities(false)
        .seeds(seeds)
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
results.clear();
results.addAll(rdd.collect());
final Edge edge5C = new Edge(EDGE_GROUP);
edge5C.setSource("5");
edge5C.setDestination("C");
edge5C.setDirected(false);
edge5C.putProperty("count", 4);
expectedElements.clear();
expectedElements.add(edge1B);
expectedElements.add(edge5C);
assertEquals(expectedElements, results);
sparkContext.stop();
}
Use of org.apache.spark.SparkConf in project Gaffer by gchq.
From the class GetRDDOfElementsHandlerTest, the method checkGetCorrectElementsInRDDForEntitySeed:
@Test
public void checkGetCorrectElementsInRDDForEntitySeed() throws OperationException, IOException {
final Graph graph1 = new Graph.Builder()
        .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
        .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
        .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
        .storeProperties(getClass().getResourceAsStream("/store.properties"))
        .build();
final List<Element> elements = new ArrayList<>();
for (int i = 0; i < 10; i++) {
final Entity entity = new Entity(ENTITY_GROUP);
entity.setVertex("" + i);
final Edge edge1 = new Edge(EDGE_GROUP);
edge1.setSource("" + i);
edge1.setDestination("B");
edge1.setDirected(false);
edge1.putProperty("count", 2);
final Edge edge2 = new Edge(EDGE_GROUP);
edge2.setSource("" + i);
edge2.setDestination("C");
edge2.setDirected(false);
edge2.putProperty("count", 4);
elements.add(edge1);
elements.add(edge2);
elements.add(entity);
}
final User user = new User();
graph1.execute(new AddElements(elements), user);
final SparkConf sparkConf = new SparkConf()
        .setMaster("local")
        .setAppName("testCheckGetCorrectElementsInRDDForEntitySeed")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
        .set("spark.driver.allowMultipleContexts", "true");
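// The Scala SparkContext is used here (rather than JavaSparkContext) because GetRDDOfElements
// returns a Scala RDD<Element>.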
final SparkContext sparkContext = new SparkContext(sparkConf);
// Create Hadoop configuration and serialise to a string
final Configuration configuration = new Configuration();
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
configuration.write(new DataOutputStream(baos));
final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
// Check get correct elements for seed "1"
GetRDDOfElements<EntitySeed> rddQuery = new GetRDDOfElements.Builder<EntitySeed>()
        .sparkContext(sparkContext)
        .seeds(Collections.singleton(new EntitySeed("1")))
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
RDD<Element> rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
Set<Element> results = new HashSet<>();
// NB: IDE suggests the cast in the following line is unnecessary but compilation fails without it
Element[] returnedElements = (Element[]) rdd.collect();
for (int i = 0; i < returnedElements.length; i++) {
results.add(returnedElements[i]);
}
final Set<Element> expectedElements = new HashSet<>();
final Entity entity1 = new Entity(ENTITY_GROUP);
entity1.setVertex("1");
final Edge edge1B = new Edge(EDGE_GROUP);
edge1B.setSource("1");
edge1B.setDestination("B");
edge1B.setDirected(false);
edge1B.putProperty("count", 2);
final Edge edge1C = new Edge(EDGE_GROUP);
edge1C.setSource("1");
edge1C.setDestination("C");
edge1C.setDirected(false);
edge1C.putProperty("count", 4);
expectedElements.add(entity1);
expectedElements.add(edge1B);
expectedElements.add(edge1C);
assertEquals(expectedElements, results);
// Check get correct entity for "1" when specifying entities only
rddQuery = new GetRDDOfElements.Builder<EntitySeed>()
        .sparkContext(sparkContext)
        .seeds(Collections.singleton(new EntitySeed("1")))
        .view(new View.Builder().entity(ENTITY_GROUP).build())
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
results.clear();
returnedElements = (Element[]) rdd.collect();
for (int i = 0; i < returnedElements.length; i++) {
results.add(returnedElements[i]);
}
expectedElements.clear();
expectedElements.add(entity1);
assertEquals(expectedElements, results);
// Check get correct edges for "1" when specifying edges only
rddQuery = new GetRDDOfElements.Builder<EntitySeed>()
        .sparkContext(sparkContext)
        .seeds(Collections.singleton(new EntitySeed("1")))
        .view(new View.Builder().edge(EDGE_GROUP).build())
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
results.clear();
returnedElements = (Element[]) rdd.collect();
for (int i = 0; i < returnedElements.length; i++) {
results.add(returnedElements[i]);
}
expectedElements.clear();
expectedElements.add(edge1B);
expectedElements.add(edge1C);
assertEquals(expectedElements, results);
// Check get correct edges for "1" and "5"
Set<EntitySeed> seeds = new HashSet<>();
seeds.add(new EntitySeed("1"));
seeds.add(new EntitySeed("5"));
rddQuery = new GetRDDOfElements.Builder<EntitySeed>().sparkContext(sparkContext).seeds(seeds).build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
results.clear();
returnedElements = (Element[]) rdd.collect();
for (int i = 0; i < returnedElements.length; i++) {
results.add(returnedElements[i]);
}
final Entity entity5 = new Entity(ENTITY_GROUP);
entity5.setVertex("5");
final Edge edge5B = new Edge(EDGE_GROUP);
edge5B.setSource("5");
edge5B.setDestination("B");
edge5B.setDirected(false);
edge5B.putProperty("count", 2);
final Edge edge5C = new Edge(EDGE_GROUP);
edge5C.setSource("5");
edge5C.setDestination("C");
edge5C.setDirected(false);
edge5C.putProperty("count", 4);
expectedElements.clear();
expectedElements.add(entity1);
expectedElements.add(edge1B);
expectedElements.add(edge1C);
expectedElements.add(entity5);
expectedElements.add(edge5B);
expectedElements.add(edge5C);
assertEquals(expectedElements, results);
sparkContext.stop();
}
Use of org.apache.spark.SparkConf in project Gaffer by gchq.
From the class GetRDDOfElementsHandlerTest, the method checkGetCorrectElementsInRDDForEdgeSeed:
@Test
public void checkGetCorrectElementsInRDDForEdgeSeed() throws OperationException, IOException {
final Graph graph1 = new Graph.Builder()
        .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
        .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
        .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
        .storeProperties(getClass().getResourceAsStream("/store.properties"))
        .build();
final List<Element> elements = new ArrayList<>();
for (int i = 0; i < 10; i++) {
final Entity entity = new Entity(ENTITY_GROUP);
entity.setVertex("" + i);
final Edge edge1 = new Edge(EDGE_GROUP);
edge1.setSource("" + i);
edge1.setDestination("B");
edge1.setDirected(false);
edge1.putProperty("count", 2);
final Edge edge2 = new Edge(EDGE_GROUP);
edge2.setSource("" + i);
edge2.setDestination("C");
edge2.setDirected(false);
edge2.putProperty("count", 4);
elements.add(edge1);
elements.add(edge2);
elements.add(entity);
}
final User user = new User();
graph1.execute(new AddElements(elements), user);
final SparkConf sparkConf = new SparkConf()
        .setMaster("local")
        .setAppName("testCheckGetCorrectElementsInRDDForEdgeSeed")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
        .set("spark.driver.allowMultipleContexts", "true");
final SparkContext sparkContext = new SparkContext(sparkConf);
// Create Hadoop configuration and serialise to a string
final Configuration configuration = new Configuration();
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
configuration.write(new DataOutputStream(baos));
final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
// Check get correct edges for EdgeSeed 1 -> B
GetRDDOfElements<EdgeSeed> rddQuery = new GetRDDOfElements.Builder<EdgeSeed>()
        .sparkContext(sparkContext)
        .seeds(Collections.singleton(new EdgeSeed("1", "B", false)))
        .includeEdges(GetOperation.IncludeEdgeType.ALL)
        .includeEntities(false)
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
RDD<Element> rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
Set<Element> results = new HashSet<>();
// NB: IDE suggests the cast in the following line is unnecessary but compilation fails without it
Element[] returnedElements = (Element[]) rdd.collect();
for (int i = 0; i < returnedElements.length; i++) {
results.add(returnedElements[i]);
}
final Set<Element> expectedElements = new HashSet<>();
final Edge edge1B = new Edge(EDGE_GROUP);
edge1B.setSource("1");
edge1B.setDestination("B");
edge1B.setDirected(false);
edge1B.putProperty("count", 2);
expectedElements.add(edge1B);
assertEquals(expectedElements, results);
// Check get entity for 1 when querying for 1 -> B and specifying entities only
rddQuery = new GetRDDOfElements.Builder<EdgeSeed>()
        .sparkContext(sparkContext)
        .seeds(Collections.singleton(new EdgeSeed("1", "B", false)))
        .includeEntities(true)
        .includeEdges(GetOperation.IncludeEdgeType.NONE)
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
results.clear();
returnedElements = (Element[]) rdd.collect();
for (int i = 0; i < returnedElements.length; i++) {
results.add(returnedElements[i]);
}
expectedElements.clear();
final Entity entity1 = new Entity(ENTITY_GROUP);
entity1.setVertex("1");
expectedElements.add(entity1);
assertEquals(expectedElements, results);
// Check get correct edges for 1 -> B when specifying edges only
rddQuery = new GetRDDOfElements.Builder<EdgeSeed>()
        .sparkContext(sparkContext)
        .seeds(Collections.singleton(new EdgeSeed("1", "B", false)))
        .view(new View.Builder().edge(EDGE_GROUP).build())
        .includeEntities(false)
        .includeEdges(GetOperation.IncludeEdgeType.ALL)
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
results.clear();
returnedElements = (Element[]) rdd.collect();
for (int i = 0; i < returnedElements.length; i++) {
results.add(returnedElements[i]);
}
expectedElements.clear();
expectedElements.add(edge1B);
assertEquals(expectedElements, results);
// Check get correct edges for 1 -> B and 5 -> C
Set<EdgeSeed> seeds = new HashSet<>();
seeds.add(new EdgeSeed("1", "B", false));
seeds.add(new EdgeSeed("5", "C", false));
rddQuery = new GetRDDOfElements.Builder<EdgeSeed>()
        .sparkContext(sparkContext)
        .includeEntities(false)
        .seeds(seeds)
        .build();
rddQuery.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
results.clear();
returnedElements = (Element[]) rdd.collect();
for (int i = 0; i < returnedElements.length; i++) {
results.add(returnedElements[i]);
}
final Edge edge5C = new Edge(EDGE_GROUP);
edge5C.setSource("5");
edge5C.setDestination("C");
edge5C.setDirected(false);
edge5C.putProperty("count", 4);
expectedElements.clear();
expectedElements.add(edge1B);
expectedElements.add(edge5C);
assertEquals(expectedElements, results);
sparkContext.stop();
}
Use of org.apache.spark.SparkConf in project Gaffer by gchq.
From the class ImportKeyValuePairRDDToAccumuloHandlerTest, the method checkImportRDDOfElements:
@Test
public void checkImportRDDOfElements() throws OperationException, IOException {
final Graph graph1 = new Graph.Builder()
        .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
        .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
        .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
        .addSchema(getClass().getResourceAsStream("/schema/storeSchema.json"))
        .storeProperties(getClass().getResourceAsStream("/store.properties"))
        .build();
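// A Scala ArrayBuffer is used here (its $plus$eq method is Scala's += operator as seen from Java)
// because SparkContext.parallelize expects a Scala Seq rather than a Java collection.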
final ArrayBuffer<Element> elements = new ArrayBuffer<>();
for (int i = 0; i < 10; i++) {
final Entity entity = new Entity(TestGroups.ENTITY);
entity.setVertex("" + i);
final Edge edge1 = new Edge(TestGroups.EDGE);
edge1.setSource("" + i);
edge1.setDestination("B");
edge1.setDirected(false);
edge1.putProperty(TestPropertyNames.COUNT, 2);
final Edge edge2 = new Edge(TestGroups.EDGE);
edge2.setSource("" + i);
edge2.setDestination("C");
edge2.setDirected(false);
edge2.putProperty(TestPropertyNames.COUNT, 4);
elements.$plus$eq(edge1);
elements.$plus$eq(edge2);
elements.$plus$eq(entity);
}
final User user = new User();
final SparkConf sparkConf = new SparkConf()
        .setMaster("local")
        .setAppName("tests")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
        .set("spark.driver.allowMultipleContexts", "true");
final SparkContext sparkContext = new SparkContext(sparkConf);
// Create Hadoop configuration and serialise to a string
final Configuration configuration = new Configuration();
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
configuration.write(new DataOutputStream(baos));
final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
final String outputPath = this.getClass().getResource("/").getPath().toString() + "load";
final String failurePath = this.getClass().getResource("/").getPath().toString() + "failure";
final File file = new File(outputPath);
if (file.exists()) {
FileUtils.forceDelete(file);
}
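// The element converter is broadcast to the executors; the ElementConverterFunction flat-maps each
// Gaffer Element to its Accumulo Key-Value pairs using that broadcast converter.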
final ElementConverterFunction func = new ElementConverterFunction(sparkContext.broadcast(
        new ByteEntityAccumuloElementConverter(graph1.getSchema()), ACCUMULO_ELEMENT_CONVERTER_CLASS_TAG));
final RDD<Tuple2<Key, Value>> elementRDD = sparkContext.parallelize(elements, 1, ELEMENT_CLASS_TAG)
        .flatMap(func, TUPLE2_CLASS_TAG);
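// The operation writes the key-value RDD out under outputPath and imports it into the Accumulo
// store backing the graph; failurePath is where any files that fail to import are placed.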
final ImportKeyValuePairRDDToAccumulo addRdd = new ImportKeyValuePairRDDToAccumulo.Builder()
        .input(elementRDD)
        .outputPath(outputPath)
        .failurePath(failurePath)
        .build();
graph1.execute(addRdd, user);
FileUtils.forceDelete(file);
// Check all elements were added
final GetRDDOfAllElements rddQuery = new GetRDDOfAllElements.Builder()
        .sparkContext(sparkContext)
        .option(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString)
        .build();
final RDD<Element> rdd = graph1.execute(rddQuery, user);
if (rdd == null) {
fail("No RDD returned");
}
final Set<Element> results = new HashSet<>();
final Element[] returnedElements = (Element[]) rdd.collect();
Collections.addAll(results, returnedElements);
assertEquals(elements.size(), results.size());
sparkContext.stop();
}
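The tests above all repeat the same setup. A minimal sketch of that shared pattern follows, assuming the same schema resources, store.properties and operation classes used in the listings; the app name is illustrative, imports are omitted as in the listings above, and the snippet would sit inside a test method that declares throws OperationException, IOException.

// Minimal sketch of the pattern shared by the tests above (distilled from those listings,
// not a separate Gaffer example): build the graph, create a local Spark context with Gaffer's
// Kryo registrator, serialise a Hadoop Configuration, attach it to the operation and execute.
final Graph graph = new Graph.Builder()
        .addSchema(getClass().getResourceAsStream("/schema/dataSchema.json"))
        .addSchema(getClass().getResourceAsStream("/schema/dataTypes.json"))
        .addSchema(getClass().getResourceAsStream("/schema/storeTypes.json"))
        .storeProperties(getClass().getResourceAsStream("/store.properties"))
        .build();
final SparkConf sparkConf = new SparkConf()
        .setMaster("local")
        .setAppName("sparkExample")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator");
final JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);
// Serialise a Hadoop Configuration to a String so it can be carried on the operation.
final Configuration configuration = new Configuration();
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
configuration.write(new DataOutputStream(baos));
final String configurationString = new String(baos.toByteArray(), CommonConstants.UTF_8);
// Build the operation, attach the configuration and execute it against the graph.
final GetJavaRDDOfAllElements operation = new GetJavaRDDOfAllElements.Builder()
        .javaSparkContext(sparkContext)
        .build();
operation.addOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY, configurationString);
final JavaRDD<Element> rdd = graph.execute(operation, new User());
sparkContext.stop();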