Example 16 with SQLContext

Use of org.apache.spark.sql.SQLContext in project Gaffer by gchq, from the class GetDataFrameOfElementsExample, method runExamples:

@Override
public void runExamples() {
    // Logging must be switched off and back on explicitly, as Spark produces
    // some logs even when the log level is set to OFF.
    ROOT_LOGGER.setLevel(Level.OFF);
    final SparkConf sparkConf = new SparkConf()
            .setMaster("local")
            .setAppName("getDataFrameOfElementsWithEntityGroup")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final SparkContext sc = new SparkContext(sparkConf);
    sc.setLogLevel("OFF");
    final SQLContext sqlc = new SQLContext(sc);
    final Graph graph = getGraph();
    try {
        getDataFrameOfElementsWithEntityGroup(sqlc, graph);
        getDataFrameOfElementsWithEdgeGroup(sqlc, graph);
    } catch (final OperationException e) {
        throw new RuntimeException(e);
    }
    sc.stop();
    ROOT_LOGGER.setLevel(Level.INFO);
}
Also used: SparkContext (org.apache.spark.SparkContext), Graph (uk.gov.gchq.gaffer.graph.Graph), SparkConf (org.apache.spark.SparkConf), SQLContext (org.apache.spark.sql.SQLContext), OperationException (uk.gov.gchq.gaffer.operation.OperationException)
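
The bodies of the two helper methods are not shown. Based on the GetDataFrameOfElements builder used in Example 20 below, a minimal sketch of what getDataFrameOfElementsWithEntityGroup might look like; the group name "entity" and the call to show() are illustrative, not taken from the Gaffer source (imports as in the "Also used" lists on this page, plus Dataset from org.apache.spark.sql):

private void getDataFrameOfElementsWithEntityGroup(final SQLContext sqlc, final Graph graph) throws OperationException {
    // Expose the graph's elements as a DataFrame, restricted to a single
    // entity group via the View.
    final GetDataFrameOfElements operation = new GetDataFrameOfElements.Builder()
            .sqlContext(sqlc)
            .view(new View.Builder()
                    .entity("entity") // placeholder group name
                    .build())
            .build();
    final Dataset<Row> dataFrame = graph.execute(operation, new User());
    // Print the first rows so the example produces visible output.
    dataFrame.show();
}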

Example 17 with SQLContext

Use of org.apache.spark.sql.SQLContext in project Gaffer by gchq, from the class AccumuloStoreRelationTest, method testBuildScanSpecifyColumnsAndFiltersWithView:

private void testBuildScanSpecifyColumnsAndFiltersWithView(final String name, final View view, final String[] requiredColumns, final Filter[] filters, final Predicate<Element> returnElement) throws OperationException, StoreException {
    // Given
    final SQLContext sqlContext = getSqlContext(name);
    final Schema schema = getSchema();
    final AccumuloProperties properties = AccumuloProperties.loadStoreProperties(getClass().getResourceAsStream("/store.properties"));
    final SingleUseMockAccumuloStore store = new SingleUseMockAccumuloStore();
    store.initialise(schema, properties);
    addElements(store);
    // When
    final AccumuloStoreRelation relation = new AccumuloStoreRelation(sqlContext, Collections.emptyList(), view, store, new User());
    final RDD<Row> rdd = relation.buildScan(requiredColumns, filters);
    final Row[] returnedElements = (Row[]) rdd.collect();
    // Then
    //  - Actual results are:
    final Set<Row> results = new HashSet<>(Arrays.asList(returnedElements));
    //  - Expected results are:
    final SchemaToStructTypeConverter schemaConverter = new SchemaToStructTypeConverter(schema, view, new ArrayList<>());
    final ConvertElementToRow elementConverter = new ConvertElementToRow(
            new LinkedHashSet<>(Arrays.asList(requiredColumns)),
            schemaConverter.getPropertyNeedsConversion(),
            schemaConverter.getConverterByProperty());
    final Set<Row> expectedRows = new HashSet<>();
    StreamSupport.stream(getElements().spliterator(), false)
            .filter(returnElement)
            .map(elementConverter::apply)
            .forEach(expectedRows::add);
    assertEquals(expectedRows, results);
    sqlContext.sparkContext().stop();
}
Also used: SingleUseMockAccumuloStore (uk.gov.gchq.gaffer.accumulostore.SingleUseMockAccumuloStore), User (uk.gov.gchq.gaffer.user.User), AccumuloProperties (uk.gov.gchq.gaffer.accumulostore.AccumuloProperties), Schema (uk.gov.gchq.gaffer.store.schema.Schema), ConvertElementToRow (uk.gov.gchq.gaffer.spark.operation.dataframe.ConvertElementToRow), Row (org.apache.spark.sql.Row), SchemaToStructTypeConverter (uk.gov.gchq.gaffer.spark.operation.dataframe.converter.schema.SchemaToStructTypeConverter), SQLContext (org.apache.spark.sql.SQLContext), HashSet (java.util.HashSet), LinkedHashSet (java.util.LinkedHashSet)
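
A hypothetical invocation of this helper; the group name, column names, and filter value below are illustrative placeholders, not taken from the Gaffer test suite (Element is uk.gov.gchq.gaffer.data.element.Element, Predicate is java.util.function.Predicate):

// Exercise the scan with one pruned-column set and one pushed-down filter.
final View view = new View.Builder()
        .entity("BasicEntity") // placeholder group
        .build();
final String[] requiredColumns = {"group", "vertex", "property1"};
final Filter[] filters = {new EqualTo("property1", 1)};
// The predicate mirrors the pushed-down filter, so the expected rows are
// computed from the same elements the scan should return.
final Predicate<Element> returnElement = element ->
        Integer.valueOf(1).equals(element.getProperty("property1"));
testBuildScanSpecifyColumnsAndFiltersWithView("testColumnsAndFilters", view, requiredColumns, filters, returnElement);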

Example 18 with SQLContext

Use of org.apache.spark.sql.SQLContext in project Gaffer by gchq, from the class AccumuloStoreRelationTest, method testBuildScanSpecifyColumnsWithView:

private void testBuildScanSpecifyColumnsWithView(final String name, final View view, final String[] requiredColumns, final Predicate<Element> returnElement) throws OperationException, StoreException {
    // Given
    final SQLContext sqlContext = getSqlContext(name);
    final Schema schema = getSchema();
    final AccumuloProperties properties = AccumuloProperties.loadStoreProperties(getClass().getResourceAsStream("/store.properties"));
    final SingleUseMockAccumuloStore store = new SingleUseMockAccumuloStore();
    store.initialise(schema, properties);
    addElements(store);
    // When
    final AccumuloStoreRelation relation = new AccumuloStoreRelation(sqlContext, Collections.emptyList(), view, store, new User());
    final RDD<Row> rdd = relation.buildScan(requiredColumns);
    final Row[] returnedElements = (Row[]) rdd.collect();
    // Then
    //  - Actual results are:
    final Set<Row> results = new HashSet<>(Arrays.asList(returnedElements));
    //  - Expected results are:
    final SchemaToStructTypeConverter schemaConverter = new SchemaToStructTypeConverter(schema, view, new ArrayList<>());
    final ConvertElementToRow elementConverter = new ConvertElementToRow(
            new LinkedHashSet<>(Arrays.asList(requiredColumns)),
            schemaConverter.getPropertyNeedsConversion(),
            schemaConverter.getConverterByProperty());
    final Set<Row> expectedRows = new HashSet<>();
    StreamSupport.stream(getElements().spliterator(), false)
            .filter(returnElement)
            .map(elementConverter::apply)
            .forEach(expectedRows::add);
    assertEquals(expectedRows, results);
    sqlContext.sparkContext().stop();
}
Also used: SingleUseMockAccumuloStore (uk.gov.gchq.gaffer.accumulostore.SingleUseMockAccumuloStore), User (uk.gov.gchq.gaffer.user.User), AccumuloProperties (uk.gov.gchq.gaffer.accumulostore.AccumuloProperties), Schema (uk.gov.gchq.gaffer.store.schema.Schema), ConvertElementToRow (uk.gov.gchq.gaffer.spark.operation.dataframe.ConvertElementToRow), Row (org.apache.spark.sql.Row), SchemaToStructTypeConverter (uk.gov.gchq.gaffer.spark.operation.dataframe.converter.schema.SchemaToStructTypeConverter), SQLContext (org.apache.spark.sql.SQLContext), HashSet (java.util.HashSet), LinkedHashSet (java.util.LinkedHashSet)
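
This test differs from Example 17 only in that no filters are pushed down: the single-argument buildScan(requiredColumns) matches the shape of Spark 1.x's PrunedScan interface, while the two-argument overload in Example 17 matches PrunedFilteredScan. A minimal sketch of the two call shapes, assuming a relation built as in the tests above; the column and filter names are placeholders:

// PrunedScan path: only column pruning reaches the relation.
final RDD<Row> prunedOnly = relation.buildScan(new String[]{"group", "vertex"});

// PrunedFilteredScan path: column pruning plus filter push-down.
final RDD<Row> prunedAndFiltered = relation.buildScan(
        new String[]{"group", "vertex"},
        new Filter[]{new GreaterThan("count", 5)}); // GreaterThan from org.apache.spark.sql.sources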

Example 19 with SQLContext

Use of org.apache.spark.sql.SQLContext in project Gaffer by gchq, from the class FilterToOperationConverterTest, method testSpecifySource:

@Test
public void testSpecifySource() throws OperationException {
    final Schema schema = getSchema();
    final SQLContext sqlContext = getSqlContext("testSpecifySource");
    final Filter[] filters = new Filter[1];
    filters[0] = new EqualTo(SchemaToStructTypeConverter.SRC_COL_NAME, "0");
    FiltersToOperationConverter converter = new FiltersToOperationConverter(
            sqlContext, getViewFromSchema(schema), schema, filters);
    AbstractGetRDD<?> operation = converter.getOperation();
    assertTrue(operation instanceof GetRDDOfElements);
    assertEquals(0, operation.getView().getEntityGroups().size());
    assertEquals(EDGE_GROUPS, operation.getView().getEdgeGroups());
    final Set<EntitySeed> seeds = new HashSet<>();
    for (final Object seed : ((GetRDDOfElements) operation).getSeeds()) {
        seeds.add((EntitySeed) seed);
    }
    assertEquals(Collections.singleton(new EntitySeed("0")), seeds);
    sqlContext.sparkContext().stop();
}
Also used: Filter (org.apache.spark.sql.sources.Filter), Schema (uk.gov.gchq.gaffer.store.schema.Schema), EntitySeed (uk.gov.gchq.gaffer.operation.data.EntitySeed), SQLContext (org.apache.spark.sql.SQLContext), EqualTo (org.apache.spark.sql.sources.EqualTo), GetRDDOfElements (uk.gov.gchq.gaffer.spark.operation.scalardd.GetRDDOfElements), HashSet (java.util.HashSet), Test (org.junit.Test)
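
An equality filter on the source column is converted into a seeded GetRDDOfElements rather than a full scan, so only data touching the matching vertex is read. A hypothetical variant for the destination column, assuming SchemaToStructTypeConverter exposes a DST_COL_NAME constant analogous to SRC_COL_NAME:

final Filter[] filters = new Filter[]{new EqualTo(SchemaToStructTypeConverter.DST_COL_NAME, "1")}; // DST_COL_NAME assumed
final FiltersToOperationConverter converter = new FiltersToOperationConverter(
        sqlContext, getViewFromSchema(schema), schema, filters);
final AbstractGetRDD<?> operation = converter.getOperation();
// Equality on a vertex column becomes an EntitySeed, so the store is queried
// by seed instead of being scanned in full.
assertTrue(operation instanceof GetRDDOfElements);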

Example 20 with SQLContext

Use of org.apache.spark.sql.SQLContext in project Gaffer by gchq, from the class GetDataFrameOfElementsHandlerTest, method checkCanDealWithNonStandardProperties:

@Test
public void checkCanDealWithNonStandardProperties() throws OperationException {
    final Graph graph = getGraph("/schema-DataFrame/dataSchemaNonstandardTypes.json", getElementsWithNonStandardProperties());
    final SQLContext sqlContext = getSqlContext("checkCanDealWithNonStandardProperties");
    // Edges group - check get correct edges
    GetDataFrameOfElements dfOperation = new GetDataFrameOfElements.Builder()
            .sqlContext(sqlContext)
            .view(new View.Builder()
                    .edge(EDGE_GROUP)
                    .build())
            .build();
    Dataset<Row> dataFrame = graph.execute(dfOperation, new User());
    Set<Row> results = new HashSet<>(dataFrame.collectAsList());
    final Set<Row> expectedRows = new HashSet<>();
    final MutableList<Object> fields1 = new MutableList<>();
    // Scala mutable Map, created via the companion object (Map$.MODULE$);
    // it holds the expected frequencies for the freqMap property.
    Map<String, Long> freqMap = Map$.MODULE$.empty();
    freqMap.put("Y", 1000L);
    freqMap.put("Z", 10000L);
    fields1.appendElem(EDGE_GROUP);
    fields1.appendElem("B");
    fields1.appendElem("C");
    fields1.appendElem(freqMap);
    final HyperLogLogPlus hllpp = new HyperLogLogPlus(5, 5);
    hllpp.offer("AAA");
    hllpp.offer("BBB");
    fields1.appendElem(hllpp.cardinality());
    expectedRows.add(Row$.MODULE$.fromSeq(fields1));
    assertEquals(expectedRows, results);
    // Entities group - check get correct entities
    dfOperation = new GetDataFrameOfElements.Builder()
            .sqlContext(sqlContext)
            .view(new View.Builder()
                    .entity(ENTITY_GROUP)
                    .build())
            .build();
    dataFrame = graph.execute(dfOperation, new User());
    results.clear();
    results.addAll(dataFrame.collectAsList());
    expectedRows.clear();
    fields1.clear();
    freqMap.clear();
    freqMap.put("W", 10L);
    freqMap.put("X", 100L);
    fields1.appendElem(ENTITY_GROUP);
    fields1.appendElem("A");
    fields1.appendElem(freqMap);
    final HyperLogLogPlus hllpp2 = new HyperLogLogPlus(5, 5);
    hllpp2.offer("AAA");
    fields1.appendElem(hllpp2.cardinality());
    expectedRows.add(Row$.MODULE$.fromSeq(fields1));
    assertEquals(expectedRows, results);
    sqlContext.sparkContext().stop();
}
Also used: GetDataFrameOfElements (uk.gov.gchq.gaffer.spark.operation.dataframe.GetDataFrameOfElements), User (uk.gov.gchq.gaffer.user.User), View (uk.gov.gchq.gaffer.data.elementdefinition.view.View), Graph (uk.gov.gchq.gaffer.graph.Graph), MutableList (scala.collection.mutable.MutableList), HyperLogLogPlus (com.clearspring.analytics.stream.cardinality.HyperLogLogPlus), Row (org.apache.spark.sql.Row), SQLContext (org.apache.spark.sql.SQLContext), HashSet (java.util.HashSet), Test (org.junit.Test)
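
Once returned, the DataFrame supports ordinary Spark SQL operators. A minimal follow-on sketch; the column names "group" and "dst" are illustrative, as the real names come from the Gaffer schema used to build the graph:

// Hypothetical query over the DataFrame produced by GetDataFrameOfElements.
final Dataset<Row> filtered = dataFrame
        .select("group", "dst")   // prune to two columns
        .filter("dst = 'C'");     // SQL-style predicate on the destination
filtered.show();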

Aggregations

SQLContext (org.apache.spark.sql.SQLContext): 34
Test (org.junit.Test): 20
HashSet (java.util.HashSet): 15
Schema (uk.gov.gchq.gaffer.store.schema.Schema): 14
Row (org.apache.spark.sql.Row): 12
Filter (org.apache.spark.sql.sources.Filter): 11
User (uk.gov.gchq.gaffer.user.User): 11
View (uk.gov.gchq.gaffer.data.elementdefinition.view.View): 10
EqualTo (org.apache.spark.sql.sources.EqualTo): 9
Graph (uk.gov.gchq.gaffer.graph.Graph): 9
GetDataFrameOfElements (uk.gov.gchq.gaffer.spark.operation.dataframe.GetDataFrameOfElements): 8
JavaSparkContext (org.apache.spark.api.java.JavaSparkContext): 7
MutableList (scala.collection.mutable.MutableList): 7
ArrayList (java.util.ArrayList): 6
DataFrame (org.apache.spark.sql.DataFrame): 5
IsMoreThan (uk.gov.gchq.gaffer.function.filter.IsMoreThan): 5
EntitySeed (uk.gov.gchq.gaffer.operation.data.EntitySeed): 5
GetRDDOfElements (uk.gov.gchq.gaffer.spark.operation.scalardd.GetRDDOfElements): 5
SparkConf (org.apache.spark.SparkConf): 4
SparkContext (org.apache.spark.SparkContext): 4