Search in sources:

Example 1 with MutableList

use of scala.collection.mutable.MutableList in project Gaffer by gchq.

In the class GetDataFrameOfElementsHandlerTest, the method checkCanDealWithUserDefinedConversion:

/**
 * Checks that a user-supplied {@code Converter} is applied when building the
 * DataFrame: the custom property is surfaced in the expected column form for
 * both the edge group and the entity group.
 */
@Test
public void checkCanDealWithUserDefinedConversion() throws OperationException {
    final Graph graph = getGraph("/schema-DataFrame/dataSchemaUserDefinedConversion.json", getElementsForUserDefinedConversion());
    final SQLContext sqlContext = getSqlContext("checkCanDealWithUserDefinedConversion");

    // Register the user-defined converter so the handler can translate the custom property.
    final List<Converter> converters = new ArrayList<>();
    converters.add(new MyPropertyConverter());

    // Edge group - the DataFrame should contain exactly the one expected edge row.
    GetDataFrameOfElements dfOperation = new GetDataFrameOfElements.Builder()
            .sqlContext(sqlContext)
            .view(new View.Builder().edge(EDGE_GROUP).build())
            .converters(converters)
            .build();
    Dataset<Row> dataFrame = graph.execute(dfOperation, new User());
    Set<Row> results = new HashSet<>(dataFrame.collectAsList());

    final Map<String, Long> edgeFreqMap = Map$.MODULE$.empty();
    edgeFreqMap.put("Y", 1000L);
    edgeFreqMap.put("Z", 10000L);
    final HyperLogLogPlus edgeSketch = new HyperLogLogPlus(5, 5);
    edgeSketch.offer("AAA");
    edgeSketch.offer("BBB");
    final MutableList<Object> edgeFields = new MutableList<>();
    edgeFields.appendElem(EDGE_GROUP);
    edgeFields.appendElem("B");
    edgeFields.appendElem("C");
    edgeFields.appendElem(edgeFreqMap);
    // The HyperLogLogPlus sketch is exposed as its cardinality estimate.
    edgeFields.appendElem(edgeSketch.cardinality());
    edgeFields.appendElem(50);
    final Set<Row> expectedEdgeRows = new HashSet<>();
    expectedEdgeRows.add(Row$.MODULE$.fromSeq(edgeFields));
    assertEquals(expectedEdgeRows, results);

    // Entity group - repeat the check for the expected entity row.
    dfOperation = new GetDataFrameOfElements.Builder()
            .sqlContext(sqlContext)
            .view(new View.Builder().entity(ENTITY_GROUP).build())
            .converters(converters)
            .build();
    dataFrame = graph.execute(dfOperation, new User());
    results = new HashSet<>(dataFrame.collectAsList());

    final Map<String, Long> entityFreqMap = Map$.MODULE$.empty();
    entityFreqMap.put("W", 10L);
    entityFreqMap.put("X", 100L);
    final HyperLogLogPlus entitySketch = new HyperLogLogPlus(5, 5);
    entitySketch.offer("AAA");
    final MutableList<Object> entityFields = new MutableList<>();
    entityFields.appendElem(ENTITY_GROUP);
    entityFields.appendElem("A");
    entityFields.appendElem(entityFreqMap);
    entityFields.appendElem(entitySketch.cardinality());
    entityFields.appendElem(10);
    final Set<Row> expectedEntityRows = new HashSet<>();
    expectedEntityRows.add(Row$.MODULE$.fromSeq(entityFields));
    assertEquals(expectedEntityRows, results);

    sqlContext.sparkContext().stop();
}
Also used : GetDataFrameOfElements(uk.gov.gchq.gaffer.spark.operation.dataframe.GetDataFrameOfElements) User(uk.gov.gchq.gaffer.user.User) ArrayList(java.util.ArrayList) View(uk.gov.gchq.gaffer.data.elementdefinition.view.View) Graph(uk.gov.gchq.gaffer.graph.Graph) MutableList(scala.collection.mutable.MutableList) HyperLogLogPlus(com.clearspring.analytics.stream.cardinality.HyperLogLogPlus) Converter(uk.gov.gchq.gaffer.spark.operation.dataframe.converter.property.Converter) Row(org.apache.spark.sql.Row) SQLContext(org.apache.spark.sql.SQLContext) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 2 with MutableList

use of scala.collection.mutable.MutableList in project Gaffer by gchq.

In the class GetDataFrameOfElementsHandlerTest, the method checkGetCorrectElementsInDataFrameMultipleGroups:

/**
 * Checks that a view containing both the entity and the edge group yields a
 * DataFrame with rows for both groups (columns from the other group's schema
 * are null), and that narrowing the view to the entity group alone returns
 * only the entity rows.
 */
@Test
public void checkGetCorrectElementsInDataFrameMultipleGroups() throws OperationException {
    final Graph graph = getGraph("/schema-DataFrame/dataSchema.json", getElements());
    final SQLContext sqlContext = getSqlContext("checkGetCorrectElementsInDataFrameMultipleGroups");
    // Use entity and edges group - check get correct data
    GetDataFrameOfElements dfOperation = new GetDataFrameOfElements.Builder().sqlContext(sqlContext).view(new View.Builder().entity(ENTITY_GROUP).edge(EDGE_GROUP).build()).build();
    Dataset<Row> dataFrame = graph.execute(dfOperation, new User());
    final Set<Row> results = new HashSet<>(dataFrame.collectAsList());
    final Set<Row> expectedRows = new HashSet<>();
    for (int i = 0; i < NUM_ELEMENTS; i++) {
        // Edge row "B" - the vertex column (entity-only) is null for edges.
        final MutableList<Object> fields1 = new MutableList<>();
        fields1.appendElem(EDGE_GROUP);
        fields1.appendElem(null);
        fields1.appendElem(1);
        fields1.appendElem(2);
        fields1.appendElem(3.0F);
        fields1.appendElem(4.0D);
        fields1.appendElem(5L);
        fields1.appendElem(100L);
        fields1.appendElem("" + i);
        fields1.appendElem("B");
        expectedRows.add(Row$.MODULE$.fromSeq(fields1));
        // Edge row "C" - last property scales with i.
        final MutableList<Object> fields2 = new MutableList<>();
        fields2.appendElem(EDGE_GROUP);
        fields2.appendElem(null);
        fields2.appendElem(6);
        fields2.appendElem(7);
        fields2.appendElem(8.0F);
        fields2.appendElem(9.0D);
        fields2.appendElem(10L);
        fields2.appendElem(i * 200L);
        fields2.appendElem("" + i);
        fields2.appendElem("C");
        expectedRows.add(Row$.MODULE$.fromSeq(fields2));
        // Entity row - the src/dst columns (edge-only) are null for entities.
        final MutableList<Object> fields3 = new MutableList<>();
        fields3.appendElem(ENTITY_GROUP);
        fields3.appendElem("" + i);
        fields3.appendElem(1);
        fields3.appendElem(i);
        fields3.appendElem(3.0F);
        fields3.appendElem(4.0D);
        fields3.appendElem(5L);
        fields3.appendElem(6);
        fields3.appendElem(null);
        fields3.appendElem(null);
        expectedRows.add(Row$.MODULE$.fromSeq(fields3));
    }
    assertEquals(expectedRows, results);
    // Entities group - check get correct entities
    dfOperation = new GetDataFrameOfElements.Builder().sqlContext(sqlContext).view(new View.Builder().entity(ENTITY_GROUP).build()).build();
    dataFrame = graph.execute(dfOperation, new User());
    results.clear();
    results.addAll(dataFrame.collectAsList());
    expectedRows.clear();
    for (int i = 0; i < NUM_ELEMENTS; i++) {
        // Fix: removed a redundant clear() that was called on the freshly constructed list.
        final MutableList<Object> fields1 = new MutableList<>();
        fields1.appendElem(ENTITY_GROUP);
        fields1.appendElem("" + i);
        fields1.appendElem(1);
        fields1.appendElem(i);
        fields1.appendElem(3.0F);
        fields1.appendElem(4.0D);
        fields1.appendElem(5L);
        fields1.appendElem(6);
        expectedRows.add(Row$.MODULE$.fromSeq(fields1));
    }
    assertEquals(expectedRows, results);
    sqlContext.sparkContext().stop();
}
Also used : GetDataFrameOfElements(uk.gov.gchq.gaffer.spark.operation.dataframe.GetDataFrameOfElements) User(uk.gov.gchq.gaffer.user.User) View(uk.gov.gchq.gaffer.data.elementdefinition.view.View) Graph(uk.gov.gchq.gaffer.graph.Graph) MutableList(scala.collection.mutable.MutableList) Row(org.apache.spark.sql.Row) SQLContext(org.apache.spark.sql.SQLContext) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 3 with MutableList

use of scala.collection.mutable.MutableList in project Gaffer by gchq.

In the class GetDataFrameOfElementsHandlerTest, the method checkGetCorrectElementsInDataFrameWithProjectionAndFiltering:

/**
 * Checks that Spark-side projection ({@code select}) and filtering
 * ({@code filter}) on the edge DataFrame return the expected rows:
 * only the "C" edges satisfy {@code property2 > 4.0}.
 */
@Test
public void checkGetCorrectElementsInDataFrameWithProjectionAndFiltering() throws OperationException {
    final Graph graph = getGraph("/schema-DataFrame/dataSchema.json", getElements());
    final SQLContext sqlContext = getSqlContext("checkGetCorrectElementsInDataFrameWithProjectionAndFiltering");

    // Build the edge-group DataFrame once and query it twice below.
    final GetDataFrameOfElements dfOperation = new GetDataFrameOfElements.Builder()
            .sqlContext(sqlContext)
            .view(new View.Builder().edge(EDGE_GROUP).build())
            .build();
    final Dataset<Row> dataFrame = graph.execute(dfOperation, new User());

    // All columns, filtered on property2 > 4.0: expect one "C" edge row per element.
    Set<Row> results = new HashSet<>(dataFrame.filter("property2 > 4.0").collectAsList());
    final Set<Row> expectedRows = new HashSet<>();
    for (int index = 0; index < NUM_ELEMENTS; index++) {
        final MutableList<Object> rowFields = new MutableList<>();
        rowFields.appendElem(EDGE_GROUP);
        rowFields.appendElem("" + index);
        rowFields.appendElem("C");
        rowFields.appendElem(6);
        rowFields.appendElem(7);
        rowFields.appendElem(8.0F);
        rowFields.appendElem(9.0D);
        rowFields.appendElem(10L);
        rowFields.appendElem(index * 200L);
        expectedRows.add(Row$.MODULE$.fromSeq(rowFields));
    }
    assertEquals(expectedRows, results);

    // Project to (property2, property3) with the same filter; the projected
    // rows are identical across elements, so the expected set collapses to one row.
    results = new HashSet<>(dataFrame.select("property2", "property3").filter("property2 > 4.0").collectAsList());
    expectedRows.clear();
    for (int index = 0; index < NUM_ELEMENTS; index++) {
        final MutableList<Object> rowFields = new MutableList<>();
        rowFields.appendElem(8.0F);
        rowFields.appendElem(9.0D);
        expectedRows.add(Row$.MODULE$.fromSeq(rowFields));
    }
    assertEquals(expectedRows, results);

    sqlContext.sparkContext().stop();
}
Also used : GetDataFrameOfElements(uk.gov.gchq.gaffer.spark.operation.dataframe.GetDataFrameOfElements) User(uk.gov.gchq.gaffer.user.User) Graph(uk.gov.gchq.gaffer.graph.Graph) MutableList(scala.collection.mutable.MutableList) Row(org.apache.spark.sql.Row) SQLContext(org.apache.spark.sql.SQLContext) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 4 with MutableList

use of scala.collection.mutable.MutableList in project Gaffer by gchq.

In the class GetDataFrameOfElementsHandlerTest, the method checkCanDealWithNonStandardProperties:

/**
 * Checks that non-standard property types (a frequency map and a
 * HyperLogLogPlus sketch) are converted into DataFrame-friendly column
 * values for both the edge group and the entity group.
 */
@Test
public void checkCanDealWithNonStandardProperties() throws OperationException {
    final Graph graph = getGraph("/schema-DataFrame/dataSchemaNonstandardTypes.json", getElementsWithNonStandardProperties());
    final SQLContext sqlContext = getSqlContext("checkCanDealWithNonStandardProperties");

    // Edge group - the DataFrame should contain exactly the one expected edge row.
    GetDataFrameOfElements dfOperation = new GetDataFrameOfElements.Builder()
            .sqlContext(sqlContext)
            .view(new View.Builder().edge(EDGE_GROUP).build())
            .build();
    Dataset<Row> dataFrame = graph.execute(dfOperation, new User());
    Set<Row> results = new HashSet<>(dataFrame.collectAsList());

    final Map<String, Long> edgeFreqMap = Map$.MODULE$.empty();
    edgeFreqMap.put("Y", 1000L);
    edgeFreqMap.put("Z", 10000L);
    final HyperLogLogPlus edgeSketch = new HyperLogLogPlus(5, 5);
    edgeSketch.offer("AAA");
    edgeSketch.offer("BBB");
    final MutableList<Object> edgeFields = new MutableList<>();
    edgeFields.appendElem(EDGE_GROUP);
    edgeFields.appendElem("B");
    edgeFields.appendElem("C");
    edgeFields.appendElem(edgeFreqMap);
    // The sketch column is exposed as the cardinality estimate.
    edgeFields.appendElem(edgeSketch.cardinality());
    final Set<Row> expectedEdgeRows = new HashSet<>();
    expectedEdgeRows.add(Row$.MODULE$.fromSeq(edgeFields));
    assertEquals(expectedEdgeRows, results);

    // Entity group - repeat the check for the expected entity row.
    dfOperation = new GetDataFrameOfElements.Builder()
            .sqlContext(sqlContext)
            .view(new View.Builder().entity(ENTITY_GROUP).build())
            .build();
    dataFrame = graph.execute(dfOperation, new User());
    results = new HashSet<>(dataFrame.collectAsList());

    final Map<String, Long> entityFreqMap = Map$.MODULE$.empty();
    entityFreqMap.put("W", 10L);
    entityFreqMap.put("X", 100L);
    final HyperLogLogPlus entitySketch = new HyperLogLogPlus(5, 5);
    entitySketch.offer("AAA");
    final MutableList<Object> entityFields = new MutableList<>();
    entityFields.appendElem(ENTITY_GROUP);
    entityFields.appendElem("A");
    entityFields.appendElem(entityFreqMap);
    entityFields.appendElem(entitySketch.cardinality());
    final Set<Row> expectedEntityRows = new HashSet<>();
    expectedEntityRows.add(Row$.MODULE$.fromSeq(entityFields));
    assertEquals(expectedEntityRows, results);

    sqlContext.sparkContext().stop();
}
Also used : GetDataFrameOfElements(uk.gov.gchq.gaffer.spark.operation.dataframe.GetDataFrameOfElements) User(uk.gov.gchq.gaffer.user.User) View(uk.gov.gchq.gaffer.data.elementdefinition.view.View) Graph(uk.gov.gchq.gaffer.graph.Graph) MutableList(scala.collection.mutable.MutableList) HyperLogLogPlus(com.clearspring.analytics.stream.cardinality.HyperLogLogPlus) Row(org.apache.spark.sql.Row) SQLContext(org.apache.spark.sql.SQLContext) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 5 with MutableList

use of scala.collection.mutable.MutableList in project Gaffer by gchq.

In the class GetDataFrameOfElementsHandlerTest, the method checkGetCorrectElementsInDataFrame:

/**
 * Checks that the basic operation returns the correct rows: two edge rows
 * ("B" and "C") per element when the view selects the edge group, and one
 * entity row per element when the view selects the entity group.
 */
@Test
public void checkGetCorrectElementsInDataFrame() throws OperationException {
    final Graph graph = getGraph("/schema-DataFrame/dataSchema.json", getElements());
    final SQLContext sqlContext = getSqlContext("checkGetCorrectElementsInDataFrame");
    // Edges group - check get correct edges
    GetDataFrameOfElements dfOperation = new GetDataFrameOfElements.Builder().sqlContext(sqlContext).view(new View.Builder().edge(EDGE_GROUP).build()).build();
    Dataset<Row> dataFrame = graph.execute(dfOperation, new User());
    Set<Row> results = new HashSet<>(dataFrame.collectAsList());
    final Set<Row> expectedRows = new HashSet<>();
    for (int i = 0; i < NUM_ELEMENTS; i++) {
        // Edge row "B" - fixed property values.
        final MutableList<Object> fields1 = new MutableList<>();
        fields1.appendElem(EDGE_GROUP);
        fields1.appendElem("" + i);
        fields1.appendElem("B");
        fields1.appendElem(1);
        fields1.appendElem(2);
        fields1.appendElem(3.0F);
        fields1.appendElem(4.0D);
        fields1.appendElem(5L);
        fields1.appendElem(100L);
        expectedRows.add(Row$.MODULE$.fromSeq(fields1));
        // Edge row "C" - last property scales with i.
        final MutableList<Object> fields2 = new MutableList<>();
        fields2.appendElem(EDGE_GROUP);
        fields2.appendElem("" + i);
        fields2.appendElem("C");
        fields2.appendElem(6);
        fields2.appendElem(7);
        fields2.appendElem(8.0F);
        fields2.appendElem(9.0D);
        fields2.appendElem(10L);
        fields2.appendElem(i * 200L);
        expectedRows.add(Row$.MODULE$.fromSeq(fields2));
    }
    assertEquals(expectedRows, results);
    // Entities group - check get correct entities
    dfOperation = new GetDataFrameOfElements.Builder().sqlContext(sqlContext).view(new View.Builder().entity(ENTITY_GROUP).build()).build();
    dataFrame = graph.execute(dfOperation, new User());
    results.clear();
    results.addAll(dataFrame.collectAsList());
    expectedRows.clear();
    for (int i = 0; i < NUM_ELEMENTS; i++) {
        // Fix: removed a redundant clear() that was called on the freshly constructed list.
        final MutableList<Object> fields1 = new MutableList<>();
        fields1.appendElem(ENTITY_GROUP);
        fields1.appendElem("" + i);
        fields1.appendElem(1);
        fields1.appendElem(i);
        fields1.appendElem(3.0F);
        fields1.appendElem(4.0D);
        fields1.appendElem(5L);
        fields1.appendElem(6);
        expectedRows.add(Row$.MODULE$.fromSeq(fields1));
    }
    assertEquals(expectedRows, results);
    sqlContext.sparkContext().stop();
}
Also used : GetDataFrameOfElements(uk.gov.gchq.gaffer.spark.operation.dataframe.GetDataFrameOfElements) User(uk.gov.gchq.gaffer.user.User) View(uk.gov.gchq.gaffer.data.elementdefinition.view.View) Graph(uk.gov.gchq.gaffer.graph.Graph) MutableList(scala.collection.mutable.MutableList) Row(org.apache.spark.sql.Row) SQLContext(org.apache.spark.sql.SQLContext) HashSet(java.util.HashSet) Test(org.junit.Test)

Aggregations

HashSet (java.util.HashSet)7 Row (org.apache.spark.sql.Row)7 SQLContext (org.apache.spark.sql.SQLContext)7 Test (org.junit.Test)7 MutableList (scala.collection.mutable.MutableList)7 Graph (uk.gov.gchq.gaffer.graph.Graph)7 GetDataFrameOfElements (uk.gov.gchq.gaffer.spark.operation.dataframe.GetDataFrameOfElements)7 User (uk.gov.gchq.gaffer.user.User)7 View (uk.gov.gchq.gaffer.data.elementdefinition.view.View)5 HyperLogLogPlus (com.clearspring.analytics.stream.cardinality.HyperLogLogPlus)2 ArrayList (java.util.ArrayList)1 ElementFilter (uk.gov.gchq.gaffer.data.element.function.ElementFilter)1 ViewElementDefinition (uk.gov.gchq.gaffer.data.elementdefinition.view.ViewElementDefinition)1 IsMoreThan (uk.gov.gchq.gaffer.function.filter.IsMoreThan)1 Converter (uk.gov.gchq.gaffer.spark.operation.dataframe.converter.property.Converter)1