Search in sources :

Example 1 with Aggregate

Example use of org.apache.spark.sql.catalyst.plans.logical.Aggregate in the project OpenLineage by OpenLineage.

Taken from the class LogicalPlanSerializerTest, method testSerializeLogicalPlan.

@Test
public void testSerializeLogicalPlan() throws IOException {
    // JDBC connection details for the relation under test.
    String jdbcUrl = "jdbc:postgresql://postgreshost:5432/sparkdata";
    String sparkTableName = "my_spark_table";

    // Scala immutable map holding the JDBC driver property.
    @SuppressWarnings("unchecked")
    scala.collection.immutable.Map<String, String> jdbcProperties =
        (scala.collection.immutable.Map<String, String>)
            Map$.MODULE$
                .<String, String>newBuilder()
                .$plus$eq(Tuple2.apply("driver", Driver.class.getName()))
                .result();

    // Single-column schema: a non-nullable string field named "name".
    StructType schema =
        new StructType(
            new StructField[] {
              new StructField("name", StringType$.MODULE$, false, Metadata.empty())
            });

    // JDBC-backed base relation wrapped in a LogicalRelation node.
    JDBCRelation jdbcRelation =
        new JDBCRelation(
            schema,
            new Partition[] {},
            new JDBCOptions(jdbcUrl, sparkTableName, jdbcProperties),
            mock(SparkSession.class));
    LogicalRelation logicalRelation =
        new LogicalRelation(
            jdbcRelation,
            Seq$.MODULE$
                .<AttributeReference>newBuilder()
                .$plus$eq(
                    new AttributeReference(
                        "name",
                        StringType$.MODULE$,
                        false,
                        Metadata.empty(),
                        ExprId.apply(1L),
                        Seq$.MODULE$.<String>empty()))
                .result(),
            Option.empty(),
            false);

    // Aggregate with empty grouping and aggregate expressions over the relation.
    Aggregate aggregate =
        new Aggregate(
            Seq$.MODULE$.<Expression>empty(),
            Seq$.MODULE$.<NamedExpression>empty(),
            logicalRelation);

    // Serialize both plan nodes and parse the JSON output back into maps.
    Map<String, Object> aggregateActualNode =
        objectMapper.readValue(logicalPlanSerializer.serialize(aggregate), mapTypeReference);
    Map<String, Object> logicalRelationActualNode =
        objectMapper.readValue(logicalPlanSerializer.serialize(logicalRelation), mapTypeReference);

    // Load the golden JSON fixtures to compare against.
    Path expectedAggregateNodePath =
        Paths.get("src", "test", "resources", "test_data", "serde", "aggregate-node.json");
    Path logicalRelationNodePath =
        Paths.get("src", "test", "resources", "test_data", "serde", "logicalrelation-node.json");
    Map<String, Object> expectedAggregateNode =
        objectMapper.readValue(expectedAggregateNodePath.toFile(), mapTypeReference);
    Map<String, Object> expectedLogicalRelationNode =
        objectMapper.readValue(logicalRelationNodePath.toFile(), mapTypeReference);

    // Serialized output must recursively match the expected node structure.
    assertThat(aggregateActualNode).satisfies(new MatchesMapRecursively(expectedAggregateNode));
    assertThat(logicalRelationActualNode)
        .satisfies(new MatchesMapRecursively(expectedLogicalRelationNode));
}
Also used : Path(java.nio.file.Path) SparkSession(org.apache.spark.sql.SparkSession) StructType(org.apache.spark.sql.types.StructType) AttributeReference(org.apache.spark.sql.catalyst.expressions.AttributeReference) JDBCRelation(org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation) LogicalRelation(org.apache.spark.sql.execution.datasources.LogicalRelation) StructField(org.apache.spark.sql.types.StructField) JDBCOptions(org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions) Aggregate(org.apache.spark.sql.catalyst.plans.logical.Aggregate) Map(java.util.Map) ImmutableMap(com.google.cloud.spark.bigquery.repackaged.com.google.common.collect.ImmutableMap) HashMap(scala.collection.immutable.HashMap) Test(org.junit.jupiter.api.Test)

Aggregations

ImmutableMap (com.google.cloud.spark.bigquery.repackaged.com.google.common.collect.ImmutableMap)1 Path (java.nio.file.Path)1 Map (java.util.Map)1 SparkSession (org.apache.spark.sql.SparkSession)1 AttributeReference (org.apache.spark.sql.catalyst.expressions.AttributeReference)1 Aggregate (org.apache.spark.sql.catalyst.plans.logical.Aggregate)1 LogicalRelation (org.apache.spark.sql.execution.datasources.LogicalRelation)1 JDBCOptions (org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions)1 JDBCRelation (org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation)1 StructField (org.apache.spark.sql.types.StructField)1 StructType (org.apache.spark.sql.types.StructType)1 Test (org.junit.jupiter.api.Test)1 HashMap (scala.collection.immutable.HashMap)1