Use of org.apache.spark.sql.execution.datasources.InsertIntoDataSourceCommand in project OpenLineage by OpenLineage.
The example below comes from the class LogicalPlanSerializerTest, method testSerializeBigQueryPlan, which checks that the logical-plan serializer produces the expected JSON for an InsertIntoDataSourceCommand wrapping a BigQuery relation.
@Test
public void testSerializeBigQueryPlan() throws IOException {
  String query = "SELECT date FROM bigquery-public-data.google_analytics_sample.test";
  System.setProperty("GOOGLE_CLOUD_PROJECT", "test_serialization");
  SparkBigQueryConfig config = SparkBigQueryConfig.from(
      ImmutableMap.of("query", query, "dataset", "test-dataset", "maxparallelism", "2", "partitionexpirationms", "2"),
      ImmutableMap.of(), new Configuration(), 10, SQLConf.get(), "", Optional.empty());
  BigQueryRelation bigQueryRelation = new BigQueryRelation(
      config,
      TableInfo.newBuilder(TableId.of("dataset", "test"), new TestTableDefinition()).build(),
      mock(SQLContext.class));
  LogicalRelation logicalRelation = new LogicalRelation(
      bigQueryRelation,
      Seq$.MODULE$.<AttributeReference>newBuilder()
          .$plus$eq(new AttributeReference("name", StringType$.MODULE$, false, Metadata.empty(),
              ExprId.apply(1L), Seq$.MODULE$.<String>empty()))
          .result(),
      Option.empty(),
      false);
  InsertIntoDataSourceCommand command = new InsertIntoDataSourceCommand(logicalRelation, logicalRelation, false);

  Map<String, Object> commandActualNode = objectMapper.readValue(logicalPlanSerializer.serialize(command), mapTypeReference);
  Map<String, Object> bigqueryActualNode = objectMapper.readValue(logicalPlanSerializer.serialize(logicalRelation), mapTypeReference);

  Path expectedCommandNodePath = Paths.get("src", "test", "resources", "test_data", "serde", "insertintods-node.json");
  Path expectedBigQueryRelationNodePath = Paths.get("src", "test", "resources", "test_data", "serde", "bigqueryrelation-node.json");
  Map<String, Object> expectedCommandNode = objectMapper.readValue(expectedCommandNodePath.toFile(), mapTypeReference);
  Map<String, Object> expectedBigQueryRelationNode = objectMapper.readValue(expectedBigQueryRelationNodePath.toFile(), mapTypeReference);

  assertThat(commandActualNode).satisfies(new MatchesMapRecursively(expectedCommandNode, Collections.singleton("exprId")));
  assertThat(bigqueryActualNode).satisfies(new MatchesMapRecursively(expectedBigQueryRelationNode, Collections.singleton("exprId")));
}
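The two assertions compare the serialized plan nodes against JSON fixtures checked into src/test/resources, ignoring the exprId field because Spark assigns expression IDs non-deterministically between runs. The actual comparison is done by the project's MatchesMapRecursively test helper; the sketch below only illustrates that recursive, key-omitting comparison idea, with hypothetical class and method names, and is not the helper's real implementation.

import java.util.Map;
import java.util.Objects;
import java.util.Set;

// Illustrative sketch (hypothetical names): compares nested maps while skipping volatile keys
// such as "exprId" -- the general idea behind MatchesMapRecursively.
class RecursiveMapComparisonSketch {

  @SuppressWarnings("unchecked")
  static boolean matches(Map<String, Object> expected, Map<String, Object> actual, Set<String> omittedKeys) {
    for (Map.Entry<String, Object> entry : expected.entrySet()) {
      String key = entry.getKey();
      if (omittedKeys.contains(key)) {
        continue; // skip fields that legitimately differ between runs
      }
      Object expectedValue = entry.getValue();
      Object actualValue = actual.get(key);
      if (expectedValue instanceof Map && actualValue instanceof Map) {
        // descend into nested objects of the serialized plan
        if (!matches((Map<String, Object>) expectedValue, (Map<String, Object>) actualValue, omittedKeys)) {
          return false;
        }
      } else if (!Objects.equals(expectedValue, actualValue)) {
        return false;
      }
    }
    return true;
  }
}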