Usage examples of org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart from the OpenLineage project.
From the class LogicalPlanRunFacetBuilderTest, method testIsNotDefinedWithoutQueryExecution:
@Test
public void testIsNotDefinedWithoutQueryExecution() {
LogicalPlanRunFacetBuilder builder = new LogicalPlanRunFacetBuilder(OpenLineageContext.builder().sparkContext(sparkContext).openLineage(new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI)).build());
assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionStart(1L, "", "", "", null, 1L))).isFalse();
assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionEnd(1L, 1L))).isFalse();
assertThat(builder.isDefinedAt(new SparkListenerJobEnd(1, 1L, JobSucceeded$.MODULE$))).isFalse();
assertThat(builder.isDefinedAt(new SparkListenerJobStart(1, 1L, Seq$.MODULE$.empty(), new Properties()))).isFalse();
}
Another usage example of org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart from the OpenLineage project.
From the class LogicalPlanRunFacetBuilderTest, method testIsDefined:
@Test
public void testIsDefined() {
  // With a QueryExecution supplied, the builder has a logical plan available,
  // so it should accept SQL-execution and job-end events.
  OpenLineageContext context =
      OpenLineageContext.builder()
          .sparkContext(sparkContext)
          .openLineage(new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI))
          .queryExecution(queryExecution)
          .build();
  LogicalPlanRunFacetBuilder builder = new LogicalPlanRunFacetBuilder(context);

  SparkListenerSQLExecutionStart sqlStart =
      new SparkListenerSQLExecutionStart(1L, "", "", "", null, 1L);
  assertThat(builder.isDefinedAt(sqlStart)).isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionEnd(1L, 1L))).isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerJobEnd(1, 1L, JobSucceeded$.MODULE$))).isTrue();

  // Job START is still rejected even when a QueryExecution is present.
  SparkListenerJobStart jobStart =
      new SparkListenerJobStart(1, 1L, Seq$.MODULE$.empty(), new Properties());
  assertThat(builder.isDefinedAt(jobStart)).isFalse();
}
Another usage example of org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart from the OpenLineage project.
From the class OpenLineageSparkListenerTest, method testSqlEventWithJobEventEmitsOnce:
@Test
public void testSqlEventWithJobEventEmitsOnce() {
// Verifies how many lineage events are emitted when both a SQL-execution start
// and a job start are delivered for the same execution context.
// NOTE(review): the method name says "EmitsOnce" but times(2) is verified below —
// presumably one emit per start(...) call, with no duplicate emission beyond that;
// confirm against OpenLineageSparkListener's intended contract.
SparkSession sparkSession = mock(SparkSession.class);
SparkContext sparkContext = mock(SparkContext.class);
EventEmitter emitter = mock(EventEmitter.class);
QueryExecution qe = mock(QueryExecution.class);
// Unresolved relation standing in for the query's source table.
LogicalPlan query = UnresolvedRelation$.MODULE$.apply(TableIdentifier.apply("tableName"));
SparkPlan plan = mock(SparkPlan.class);
// Stubbing must precede use: sparkSession.sparkContext() is read when the
// OpenLineageContext is built further down.
when(sparkSession.sparkContext()).thenReturn(sparkContext);
when(sparkContext.appName()).thenReturn("appName");
// Optimized plan is a write command (InsertIntoHadoopFsRelationCommand) so the
// InsertIntoHadoopFsRelationVisitor registered below can produce an output dataset.
when(qe.optimizedPlan()).thenReturn(new InsertIntoHadoopFsRelationCommand(new Path("file:///tmp/dir"), null, false, Seq$.MODULE$.empty(), Option.empty(), null, Map$.MODULE$.empty(), query, SaveMode.Overwrite, Option.empty(), Option.empty(), Seq$.MODULE$.<String>empty()));
when(qe.executedPlan()).thenReturn(plan);
when(plan.sparkContext()).thenReturn(sparkContext);
when(plan.nodeName()).thenReturn("execute");
OpenLineageContext olContext = OpenLineageContext.builder().sparkSession(Optional.of(sparkSession)).sparkContext(sparkSession.sparkContext()).openLineage(new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI)).queryExecution(qe).build();
olContext.getOutputDatasetQueryPlanVisitors().add(new InsertIntoHadoopFsRelationVisitor(olContext));
// Static factory keeps emission synchronous/deterministic for the test.
ExecutionContext executionContext = new StaticExecutionContextFactory(emitter).createSparkSQLExecutionContext(1L, emitter, qe, olContext);
// Deliver a SQL-execution start followed by a job start for the same context.
executionContext.start(new SparkListenerSQLExecutionStart(1L, "", "", "", new SparkPlanInfo("name", "string", Seq$.MODULE$.empty(), Map$.MODULE$.empty(), Seq$.MODULE$.empty()), 1L));
executionContext.start(new SparkListenerJobStart(0, 2L, Seq$.MODULE$.<StageInfo>empty(), new Properties()));
ArgumentCaptor<OpenLineage.RunEvent> lineageEvent = ArgumentCaptor.forClass(OpenLineage.RunEvent.class);
// Exactly two RunEvents total — one per start(...) call above.
verify(emitter, times(2)).emit(lineageEvent.capture());
}
Another usage example of org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart from the OpenLineage project.
From the class SparkVersionFacetBuilderTest, method testIsDefinedForSparkListenerEvents:
@Test
public void testIsDefinedForSparkListenerEvents() {
  // The Spark-version facet only needs a SparkContext, so the builder should be
  // defined for every Spark listener event type, regardless of QueryExecution.
  SparkVersionFacetBuilder builder =
      new SparkVersionFacetBuilder(
          OpenLineageContext.builder()
              .sparkContext(sparkContext)
              .openLineage(new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI))
              .build());
  // Fix: use the long literal 1L for the executionId consistently with the
  // sibling calls below (the original passed a bare int here).
  assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionEnd(1L, 1L))).isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionStart(1L, "abc", "abc", "abc", null, 1L)))
      .isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerJobStart(1, 1L, Seq$.MODULE$.empty(), new Properties())))
      .isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerJobEnd(1, 1L, JobSucceeded$.MODULE$))).isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerStageSubmitted(null, new Properties()))).isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerStageCompleted(null))).isTrue();
}
Aggregations (end of collected usage examples).