Use of org.apache.spark.scheduler.SparkListenerJobEnd in the kylo project by Teradata.
From the class DeletePathsListenerTest, method testFlag.
/**
 * Verifies that the temporary file is left untouched when the listener
 * is applied with the flag set to {@code false}.
 */
@Test
public void testFlag() {
final SparkListenerJobEnd jobEnd = new SparkListenerJobEnd(1, 0, JobSucceeded$.MODULE$);
// With a false flag, applying the job-end event must not delete the path.
apply(false, jobEnd);
Assert.assertTrue("Expected file to exist", tempFile.exists());
}
Use of org.apache.spark.scheduler.SparkListenerJobEnd in the OpenLineage project by OpenLineage.
From the class LogicalPlanRunFacetBuilderTest, method testIsNotDefinedWithoutQueryExecution.
/**
 * Without a QueryExecution on the context, the builder must reject every
 * Spark listener event type, including SQL execution and job events.
 */
@Test
public void testIsNotDefinedWithoutQueryExecution() {
OpenLineage openLineage = new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI);
// Context deliberately built WITHOUT a queryExecution.
OpenLineageContext context =
    OpenLineageContext.builder().sparkContext(sparkContext).openLineage(openLineage).build();
LogicalPlanRunFacetBuilder builder = new LogicalPlanRunFacetBuilder(context);

assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionStart(1L, "", "", "", null, 1L))).isFalse();
assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionEnd(1L, 1L))).isFalse();
assertThat(builder.isDefinedAt(new SparkListenerJobEnd(1, 1L, JobSucceeded$.MODULE$))).isFalse();
assertThat(builder.isDefinedAt(new SparkListenerJobStart(1, 1L, Seq$.MODULE$.empty(), new Properties()))).isFalse();
}
Use of org.apache.spark.scheduler.SparkListenerJobEnd in the OpenLineage project by OpenLineage.
From the class LogicalPlanRunFacetBuilderTest, method testIsDefined.
/**
 * With a QueryExecution present on the context, the builder must accept
 * SQL execution start/end and job-end events, but still reject job-start.
 */
@Test
public void testIsDefined() {
OpenLineage openLineage = new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI);
// Context built WITH a queryExecution, unlike the negative test.
OpenLineageContext context =
    OpenLineageContext.builder()
        .sparkContext(sparkContext)
        .openLineage(openLineage)
        .queryExecution(queryExecution)
        .build();
LogicalPlanRunFacetBuilder builder = new LogicalPlanRunFacetBuilder(context);

assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionStart(1L, "", "", "", null, 1L))).isTrue();
assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionEnd(1L, 1L))).isTrue();
assertThat(builder.isDefinedAt(new SparkListenerJobEnd(1, 1L, JobSucceeded$.MODULE$))).isTrue();
// Job-start events are never handled by this facet builder.
assertThat(builder.isDefinedAt(new SparkListenerJobStart(1, 1L, Seq$.MODULE$.empty(), new Properties()))).isFalse();
}
Use of org.apache.spark.scheduler.SparkListenerJobEnd in the OpenLineage project by OpenLineage.
From the class AbstractQueryPlanDatasetBuilderTest, method testApplyOnBuilderWithGenericArg.
/**
 * Shows that parameterizing the builder instance with a generic type
 * argument at the use site is not enough for runtime type resolution:
 * erasure means isDefinedAt cannot recover the event type.
 */
@Test
public void testApplyOnBuilderWithGenericArg() {
SparkSession session =
    SparkSession.builder()
        .config("spark.sql.warehouse.dir", "/tmp/warehouse")
        .master("local")
        .getOrCreate();
OpenLineage openLineage = new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI);
InputDataset expected = openLineage.newInputDataset("namespace", "the_name", null, null);
OpenLineageContext context = createContext(session, openLineage);
MyGenericArgInputDatasetBuilder<SparkListenerJobEnd> datasetBuilder =
    new MyGenericArgInputDatasetBuilder<>(context, true, expected);
SparkListenerJobEnd jobEnd = new SparkListenerJobEnd(1, 2, null);
// Even though our instance of builder is parameterized with SparkListenerJobEnd, it's not
// *compiled* with that argument, so the isDefinedAt method fails to resolve the type arg
Assertions.assertFalse(((PartialFunction) datasetBuilder).isDefinedAt(jobEnd));
}
Use of org.apache.spark.scheduler.SparkListenerJobEnd in the OpenLineage project by OpenLineage.
From the class ErrorFacetBuilderTest, method testIsUndefinedForJobSuccess.
/**
 * A successful job end carries no error, so the error-facet builder
 * must report that it is not defined for the event.
 */
@Test
public void testIsUndefinedForJobSuccess() {
final SparkListenerJobEnd successEvent = new SparkListenerJobEnd(1, 1L, JobSucceeded$.MODULE$);
assertThat(new ErrorFacetBuilder().isDefinedAt(successEvent)).isFalse();
}
Aggregations