Use of io.openlineage.client.OpenLineage in project OpenLineage by OpenLineage: class SQLDWDatabricksVisitorTest, method testSQLDWRelationBadJdbcUrl.
@Test
void testSQLDWRelationBadJdbcUrl() {
  String inputName = "dbo.mytable";
  // Connection string without the "jdbc:" prefix, so no dataset is expected.
  String inputJdbcUrl = "sqlserver://MYTESTSERVER.database.windows.net:1433;database=MYTESTDB";

  // Instantiate a MockSqlDWBaseRelation wrapped in a LogicalRelation.
  LogicalRelation lr =
      new LogicalRelation(
          new MockSqlDWBaseRelation(inputName, inputJdbcUrl),
          Seq$.MODULE$
              .<AttributeReference>newBuilder()
              .$plus$eq(
                  new AttributeReference(
                      "name", StringType$.MODULE$, false, null, ExprId.apply(1L), Seq$.MODULE$.<String>empty()))
              .result(),
          Option.empty(),
          false);

  TestSqlDWDatabricksVisitor visitor =
      new TestSqlDWDatabricksVisitor(
          SparkAgentTestExtension.newContext(session),
          DatasetFactory.output(new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI)));

  List<OpenLineage.Dataset> datasets = visitor.apply(lr);
  assertEquals(0, datasets.size());
}
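Read together with testSQLDWRelation below, this case indicates that the visitor only emits a dataset when the connection string carries the jdbc: scheme prefix; the bare sqlserver:// URL above yields an empty result. A minimal standalone sketch of that guard, using a hypothetical isRecognizedJdbcUrl helper rather than the project's actual implementation:

// Hypothetical helper illustrating the behavior exercised by this test:
// only connection strings with the "jdbc:" scheme prefix are accepted.
public final class JdbcUrlGuard {
  private JdbcUrlGuard() {}

  static boolean isRecognizedJdbcUrl(String url) {
    return url != null && url.startsWith("jdbc:");
  }

  public static void main(String[] args) {
    // The URL from this test lacks the prefix, so no dataset would be built.
    System.out.println(isRecognizedJdbcUrl(
        "sqlserver://MYTESTSERVER.database.windows.net:1433;database=MYTESTDB")); // false
    System.out.println(isRecognizedJdbcUrl(
        "jdbc:sqlserver://MYTESTSERVER.database.windows.net:1433;database=MYTESTDB")); // true
  }
}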
Class SQLDWDatabricksVisitorTest, method testSQLDWRelation.
@Test
void testSQLDWRelation() {
  String inputName = "\"dbo\".\"table1\"";
  String inputJdbcUrl = "jdbc:sqlserver://MYTESTSERVER.database.windows.net:1433;database=MYTESTDB";
  String expectedName = "dbo.table1";
  String expectedNamespace = "sqlserver://MYTESTSERVER.database.windows.net:1433;database=MYTESTDB;";

  // Instantiate a MockSqlDWBaseRelation wrapped in a LogicalRelation.
  LogicalRelation lr =
      new LogicalRelation(
          new MockSqlDWBaseRelation(inputName, inputJdbcUrl),
          Seq$.MODULE$
              .<AttributeReference>newBuilder()
              .$plus$eq(
                  new AttributeReference(
                      "name", StringType$.MODULE$, false, null, ExprId.apply(1L), Seq$.MODULE$.<String>empty()))
              .result(),
          Option.empty(),
          false);

  TestSqlDWDatabricksVisitor visitor =
      new TestSqlDWDatabricksVisitor(
          SparkAgentTestExtension.newContext(session),
          DatasetFactory.output(new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI)));

  List<OpenLineage.Dataset> datasets = visitor.apply(lr);
  assertEquals(1, datasets.size());

  OpenLineage.Dataset ds = datasets.get(0);
  assertEquals(expectedNamespace, ds.getNamespace());
  assertEquals(expectedName, ds.getName());
}
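The expected values spell out the normalization this test asserts: quotes are stripped from the identifier "dbo"."table1" to give dbo.table1, and the namespace drops the jdbc: prefix while keeping a trailing semicolon. A minimal standalone sketch of that normalization, with hypothetical helper names (datasetName, namespace) that are not the project's actual code:

// Hypothetical normalization matching the expectations in this test;
// the real visitor may differ in detail.
public final class SqlDwNaming {
  private SqlDwNaming() {}

  static String datasetName(String quotedTableName) {
    // "dbo"."table1" -> dbo.table1
    return quotedTableName.replace("\"", "");
  }

  static String namespace(String jdbcUrl) {
    // jdbc:sqlserver://host:1433;database=DB -> sqlserver://host:1433;database=DB;
    String withoutPrefix =
        jdbcUrl.startsWith("jdbc:") ? jdbcUrl.substring("jdbc:".length()) : jdbcUrl;
    return withoutPrefix.endsWith(";") ? withoutPrefix : withoutPrefix + ";";
  }

  public static void main(String[] args) {
    System.out.println(datasetName("\"dbo\".\"table1\"")); // dbo.table1
    System.out.println(namespace(
        "jdbc:sqlserver://MYTESTSERVER.database.windows.net:1433;database=MYTESTDB"));
    // sqlserver://MYTESTSERVER.database.windows.net:1433;database=MYTESTDB;
  }
}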
Class LogicalPlanRunFacetBuilderTest, method testIsNotDefinedWithoutQueryExecution.
@Test
public void testIsNotDefinedWithoutQueryExecution() {
  // Context built without a QueryExecution, so the builder is not defined for any event.
  LogicalPlanRunFacetBuilder builder =
      new LogicalPlanRunFacetBuilder(
          OpenLineageContext.builder()
              .sparkContext(sparkContext)
              .openLineage(new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI))
              .build());

  assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionStart(1L, "", "", "", null, 1L))).isFalse();
  assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionEnd(1L, 1L))).isFalse();
  assertThat(builder.isDefinedAt(new SparkListenerJobEnd(1, 1L, JobSucceeded$.MODULE$))).isFalse();
  assertThat(builder.isDefinedAt(new SparkListenerJobStart(1, 1L, Seq$.MODULE$.empty(), new Properties()))).isFalse();
}
Class LogicalPlanRunFacetBuilderTest, method testIsDefined.
@Test
public void testIsDefined() {
  LogicalPlanRunFacetBuilder builder =
      new LogicalPlanRunFacetBuilder(
          OpenLineageContext.builder()
              .sparkContext(sparkContext)
              .openLineage(new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI))
              .queryExecution(queryExecution)
              .build());

  assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionStart(1L, "", "", "", null, 1L))).isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerSQLExecutionEnd(1L, 1L))).isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerJobEnd(1, 1L, JobSucceeded$.MODULE$))).isTrue();
  assertThat(builder.isDefinedAt(new SparkListenerJobStart(1, 1L, Seq$.MODULE$.empty(), new Properties()))).isFalse();
}
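The two tests above pin down when the builder's partial function applies: SQL-execution and job-end events are handled only if the context carries a QueryExecution, and job-start events are never handled. A hypothetical plain-Java restatement of that predicate, using an Optional and a stand-in event enum rather than the real Spark listener types:

import java.util.Optional;

// Hypothetical restatement of the behavior covered by the two tests above;
// the enum and Optional are stand-ins, not the project's actual types.
public final class DefinednessSketch {
  enum SparkEvent { SQL_EXECUTION_START, SQL_EXECUTION_END, JOB_END, JOB_START }

  static boolean isDefinedAt(Optional<String> queryExecution, SparkEvent event) {
    if (event == SparkEvent.JOB_START) {
      return false; // never handled, with or without a query execution
    }
    return queryExecution.isPresent();
  }

  public static void main(String[] args) {
    System.out.println(isDefinedAt(Optional.empty(), SparkEvent.SQL_EXECUTION_END));    // false
    System.out.println(isDefinedAt(Optional.of("plan"), SparkEvent.SQL_EXECUTION_END)); // true
    System.out.println(isDefinedAt(Optional.of("plan"), SparkEvent.JOB_START));         // false
  }
}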
Class LogicalPlanRunFacetBuilderTest, method testBuild.
@Test
public void testBuild() {
  LogicalPlanRunFacetBuilder builder =
      new LogicalPlanRunFacetBuilder(
          OpenLineageContext.builder()
              .sparkContext(sparkContext)
              .openLineage(new OpenLineage(OpenLineageClient.OPEN_LINEAGE_CLIENT_URI))
              .queryExecution(queryExecution)
              .build());

  Map<String, RunFacet> facetMap = new HashMap<>();
  builder.build(new SparkListenerSQLExecutionEnd(1L, 1L), facetMap::put);

  assertThat(facetMap)
      .hasEntrySatisfying(
          "spark.logicalPlan",
          facet -> assertThat(facet).hasFieldOrPropertyWithValue("plan", queryExecution.optimizedPlan().toJSON()));
}
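Here builder.build accepts a consumer of (name, facet) pairs, and the test passes facetMap::put so each emitted facet lands in a plain HashMap under its name. A minimal sketch of that collection pattern with a stand-in Facet type (not the OpenLineage RunFacet class) and a hypothetical buildFacets method:

import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;

// Minimal sketch of the collection pattern used above: a builder emits
// (name, facet) pairs into a BiConsumer, and a map method reference collects them.
public final class FacetCollectionSketch {
  record Facet(String plan) {}

  // Hypothetical builder that emits one facet named "spark.logicalPlan".
  static void buildFacets(String optimizedPlanJson, BiConsumer<String, Facet> consumer) {
    consumer.accept("spark.logicalPlan", new Facet(optimizedPlanJson));
  }

  public static void main(String[] args) {
    Map<String, Facet> facetMap = new HashMap<>();
    buildFacets("{\"class\":\"Project\"}", facetMap::put);
    System.out.println(facetMap.get("spark.logicalPlan").plan()); // {"class":"Project"}
  }
}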