Use of org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog in project OpenLineage by OpenLineage.
The class JdbcHandler, method getDatasetIdentifier:
@SneakyThrows
@Override
public DatasetIdentifier getDatasetIdentifier(
    SparkSession session, TableCatalog tableCatalog, Identifier identifier, Map<String, String> properties) {
  JDBCTableCatalog catalog = (JDBCTableCatalog) tableCatalog;
  // The catalog keeps its JDBCOptions in a private field, so read it reflectively.
  JDBCOptions options = (JDBCOptions) FieldUtils.readField(catalog, "options", true);
  // Dataset name: the identifier's namespace parts and table name joined with ".".
  String name =
      Stream.concat(Arrays.stream(identifier.namespace()), Stream.of(identifier.name()))
          .collect(Collectors.joining("."));
  // Namespace: the catalog's JDBC URL after sanitization.
  return new DatasetIdentifier(name, JdbcUtils.sanitizeJdbcUrl(options.url()));
}
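The handler reaches the catalog's JDBC URL by reading JDBCTableCatalog's private options field through Apache Commons Lang reflection. The standalone sketch below shows the same FieldUtils.writeField/readField calls on a hypothetical Holder class (Holder and FieldAccessExample are illustrative names, not OpenLineage code), so the technique can be tried without a Spark catalog on the classpath.

import org.apache.commons.lang3.reflect.FieldUtils;

public class FieldAccessExample {
  // Hypothetical stand-in for JDBCTableCatalog, which keeps its JDBCOptions private.
  static class Holder {
    private String options = "initial";
  }

  public static void main(String[] args) throws IllegalAccessException {
    Holder holder = new Holder();
    // Force-write the private field, as the test below does with a mocked JDBCOptions.
    FieldUtils.writeField(holder, "options", "jdbc:postgresql://postgreshost:5432", true);
    // Force-read it back, as getDatasetIdentifier does to obtain the catalog's options.
    String value = (String) FieldUtils.readField(holder, "options", true);
    System.out.println(value); // jdbc:postgresql://postgreshost:5432
  }
}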
Use of org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog in project OpenLineage by OpenLineage.
The class JdbcHandlerTest, method testGetDatasetIdentifier:
@Test
@SneakyThrows
public void testGetDatasetIdentifier() {
  JdbcHandler handler = new JdbcHandler();
  JDBCTableCatalog tableCatalog = new JDBCTableCatalog();
  // Inject a mocked JDBCOptions into the catalog's private "options" field.
  JDBCOptions options = mock(JDBCOptions.class);
  when(options.url()).thenReturn("jdbc:postgresql://postgreshost:5432");
  FieldUtils.writeField(tableCatalog, "options", options, true);
  DatasetIdentifier datasetIdentifier =
      handler.getDatasetIdentifier(
          mock(SparkSession.class),
          tableCatalog,
          Identifier.of(new String[] {"database", "schema"}, "table"),
          new HashMap<>());
  // Name joins namespace and table; namespace is the JDBC URL without the "jdbc:" prefix.
  assertEquals("database.schema.table", datasetIdentifier.getName());
  assertEquals("postgresql://postgreshost:5432", datasetIdentifier.getNamespace());
}
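For reference, a minimal sketch of the two values the test asserts, under the assumption (inferred from the expected strings, not from JdbcUtils.sanitizeJdbcUrl itself) that the namespace is the JDBC URL with its "jdbc:" prefix removed. JdbcIdentifierSketch, qualifiedName, and stripJdbcPrefix are hypothetical names used only for illustration.

import java.util.Arrays;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class JdbcIdentifierSketch {
  // Dataset name: namespace parts plus table name, joined with "." as in getDatasetIdentifier.
  static String qualifiedName(String[] namespace, String table) {
    return Stream.concat(Arrays.stream(namespace), Stream.of(table))
        .collect(Collectors.joining("."));
  }

  // Assumed namespace rule: drop the leading "jdbc:"; the real sanitizeJdbcUrl may do more.
  static String stripJdbcPrefix(String url) {
    return url.startsWith("jdbc:") ? url.substring("jdbc:".length()) : url;
  }

  public static void main(String[] args) {
    System.out.println(qualifiedName(new String[] {"database", "schema"}, "table"));
    // database.schema.table
    System.out.println(stripJdbcPrefix("jdbc:postgresql://postgreshost:5432"));
    // postgresql://postgreshost:5432
  }
}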