Use of com.thinkbiganalytics.metadata.rest.model.data.JdbcDatasource in project kylo by Teradata.
Example from the class ExecuteSparkJob, method getDatasources:
/**
 * Builds the environment variables that describe the data sources required by a Spark job.
 *
 * <p>Each id in {@code datasourceIds} is resolved through the metadata provider and the
 * resolved data sources are serialized into a JSON array stored under the {@code DATASOURCES}
 * key. Driver jars declared by JDBC data sources are appended to {@code extraJarPaths} so
 * they are available on the Spark submit classpath.</p>
 *
 * @param session the process session, used to fail the flow file when a data source is missing
 * @param flowFile the flow file being processed
 * @param PROVENANCE_JOB_STATUS_KEY attribute key under which a failure reason is recorded
 * @param datasourceIds comma-separated data source ids; may be blank
 * @param metadataService service used to resolve data source ids
 * @param extraJarPaths mutated in place: receives JDBC driver jar locations
 * @return the environment map, or {@code null} when a data source is missing (the flow file
 *         has then already been transferred to {@code REL_FAILURE})
 * @throws JsonProcessingException if a data source cannot be serialized to JSON
 */
private Map<String, String> getDatasources(ProcessSession session, FlowFile flowFile, String PROVENANCE_JOB_STATUS_KEY, String datasourceIds, MetadataProviderService metadataService, List<String> extraJarPaths) throws JsonProcessingException {
    final Map<String, String> env = new HashMap<>();
    if (StringUtils.isNotBlank(datasourceIds)) {
        final StringBuilder datasources = new StringBuilder(10240);
        final ObjectMapper objectMapper = new ObjectMapper();
        final MetadataProvider provider = metadataService.getProvider();
        for (final String id : datasourceIds.split(",")) {
            final Optional<Datasource> datasource = provider.getDatasource(id);
            if (!datasource.isPresent()) {
                // A referenced data source no longer exists: fail the flow file.
                getLog().error("Required datasource {} is missing for Spark job: {}", new Object[]{id, flowFile});
                flowFile = session.putAttribute(flowFile, PROVENANCE_JOB_STATUS_KEY, "Invalid data source: " + id);
                session.transfer(flowFile, REL_FAILURE);
                return null;
            }
            if (datasource.get() instanceof JdbcDatasource && StringUtils.isNotBlank(((JdbcDatasource) datasource.get()).getDatabaseDriverLocation())) {
                // JDBC driver jars must ride along on the Spark submit classpath.
                final String[] databaseDriverLocations = ((JdbcDatasource) datasource.get()).getDatabaseDriverLocation().split(",");
                extraJarPaths.addAll(Arrays.asList(databaseDriverLocations));
            }
            // Open the array on the first element, otherwise emit a separator.
            datasources.append((datasources.length() == 0) ? '[' : ',');
            datasources.append(objectMapper.writeValueAsString(datasource.get()));
        }
        if (datasources.length() == 0) {
            // Degenerate id lists such as "," split to an empty array; previously this
            // produced the malformed value "]" instead of an empty JSON array "[]".
            datasources.append('[');
        }
        datasources.append(']');
        env.put("DATASOURCES", datasources.toString());
    }
    return env;
}
Use of com.thinkbiganalytics.metadata.rest.model.data.JdbcDatasource in project kylo by Teradata.
Example from the class DatasourceController, method getTablesAndColumns:
/**
 * Gets the tables and their columns from the specified data source for given schema
 *
 * @param idStr the data source id
 * @param schema the schema name, or {@code null} for all schemas
 * @return the list of tables and their columns
 */
@GET
@Path("{id}/table-columns")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Gets the tables and their columns from the data source for given schema", notes = "Connects to the database specified by the data source.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns tables and columns", response = String.class, responseContainer = "List"), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) })
public Response getTablesAndColumns(@PathParam("id") final String idStr, @QueryParam("schema") final String schema) {
    // Verify user has access to data source
    final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> id = metadata.read(() -> {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);
        final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = datasourceProvider.getDatasource(datasourceProvider.resolve(idStr));
        return Optional.ofNullable(datasource).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId);
    });
    // Retrieve table names using system user
    return metadata.read(() -> {
        JdbcDatasource datasource = id.map(datasourceProvider::getDatasource).map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN)).filter(JdbcDatasource.class::isInstance).map(JdbcDatasource.class::cast).orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr));
        List<String> tableNamesForDatasource = dbcpConnectionPoolTableInfo.getTableNamesForDatasource(datasource, schema, null);
        List<DatabaseMetadata> tables = new ArrayList<>();
        if (tableNamesForDatasource != null) {
            tables = tableNamesForDatasource.stream().flatMap(schemaNameDotTableName -> {
                // Entries have the form "schema.table". Parse both parts from the entry
                // itself: the schema query parameter may be null (meaning all schemas),
                // and the previous substring(schema.length() + 1) then threw a
                // NullPointerException and stored a null database name.
                final int separatorIndex = schemaNameDotTableName.indexOf('.');
                final String tableSchemaName = (separatorIndex >= 0) ? schemaNameDotTableName.substring(0, separatorIndex) : schema;
                final String tableName = (separatorIndex >= 0) ? schemaNameDotTableName.substring(separatorIndex + 1) : schemaNameDotTableName;
                final TableSchema tableSchema = dbcpConnectionPoolTableInfo.describeTableForDatasource(datasource, tableSchemaName, tableName);
                return tableSchema.getFields().stream().map(field -> {
                    final DefaultDatabaseMetadata meta = new DefaultDatabaseMetadata();
                    meta.setDatabaseName(tableSchemaName);
                    meta.setColumnName(field.getName());
                    meta.setTableName(tableName);
                    return meta;
                });
            }).collect(Collectors.toList());
        }
        return Response.ok(tables).build();
    }, MetadataAccess.SERVICE);
}
Use of com.thinkbiganalytics.metadata.rest.model.data.JdbcDatasource in project kylo by Teradata.
Example from the class DatasourceModelTransform, method toDatasource:
/**
 * Transforms the specified domain object to a REST object.
 *
 * @param domain the domain object
 * @param level the level of detail
 * @return the REST object
 * @throws IllegalArgumentException if the domain object cannot be converted
 */
public UserDatasource toDatasource(@Nonnull final com.thinkbiganalytics.metadata.api.datasource.UserDatasource domain, @Nonnull final Level level) {
    final DatasourceDetails details = domain.getDetails().orElse(null);
    // JDBC details are checked first; instanceof is false for null, so the
    // null fallback below is unaffected by the ordering.
    if (details instanceof JdbcDatasourceDetails) {
        final JdbcDatasource jdbcDatasource = new JdbcDatasource();
        updateDatasource(jdbcDatasource, domain, level);
        return jdbcDatasource;
    }
    if (details == null) {
        // No details present: fall back to the plain user data source representation.
        final UserDatasource userDatasource = new UserDatasource();
        updateDatasource(userDatasource, domain, level);
        return userDatasource;
    }
    throw new IllegalArgumentException("Not a supported datasource details class: " + details.getClass());
}
Use of com.thinkbiganalytics.metadata.rest.model.data.JdbcDatasource in project kylo by Teradata.
Example from the class DatasourceIT, method testCreateAndUpdateDatasource:
@Test
public void testCreateAndUpdateDatasource() throws Exception {
    final JdbcDatasource[] initialDatasources = getDatasources();

    // Create a brand-new JDBC data source.
    final JdbcDatasource request = new JdbcDatasource();
    request.setName("ds");
    request.setDescription("created by integration tests");
    request.setDatabaseConnectionUrl("jdbc:mysql://localhost:3306/kylo");
    request.setDatabaseDriverClassName("org.mariadb.jdbc.Driver");
    request.setDatabaseDriverLocation("/opt/nifi/mysql/mariadb-java-client-1.5.7.jar");
    request.setDatabaseUser("root");
    request.setPassword("secret");
    request.setType("mysql");

    final JdbcDatasource created = createDatasource(request);
    Assert.assertEquals(request.getName(), created.getName());
    Assert.assertEquals(request.getDescription(), created.getDescription());
    Assert.assertEquals(request.getDatabaseConnectionUrl(), created.getDatabaseConnectionUrl());
    Assert.assertEquals(request.getDatabaseDriverClassName(), created.getDatabaseDriverClassName());
    Assert.assertEquals(request.getDatabaseDriverLocation(), created.getDatabaseDriverLocation());
    Assert.assertEquals(request.getDatabaseUser(), created.getDatabaseUser());
    Assert.assertNull(created.getPassword());
    Assert.assertNull(created.getIcon());
    Assert.assertNull(created.getIconColor());

    // assert new datasource was added
    JdbcDatasource[] currentDatasources = getDatasources();
    Assert.assertEquals(initialDatasources.length + 1, currentDatasources.length);

    // Re-fetch the created data source and update it through the same endpoint.
    final JdbcDatasource fetched = getDatasource(created.getId());
    fetched.setName("ds with updated name");
    fetched.setDescription("updated by integration tests");
    fetched.setDatabaseConnectionUrl("jdbc:mysql://localhost:3306/kylo");
    fetched.setIcon("stars");
    fetched.setIconColor("green");

    final JdbcDatasource updated = createDatasource(fetched);
    Assert.assertEquals(fetched.getName(), updated.getName());
    Assert.assertEquals(fetched.getDescription(), updated.getDescription());
    Assert.assertEquals(fetched.getDatabaseConnectionUrl(), updated.getDatabaseConnectionUrl());
    Assert.assertEquals(fetched.getDatabaseDriverClassName(), updated.getDatabaseDriverClassName());
    Assert.assertEquals(fetched.getDatabaseDriverLocation(), updated.getDatabaseDriverLocation());
    Assert.assertEquals(fetched.getDatabaseUser(), updated.getDatabaseUser());
    Assert.assertNull(updated.getPassword());
    Assert.assertEquals("stars", updated.getIcon());
    Assert.assertEquals("green", updated.getIconColor());

    // assert datasource was updated, rather than added
    currentDatasources = getDatasources();
    Assert.assertEquals(initialDatasources.length + 1, currentDatasources.length);

    // delete datasource
    deleteDatasource(fetched.getId());
    currentDatasources = getDatasources();
    Assert.assertEquals(initialDatasources.length, currentDatasources.length);
    getDatasourceExpectingStatus(fetched.getId(), HTTP_NOT_FOUND);
}
Use of com.thinkbiganalytics.metadata.rest.model.data.JdbcDatasource in project kylo by Teradata.
Example from the class DataSourceProvider, method toDataSource:
/**
 * Converts a feed data source to a REST data source.
 *
 * @param feedDataSource the feed-level data source, possibly {@code null}
 * @param level the level of detail for the transformation
 * @return the catalog data source, or {@code null} when the input is null or is not a
 *         JDBC data source backed by a configured JDBC connector
 */
@Nullable
@SuppressWarnings("squid:S3655")
private DataSource toDataSource(@Nullable final Datasource feedDataSource, @Nonnull final DatasourceModelTransform.Level level) {
    if (feedDataSource == null) {
        return null;
    }
    // Transform to the metadata REST model first.
    final com.thinkbiganalytics.metadata.rest.model.data.Datasource metadataDataSource = feedDataSourceTransform.toDatasource(feedDataSource, level);
    // Only JDBC data sources with a configured JDBC connector are exposed.
    if (!(metadataDataSource instanceof JdbcDatasource) || !getJdbcConnectorId().isPresent()) {
        return null;
    }
    final DataSource dataSource = new DataSource();
    dataSource.setId(metadataDataSource.getId());
    dataSource.setTitle(metadataDataSource.getName());
    final Connector connector = new Connector();
    // Presence was verified in the guard above (hence the S3655 suppression).
    connector.setId(getJdbcConnectorId().get());
    dataSource.setConnector(connector);
    dataSource.setTemplate(createTemplate((JdbcDatasource) metadataDataSource));
    return dataSource;
}
Aggregations