use of com.thinkbiganalytics.metadata.api.catalog.Connector in project kylo by Teradata.
Example usage: the upgradeTo method of the MigrateLegacyDatasourcesUpgradeAction class.
/**
 * Migrates pre-catalog "legacy" JDBC user datasources into catalog data source nodes
 * owned by the "jdbc" connector, preserving referencing feeds and entity access control.
 *
 * @param targetVersion the Kylo version being upgraded to (logged only)
 * @throws UpgradeException if the JDBC connector is missing or any JCR operation fails
 */
@Override
public void upgradeTo(final KyloVersion targetVersion) {
    // NOTE(review): message looks copied from another upgrade action — this method migrates
    // legacy datasources, it does not set up access control; consider rewording.
    log.info("Setting up catalog access control: {}", targetVersion);
    try {
        // Node type the legacy nodes are converted to further below.
        NodeTypeManager typeMgr = JcrMetadataAccess.getActiveSession().getWorkspace().getNodeTypeManager();
        NodeType dataSourceType = typeMgr.getNodeType("tba:DataSource");
        // Target connector for all migrated datasources; migration cannot proceed without it.
        Connector conn = pluginManager.getPlugin("jdbc").flatMap(plugin -> connectorProvider.findByPlugin(plugin.getId())).orElseThrow(() -> new IllegalStateException("No JDBC connector found"));
        // Only legacy user datasources whose "tba:details" child is a JDBC details node are migrated.
        legacyProvider.getDatasources().stream().map(JcrDatasource.class::cast).map(JcrDatasource::getNode).filter(node -> JcrUtil.isNodeType(node, "tba:userDatasource")).filter(node -> JcrUtil.isNodeType(JcrUtil.getNode(node, "tba:details"), "tba:jdbcDatasourceDetails")).forEach(jdbcNode -> {
            try {
                // Move the legacy datasource node to a temporary parent that doesn't have any constraints (the catalog folder).
                Node tmpParent = JcrUtil.getNode(JcrUtil.getRootNode(jdbcNode), "metadata/catalog");
                String dsSystemName = dataSourceProvider.generateSystemName(JcrUtil.getName(jdbcNode));
                Path catDsPath = MetadataPaths.dataSourcePath(conn.getSystemName(), dsSystemName);
                Node catDsNode = JcrUtil.moveNode(jdbcNode, JcrUtil.path(tmpParent.getPath()).resolve(dsSystemName));
                // Wrap the node as a legacy UserDataSource and collect its properties.
                JcrUserDatasource legacyDs = JcrUtil.getJcrObject(catDsNode, JcrUserDatasource.class);
                // Feeds referencing the legacy datasource; re-linked to the new catalog data source below.
                Set<? extends JcrFeed> referencingFeeds = legacyDs.getFeedSources().stream().map(FeedSource::getFeed).map(JcrFeed.class::cast).collect(Collectors.toSet());
                // AtomicReferences serve only as mutable holders for the ifPresent lambda — no concurrency involved.
                AtomicReference<String> controllerServiceId = new AtomicReference<>();
                AtomicReference<String> password = new AtomicReference<>();
                legacyDs.getDetails().filter(JcrJdbcDatasourceDetails.class::isInstance).map(JcrJdbcDatasourceDetails.class::cast).ifPresent(details -> {
                    controllerServiceId.set(details.getControllerServiceId().orElse(null));
                    // "{cipher}" prefix — presumably marks the stored password as encrypted for the
                    // catalog's credential handling; TODO confirm against the catalog security code.
                    password.set("{cipher}" + details.getPassword());
                });
                // Access control must be dropped from the legacy node before conversion;
                // it is re-enabled on the converted node below.
                if (this.accessController.isEntityAccessControlled()) {
                    legacyDs.disableAccessControl(legacyDs.getOwner());
                }
                // Convert the legacy type into the catalog type.
                JcrDataSource catDs = convertToDataSource(catDsNode, catDsPath, dataSourceType, controllerServiceId.get(), password.get());
                linkDataSets(catDs, referencingFeeds);
                if (this.accessController.isEntityAccessControlled()) {
                    List<SecurityRole> roles = roleProvider.getEntityRoles(SecurityRole.DATASOURCE);
                    actionsProvider.getAvailableActions(AllowedActions.DATASOURCE).ifPresent(actions -> catDs.enableAccessControl((JcrAllowedActions) actions, legacyDs.getOwner(), roles));
                }
            } catch (RepositoryException e) {
                // Checked exception cannot escape the forEach lambda — wrap as unchecked.
                throw new UpgradeException("Failed to migrate legacy datasources", e);
            }
        });
    } catch (IllegalStateException | RepositoryException e) {
        throw new UpgradeException("Failed to migrate legacy datasources", e);
    }
}
use of com.thinkbiganalytics.metadata.api.catalog.Connector in project kylo by Teradata.
Example usage: the upgradeTo method of the DefaultDataSourcesUpgradeAction class.
/**
 * Creates the default "Hive" catalog data source if the Hive connector plugin and
 * connector exist and no Hive data source has been created yet.
 *
 * @param targetVersion the Kylo version being upgraded to (logged only)
 */
@Override
public void upgradeTo(final KyloVersion targetVersion) {
    log.info("Creating default catalog data sources: {}", targetVersion);
    // Guard clauses replace the original triple-nested ifs; behavior is unchanged.
    final Optional<ConnectorPlugin> plugin = pluginManager.getPlugin("hive");
    if (!plugin.isPresent()) {
        log.warn("No Hive connector plugin found - cannot create a default Hive data source");
        return;
    }
    final Optional<Connector> connector = connectorProvider.findByPlugin(plugin.get().getId());
    if (!connector.isPresent()) {
        log.warn("No Hive connector found - cannot create a default Hive data source");
        return;
    }
    final List<DataSource> hiveSources = dataSourceProvider.findByConnector(connector.get().getId());
    // If at least one Hive data source exists then do nothing.
    if (!hiveSources.isEmpty()) {
        log.info("One or more Hive data sources already found: {}", hiveSources.stream().map(DataSource::toString).collect(Collectors.toList()));
        return;
    }
    log.info("Creating default Hive data source");
    final DataSource ds = dataSourceProvider.create(connector.get().getId(), "Hive");
    ds.setDescription("The default Hive data source");
    // Spark connection parameters come from this action's configured JDBC properties.
    final DataSetSparkParameters params = ds.getSparkParameters();
    params.setFormat("hive");
    params.addOption("driver", this.driver);
    params.addOption("url", this.url);
    params.addOption("user", this.user);
    params.addOption("password", this.password);
}
use of com.thinkbiganalytics.metadata.api.catalog.Connector in project kylo by Teradata.
Example usage: the ensureCatalogAccessControl method of the AccessControlConfigurator class.
/**
 * Ensures that entity-level access control is set up for the entities introduced by the
 * connector architecture: every connector and every data source gets its entity roles and
 * allowed actions enabled, owned by its recorded owner or, failing that, the active user.
 */
public void ensureCatalogAccessControl() {
    // Enable access control on every connector (including inactive ones: findAll(true)).
    List<Connector> connectors = connectorProvider.findAll(true);
    // Fixed: local variable was misspelled "conntorRoles".
    List<SecurityRole> connectorRoles = this.roleProvider.getEntityRoles(SecurityRole.CONNECTOR);
    Optional<AllowedActions> connectorActions = this.actionsProvider.getAvailableActions(AllowedActions.CONNECTOR);
    connectors.forEach(conn -> {
        // Fall back to the active user when the connector has no recorded owner.
        Principal owner = conn.getOwner() != null ? conn.getOwner() : JcrMetadataAccess.getActiveUser();
        connectorActions.ifPresent(actions -> ((JcrConnector) conn).enableAccessControl((JcrAllowedActions) actions, owner, connectorRoles));
    });
    // Same treatment for data sources.
    List<DataSource> dataSources = dataSourceProvider.findAll();
    List<SecurityRole> dataSourceRoles = this.roleProvider.getEntityRoles(SecurityRole.DATASOURCE);
    Optional<AllowedActions> dataSourceActions = this.actionsProvider.getAvailableActions(AllowedActions.DATASOURCE);
    dataSources.stream().map(JcrDataSource.class::cast).forEach(dataSource -> {
        Principal owner = dataSource.getOwner() != null ? dataSource.getOwner() : JcrMetadataAccess.getActiveUser();
        dataSourceActions.ifPresent(actions -> dataSource.enableAccessControl((JcrAllowedActions) actions, owner, dataSourceRoles));
    });
}
use of com.thinkbiganalytics.metadata.api.catalog.Connector in project kylo by Teradata.
Example usage: the doPluginSyncAction method of the CatalogMetadataConfig class.
/**
 * Synchronizes the stored connectors with the currently installed connector plugins:
 * existing connectors for installed plugins are (re)activated and refreshed from the
 * plugin descriptor; connectors whose plugin is gone are deactivated if they still own
 * data sources, or deleted if they do not.
 *
 * @param pluginMgr source of the currently installed connector plugins
 * @param metadata  transaction wrapper; the whole sync runs in one SERVICE-principal commit
 */
private void doPluginSyncAction(ConnectorPluginManager pluginMgr, MetadataAccess metadata) {
    metadata.commit(() -> {
        List<ConnectorPlugin> plugins = pluginMgr.getPlugins();
        // Map of pluginId -> connector; entries are removed as plugins are matched, so the
        // leftovers afterwards are the connectors whose plugin is no longer installed.
        Map<String, Connector> connectorMap = connectorProvider().findAll(true).stream().collect(Collectors.toMap(Connector::getPluginId, c -> c));
        for (ConnectorPlugin plugin : plugins) {
            Connector connector = connectorMap.get(plugin.getId());
            ConnectorPluginDescriptor descr = plugin.getDescriptor();
            if (connector != null) {
                // Fixed: was a redundant connectorMap.get(plugin.getId()) re-lookup of the
                // connector already held in the local variable.
                connector.setActive(true);
                connectorMap.remove(plugin.getId());
            } else {
                String title = descr.getTitle();
                connector = connectorProvider().create(plugin.getId(), title);
            }
            // Refresh descriptor-derived presentation/config in both branches.
            connector.setIconColor(descr.getColor());
            connector.setIcon(descr.getIcon());
            if (StringUtils.isNotBlank(descr.getFormat())) {
                connector.getSparkParameters().setFormat(descr.getFormat());
            }
        }
        // Connectors without an installed plugin are deactivated if they still have at
        // least one data source, or removed if they don't.
        for (Connector connector : connectorMap.values()) {
            if (connector.getDataSources().isEmpty()) {
                connectorProvider().deleteById(connector.getId());
            } else {
                connector.setActive(false);
            }
        }
    }, MetadataAccess.SERVICE);
}
use of com.thinkbiganalytics.metadata.api.catalog.Connector in project kylo by Teradata.
Example usage: the setup method of the JcrDataSetProviderTest class.
@BeforeClass
public void setup() {
    metadata.commit(() -> {
        // Seed one connector and two data sources; their ids are recorded for the tests.
        final Connector connector = this.connProvider.create("plugin1", "test1" + "_conn");
        for (final String dataSetName : new String[]{"dataset0", "dataset1"}) {
            this.dSrcIds.add(this.dataSourceProvider.create(connector.getId(), dataSetName).getId());
        }
    }, MetadataAccess.SERVICE);
}
Aggregations