Example usage of com.thinkbiganalytics.kylo.catalog.CatalogException in the Kylo project by Teradata: class S3FileSystemProvider, method getBasicCredentialsProvider.
/**
 * Reads basic AWS credentials for the URI's filesystem scheme from the Hadoop configuration.
 *
 * <p>Looks up {@code fs.<scheme>.awsAccessKeyId} and {@code fs.<scheme>.awsSecretAccessKey}.
 * Both present yields a provider; both absent yields empty (with an informational log);
 * exactly one present is a configuration error.</p>
 *
 * @param uri  the S3 path whose scheme selects the property names
 * @param conf the Hadoop configuration to read credentials from
 * @return a credentials provider, or empty if no credentials are configured
 * @throws CatalogException if only one of the two credential properties is set
 */
@Nonnull
@VisibleForTesting
Optional<AWSCredentialsProvider> getBasicCredentialsProvider(@Nonnull final URI uri, @Nonnull final Configuration conf) {
    // Property names are scheme-specific (s3, s3n, ...)
    final String scheme = uri.getScheme();
    final String accessKeyProperty = "fs." + scheme + ".awsAccessKeyId";
    final String secretKeyProperty = "fs." + scheme + ".awsSecretAccessKey";
    final String accessKey = conf.get(accessKeyProperty);
    final String secretKey = conf.get(secretKeyProperty);

    // Guard clauses: neither set -> empty; one set -> misconfiguration; both set -> provider
    if (accessKey == null && secretKey == null) {
        log.info("AWS Access Key ID and Secret Access Key must be specified by setting the {} and {} properties (respectively).", accessKeyProperty, secretKeyProperty);
        return Optional.empty();
    }
    if (accessKey == null) {
        throw new CatalogException("catalog.fs.s3.missingAccessKeyProperty", accessKeyProperty);
    }
    if (secretKey == null) {
        throw new CatalogException("catalog.fs.s3.missingSecretAccessKeyProperty", secretKeyProperty);
    }
    return Optional.of(new BasicAWSCredentialsProvider(accessKey, secretKey));
}
Example usage of com.thinkbiganalytics.kylo.catalog.CatalogException in the Kylo project by Teradata: class S3FileSystemProvider, method listFiles.
/**
 * Lists the S3 buckets visible to the credentials derived from the path and configuration,
 * each represented as a top-level directory entry.
 *
 * @param path the S3 root path; its scheme (s3, s3n, s3bfs, s3a) selects the client strategy
 * @param conf the Hadoop configuration supplying credentials and factory settings
 * @return one {@link DataSetFile} directory entry per bucket
 * @throws CatalogException if the path's scheme is not an S3 scheme
 * @throws IllegalArgumentException if the s3a client factory fails to create a client
 */
@Nonnull
@Override
public List<DataSetFile> listFiles(@Nonnull final Path path, @Nonnull final Configuration conf) {
    final URI uri = path.toUri();
    final String scheme = uri.getScheme();

    // Build an S3 client appropriate for the URI scheme
    final AmazonS3 s3;
    if ("s3".equalsIgnoreCase(scheme) || "s3bfs".equalsIgnoreCase(scheme) || "s3n".equalsIgnoreCase(scheme)) {
        s3 = createS3Client(uri, conf);
    } else if ("s3a".equalsIgnoreCase(scheme)) {
        // s3a delegates client construction to the Hadoop-configured factory class
        final Class<? extends S3ClientFactory> factoryClass = conf.getClass(Constants.S3_CLIENT_FACTORY_IMPL, Constants.DEFAULT_S3_CLIENT_FACTORY_IMPL, S3ClientFactory.class);
        try {
            s3 = ReflectionUtils.newInstance(factoryClass, conf).createS3Client(uri);
        } catch (final IOException e) {
            throw new IllegalArgumentException("Unable to create S3 client: " + e, e);
        }
    } else {
        log.debug("Scheme {} not supported for S3 path: {}", scheme, path);
        throw new CatalogException("catalog.fs.s3.invalidScheme", scheme);
    }

    // Map each bucket to a directory entry; always release the client afterwards
    try {
        return s3.listBuckets().stream().map(b -> {
            final DataSetFile entry = new DataSetFile();
            entry.setName(b.getName());
            entry.setDirectory(true);
            entry.setModificationTime(b.getCreationDate().getTime());
            entry.setPath(scheme + "://" + b.getName() + "/");
            return entry;
        }).collect(Collectors.toList());
    } finally {
        s3.shutdown();
    }
}
Example usage of com.thinkbiganalytics.kylo.catalog.CatalogException in the Kylo project by Teradata: class AzureNativeFileSystemProvider, method getCredentials.
/**
 * Builds Azure storage credentials for the account named by the URI's host, using the
 * account key found in the Hadoop configuration.
 *
 * @param uri  the wasb/wasbs URI; its host component is the storage account name
 * @param conf the Hadoop configuration holding the account key
 * @return account-and-key credentials, or {@code null} when no key is configured
 * @throws CatalogException if the account name is missing or the key cannot be resolved
 */
@Nullable
private StorageCredentials getCredentials(@Nonnull final URI uri, @Nonnull final Configuration conf) {
    // The storage account name is encoded as the host portion of the URI
    final String accountName = uri.getHost();
    if (StringUtils.isEmpty(accountName)) {
        throw new CatalogException("catalog.fs.azure.missingAccountName");
    }

    // Resolve the account key; NOTE(review): the original cause is dropped here —
    // confirm whether CatalogException offers a cause-accepting constructor
    final String accountKey;
    try {
        accountKey = AzureNativeFileSystemStore.getAccountKeyFromConfiguration(accountName, conf);
    } catch (final KeyProviderException e) {
        throw new CatalogException("catalog.fs.azure.invalidAccountKey");
    }

    // No key configured: signal the caller with null rather than failing
    if (StringUtils.isEmpty(accountKey)) {
        return null;
    }

    // Strip any domain suffix, e.g. "account.blob.core.windows.net" -> "account"
    final String rawAccountName = accountName.split("\\.")[0];
    return new StorageCredentialsAccountAndKey(rawAccountName, accountKey);
}
Example usage of com.thinkbiganalytics.kylo.catalog.CatalogException in the Kylo project by Teradata: class DataSourceProvider, method createDataSource.
/**
 * Creates a new data source using the specified template.
 *
 * <p>The whole operation runs inside a metadata transaction: the referenced connector is
 * resolved, the domain entity is created, any NiFi controller service declared by the
 * connector plugin is created or updated, credential placeholders are applied, and the
 * persisted entity is returned as a REST model.</p>
 *
 * @param source        the data source template to create from
 * @param checkExisting whether to check for a conflicting existing controller service
 * @return the created data source as a REST model, including its connector
 * @throws CatalogException if the data source is not valid
 * @throws PotentialControllerServiceConflictException if a conflicting controller service exists
 */
@Nonnull
public DataSource createDataSource(@Nonnull final DataSource source, boolean checkExisting) throws PotentialControllerServiceConflictException {
    return metadataService.commit(() -> {
        // Resolve the referenced connector; an unknown or missing id is a client error
        final com.thinkbiganalytics.metadata.api.catalog.Connector connector =
            Optional.ofNullable(source.getConnector())
                .map(Connector::getId)
                .map(connectorProvider::resolveId)
                .flatMap(connectorProvider::find)
                .orElseThrow(() -> new CatalogException("catalog.datasource.connector.invalid"));

        // Persist the new domain entity under the resolved connector
        final com.thinkbiganalytics.metadata.api.catalog.DataSource domain =
            metadataProvider.create(connector.getId(), source.getTitle());

        // If the connector's plugin declares a NiFi controller service, create or update it
        final ConnectorPluginDescriptor plugin =
            pluginManager.getPlugin(connector.getPluginId()).map(ConnectorPlugin::getDescriptor).orElse(null);
        if (plugin != null && plugin.getNifiControllerService() != null) {
            createOrUpdateNiFiControllerService(source, plugin, checkExisting);
        }

        // Substitute credential placeholders for the current principals, then update the catalog
        final DataSource updatedDataSource =
            this.credentialManager.applyPlaceholders(source, SecurityContextUtil.getCurrentPrincipals());
        modelTransform.updateDataSource(updatedDataSource, domain);

        // Return a copy with the connector attached
        return modelTransform.dataSourceToRestModel().apply(domain);
    });
}
Example usage of com.thinkbiganalytics.kylo.catalog.CatalogException in the Kylo project by Teradata: class DataSourceController, method updateDataSource.
/**
 * Updates an existing data source.
 *
 * <p>Validation failures raised by the service as {@link CatalogException} are translated
 * into HTTP 400 responses with a localized message.</p>
 *
 * @param source the data source to update (must carry its id)
 * @return HTTP 200 with the updated data source
 * @throws BadRequestException if the data source is invalid
 */
@PUT
@ApiOperation("Updates an existing data source")
@ApiResponses({ @ApiResponse(code = 200, message = "Data source updated", response = DataSource.class), @ApiResponse(code = 400, message = "Invalid connector", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "Internal server error", response = RestResponseStatus.class) })
@Path("{id}")
@Consumes(MediaType.APPLICATION_JSON)
public Response updateDataSource(@Nonnull final DataSource source) {
    log.entry(source);
    final DataSource dataSource;
    try {
        dataSource = dataSourceService.updateDataSource(source);
    } catch (final CatalogException e) {
        if (log.isDebugEnabled()) {
            // Fixed copy-paste bug: message previously said "create" in the update endpoint
            log.debug("Cannot update data source from request: " + source, e);
        }
        throw new BadRequestException(getMessage(e));
    }
    return Response.ok(log.exit(dataSource)).build();
}
Aggregations