use of io.hops.hopsworks.exceptions.CryptoPasswordNotFoundException in project hopsworks by logicalclocks.
the class DistributedFsService method getDfsOpsForTesting.
public DistributedFileSystemOps getDfsOpsForTesting(String username) {
  if (username == null || username.isEmpty()) {
    throw new NullPointerException("username not set.");
  }
  // Get hdfs groups
  Collection<HdfsGroups> groups = hdfsUsersFacade.findByName(username).getHdfsGroupsCollection();
  String[] userGroups = new String[groups.size()];
  Iterator<HdfsGroups> iter = groups.iterator();
  int i = 0;
  while (iter.hasNext()) {
    userGroups[i] = iter.next().getName();
    i++;
  }
  UserGroupInformation ugi;
  try {
    ugi = UserGroupInformation.createProxyUserForTesting(username, UserGroupInformation.getLoginUser(), userGroups);
  } catch (IOException ex) {
    logger.log(Level.SEVERE, null, ex);
    return null;
  }
  if (settings.getHopsRpcTls()) {
    // Runtime exceptions are not useful
    try {
      bhcs.materializeCertsForNonSuperUser(username);
      Configuration newConf = new Configuration(conf);
      bhcs.configureTlsForProjectSpecificUser(username, transientDir, newConf);
      return new DistributedFileSystemOps(ugi, newConf);
    } catch (CryptoPasswordNotFoundException ex) {
      logger.log(Level.SEVERE, ex.getMessage(), ex);
      bhcs.removeNonSuperUserCertificate(username);
      return null;
    }
  }
  return new DistributedFileSystemOps(ugi, conf);
}
use of io.hops.hopsworks.exceptions.CryptoPasswordNotFoundException in project hopsworks by logicalclocks.
the class OfflineFeatureGroupController method openMetastoreClient.
// Here we can't use the HiveMetaStoreClient.java wrapper as we would need to export environment variables and so on;
// instead we assemble the Thrift client directly, which is what the HiveMetaStoreClient does behind the scenes.
private ThriftHiveMetastore.Client openMetastoreClient(Project project, Users user) throws ServiceException, IOException {
  String hdfsUsername = hdfsUsersController.getHdfsUserName(project, user);
  ThriftHiveMetastore.Client client = null;
  try {
    certificateMaterializer.materializeCertificatesLocal(user.getUsername(), project.getName());
    CertificateMaterializer.CryptoMaterial userMaterial =
        certificateMaterializer.getUserMaterial(user.getUsername(), project.getName());
    // Read the password
    String password = String.copyValueOf(userMaterial.getPassword());
    // Get metastore service information from consul
    Service metastoreService =
        serviceDiscoveryController.getAnyAddressOfServiceWithDNS(ServiceDiscoveryController.HopsworksService.HIVE_METASTORE);
    TTransport transport;
    if (settings.getHopsRpcTls()) {
      // Set up a secure connection with the Hive metastore.
      TSSLTransportFactory.TSSLTransportParameters params = new TSSLTransportFactory.TSSLTransportParameters();
      params.setTrustStore(certificateMaterializer.getUserTransientTruststorePath(project, user), password);
      params.setKeyStore(certificateMaterializer.getUserTransientKeystorePath(project, user), password);
      transport = TSSLTransportFactory.getClientSocket(metastoreService.getAddress(), metastoreService.getPort(),
          CONNECTION_TIMEOUT, params);
    } else {
      transport = new TSocket(TConfiguration.DEFAULT, metastoreService.getAddress(), metastoreService.getPort(),
          CONNECTION_TIMEOUT);
    }
    TProtocol protocol = new TBinaryProtocol(transport);
    client = new ThriftHiveMetastore.Client(protocol);
    // Open the transport
    if (!transport.isOpen()) {
      transport.open();
    }
    // Set the UGI on the metastore side
    client.set_ugi(hdfsUsername, new ArrayList<>());
    if (settings.getHopsRpcTls()) {
      // Send the certificate to the metastore so it can operate with the fs.
      client.set_crypto(userMaterial.getKeyStore(), password, userMaterial.getTrustStore(), password, false);
    }
  } catch (CryptoPasswordNotFoundException | ServiceDiscoveryException | TException e) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.METASTORE_CONNECTION_ERROR, Level.SEVERE,
        "Hive metastore connection error", e.getMessage(), e);
  }
  return client;
}
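The returned client wraps an open TTransport that the caller is responsible for closing, and the certificates materialized above should be removed once the metastore work is done. A minimal caller sketch, assuming the snippet lives in the same class; the database and table names are invented for illustration, and get_table/getInputProtocol come from the generated Thrift client and libthrift's TServiceClient.

// Sketch: using the metastore client and cleaning up afterwards (illustrative names).
ThriftHiveMetastore.Client client = openMetastoreClient(project, user);
try {
  // Hypothetical lookup: database and table names are examples only.
  Table table = client.get_table("my_project_featurestore", "my_feature_group_1");
  LOGGER.log(Level.FINE, "Table location: " + table.getSd().getLocation());
} catch (TException e) {
  LOGGER.log(Level.SEVERE, "Metastore call failed", e);
} finally {
  if (client.getInputProtocol() != null) {
    client.getInputProtocol().getTransport().close();
  }
  // De-materialize the user certificates once the metastore operation is done
  certificateMaterializer.removeCertificatesLocal(user.getUsername(), project.getName());
}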
use of io.hops.hopsworks.exceptions.CryptoPasswordNotFoundException in project hopsworks by logicalclocks.
the class CachedFeaturegroupController method initConnection.
/**
* Initializes a JDBC connection (thrift RPC) to HS2 using SSL with a given project user and database
*
* @param databaseName name of the Hive database to open a connection to
* @param project the project of the user making the request
* @param user the user making the request
* @return conn the JDBC connection
* @throws FeaturestoreException
*/
private Connection initConnection(String databaseName, Project project, Users user) throws FeaturestoreException {
  try {
    // Create connection url
    String hiveEndpoint = hiveController.getHiveServerInternalEndpoint();
    // Materialize certs
    certificateMaterializer.materializeCertificatesLocal(user.getUsername(), project.getName());
    // Read password
    String password = String.copyValueOf(
        certificateMaterializer.getUserMaterial(user.getUsername(), project.getName()).getPassword());
    String jdbcString = HiveController.HIVE_JDBC_PREFIX + hiveEndpoint + "/" + databaseName + ";"
        + "auth=noSasl;ssl=true;twoWay=true;"
        + "sslTrustStore=" + certificateMaterializer.getUserTransientTruststorePath(project, user) + ";"
        + "trustStorePassword=" + password + ";"
        + "sslKeyStore=" + certificateMaterializer.getUserTransientKeystorePath(project, user) + ";"
        + "keyStorePassword=" + password;
    return DriverManager.getConnection(jdbcString);
  } catch (FileNotFoundException | CryptoPasswordNotFoundException | ServiceDiscoveryException e) {
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.CERTIFICATES_NOT_FOUND, Level.SEVERE,
        "project: " + project.getName() + ", hive database: " + databaseName, e.getMessage(), e);
  } catch (SQLException | IOException e) {
    certificateMaterializer.removeCertificatesLocal(user.getUsername(), project.getName());
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.COULD_NOT_INITIATE_HIVE_CONNECTION, Level.SEVERE,
        "project: " + project.getName() + ", hive database: " + databaseName, e.getMessage(), e);
  }
}
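As the Javadoc above describes, the connection is a per-user SSL JDBC session to HiveServer2; a caller would typically run a statement, close the connection, and release the materialized certificates, mirroring the cleanup shown in the SQLException branch. A minimal caller sketch, assuming it sits inside a method of the same class that declares throws FeaturestoreException; the database name and SQL are illustrative only.

// Sketch: one statement over the SSL JDBC connection, followed by cleanup.
Connection conn = null;
try {
  conn = initConnection("my_project_featurestore", project, user);  // hypothetical database name
  try (Statement stmt = conn.createStatement()) {
    stmt.execute("SHOW TABLES");  // illustrative query
  }
} catch (SQLException e) {
  LOGGER.log(Level.SEVERE, "Hive query failed", e);
} finally {
  try {
    if (conn != null) {
      conn.close();
    }
  } catch (SQLException e) {
    LOGGER.log(Level.FINE, "Failed to close Hive connection", e);
  }
  // Release the certificates materialized for this connection
  certificateMaterializer.removeCertificatesLocal(user.getUsername(), project.getName());
}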
use of io.hops.hopsworks.exceptions.CryptoPasswordNotFoundException in project hopsworks by logicalclocks.
the class DistributedFsService method getDfsOps.
/**
 * Returns the user specific distributed file system operations
 * <p>
 * @param username the HDFS username to act on behalf of
 * @return a DistributedFileSystemOps instance for the given user, or null if it could not be created
 */
public DistributedFileSystemOps getDfsOps(String username) {
  if (username == null || username.isEmpty()) {
    throw new IllegalArgumentException("username not provided.");
  }
  UserGroupInformation ugi;
  try {
    ugi = UserGroupInformation.createProxyUser(username, UserGroupInformation.getLoginUser());
  } catch (IOException ex) {
    logger.log(Level.SEVERE, null, ex);
    return null;
  }
  if (settings.getHopsRpcTls()) {
    // Runtime exceptions are not useful
    try {
      bhcs.materializeCertsForNonSuperUser(username);
      Configuration newConf = new Configuration(conf);
      bhcs.configureTlsForProjectSpecificUser(username, transientDir, newConf);
      return new DistributedFileSystemOps(ugi, newConf);
    } catch (CryptoPasswordNotFoundException ex) {
      logger.log(Level.SEVERE, ex.getMessage(), ex);
      bhcs.removeNonSuperUserCertificate(username);
      return null;
    }
  }
  return new DistributedFileSystemOps(ugi, conf);
}
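As the Javadoc notes, the returned handle is user specific, and the caller is expected to release it once finished. A minimal sketch of that call pattern, assuming the service is injected as dfs; the closeDfsClient helper, the exists call, and the path are assumptions for illustration, not confirmed API.

// Sketch: user-scoped HDFS access released in a finally block (names are assumptions).
DistributedFileSystemOps udfso = null;
try {
  udfso = dfs.getDfsOps(hdfsUsersController.getHdfsUserName(project, user));
  if (udfso != null && udfso.exists("/Projects/myproj/Resources")) {  // illustrative path
    LOGGER.log(Level.FINE, "Resources dataset exists for the project");
  }
} catch (IOException e) {
  LOGGER.log(Level.SEVERE, "HDFS operation failed", e);
} finally {
  if (udfso != null) {
    dfs.closeDfsClient(udfso);  // assumed cleanup helper on DistributedFsService
  }
}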
use of io.hops.hopsworks.exceptions.CryptoPasswordNotFoundException in project hopsworks by logicalclocks.
the class KafkaInferenceLogger method logInferenceRequest.
@Override
@Asynchronous
public void logInferenceRequest(Serving serving, String inferenceRequest, Integer responseHttpCode, String inferenceResponse) {
  if (serving.getKafkaTopic() == null) {
    // Nothing to log
    return;
  }
  // Setup the producer for the given project
  KafkaProducer<String, byte[]> kafkaProducer = null;
  try {
    kafkaProducer = setupProducer(serving.getProject());
  } catch (IOException | CryptoPasswordNotFoundException e) {
    LOGGER.log(Level.FINE, "Failed to setup the producer for the project: " + serving.getProject().getName(), e);
    // We didn't manage to write the log to Kafka, nothing we can do.
    return;
  }
  // Get the schema for the topic and the serializer
  Schema avroSchema = new Schema.Parser().parse(serving.getKafkaTopic().getSubjects().getSchema().getSchema());
  Injection<GenericRecord, byte[]> recordSerializer = GenericAvroCodecs.toBinary(avroSchema);
  // Get the version of the schema
  int schemaVersion = serving.getKafkaTopic().getSubjects().getVersion();
  // Create the GenericRecord from the avroSchema
  GenericData.Record inferenceRecord = new GenericData.Record(avroSchema);
  // Populate the inference record with data
  populateInfererenceRecord(serving, inferenceRequest, responseHttpCode, inferenceResponse, inferenceRecord, schemaVersion);
  // Serialize the record to bytes
  byte[] inferenceRecordBytes = recordSerializer.apply(inferenceRecord);
  // Push the record to the topic
  ProducerRecord<String, byte[]> inferenceKakfaRecord =
      new ProducerRecord<>(serving.getKafkaTopic().getTopicName(), inferenceRecordBytes);
  try {
    kafkaProducer.send(inferenceKakfaRecord);
  } catch (Exception e) {
    LOGGER.log(Level.FINE, "Cannot write to topic: " + serving.getKafkaTopic().getTopicName(), e);
    // We didn't manage to write the log to Kafka, nothing we can do.
  } finally {
    if (kafkaProducer != null) {
      kafkaProducer.flush();
      kafkaProducer.close();
    }
  }
  // De-materialize the certificate
  certificateMaterializer.removeCertificatesLocal(SERVING_MANAGER_USERNAME, serving.getProject().getName());
}