use of com.logicalclocks.servicediscoverclient.service.Service in project hopsworks by logicalclocks.
the class OfflineFeatureGroupController method openMetastoreClient.
// Here we can't use the HiveMetaStoreClient.java wrapper, as it would require exporting environment variables and so on;
// instead we assemble the Thrift client directly, which is what HiveMetaStoreClient does behind the scenes.
private ThriftHiveMetastore.Client openMetastoreClient(Project project, Users user)
    throws ServiceException, IOException {
  String hdfsUsername = hdfsUsersController.getHdfsUserName(project, user);
  ThriftHiveMetastore.Client client = null;
  try {
    certificateMaterializer.materializeCertificatesLocal(user.getUsername(), project.getName());
    CertificateMaterializer.CryptoMaterial userMaterial =
        certificateMaterializer.getUserMaterial(user.getUsername(), project.getName());
    // Read the password
    String password = String.copyValueOf(userMaterial.getPassword());
    // Get the metastore service information from Consul
    Service metastoreService = serviceDiscoveryController
        .getAnyAddressOfServiceWithDNS(ServiceDiscoveryController.HopsworksService.HIVE_METASTORE);
    TTransport transport;
    if (settings.getHopsRpcTls()) {
      // Set up a secure connection with the Hive metastore.
      TSSLTransportFactory.TSSLTransportParameters params = new TSSLTransportFactory.TSSLTransportParameters();
      params.setTrustStore(certificateMaterializer.getUserTransientTruststorePath(project, user), password);
      params.setKeyStore(certificateMaterializer.getUserTransientKeystorePath(project, user), password);
      transport = TSSLTransportFactory.getClientSocket(metastoreService.getAddress(),
          metastoreService.getPort(), CONNECTION_TIMEOUT, params);
    } else {
      transport = new TSocket(TConfiguration.DEFAULT, metastoreService.getAddress(),
          metastoreService.getPort(), CONNECTION_TIMEOUT);
    }
    TProtocol protocol = new TBinaryProtocol(transport);
    client = new ThriftHiveMetastore.Client(protocol);
    // Open the transport
    if (!transport.isOpen()) {
      transport.open();
    }
    // Set the UGI on the metastore side
    client.set_ugi(hdfsUsername, new ArrayList<>());
    if (settings.getHopsRpcTls()) {
      // Send the certificate to the metastore so it can operate with the fs.
      client.set_crypto(userMaterial.getKeyStore(), password, userMaterial.getTrustStore(), password, false);
    }
  } catch (CryptoPasswordNotFoundException | ServiceDiscoveryException | TException e) {
    throw new ServiceException(RESTCodes.ServiceErrorCode.METASTORE_CONNECTION_ERROR, Level.SEVERE,
        "Hive metastore connection error", e.getMessage(), e);
  }
  return client;
}
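The method leaves the transport open, and without the HiveMetaStoreClient wrapper the raw client exposes no close(). A minimal caller sketch, assuming hypothetical project, user, and LOGGER variables in scope:

ThriftHiveMetastore.Client client = openMetastoreClient(project, user);
try {
  // Any Thrift metastore call is available at this point, e.g. listing databases.
  for (String db : client.get_all_databases()) {
    LOGGER.info(db);
  }
} catch (TException e) {
  LOGGER.warning("Metastore call failed: " + e.getMessage());
} finally {
  // The raw client has no close(); shut the underlying transport directly.
  client.getInputProtocol().getTransport().close();
}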
use of com.logicalclocks.servicediscoverclient.service.Service in project hopsworks by logicalclocks.
the class HttpClient method execute.
public <T> T execute(HttpRequest request, ResponseHandler<T> handler) throws IOException {
  if (host == null) {
    try {
      Service hopsworksService = serviceDiscoveryController
          .getAnyAddressOfServiceWithDNS(ServiceDiscoveryController.HopsworksService.HOPSWORKS_APP);
      host = new HttpHost(hopsworksService.getName(), hopsworksService.getPort(), "HTTPS");
    } catch (ServiceDiscoveryException ex) {
      throw new IOException(ex);
    }
  }
  return client.execute(host, request, handler);
}
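Because execute() accepts any ResponseHandler<T>, Apache HttpClient's stock org.apache.http.impl.client.BasicResponseHandler can read the body as a String. A hypothetical usage sketch (the request path and the httpClient variable are invented for illustration):

// Host resolution happens lazily inside execute(), so the caller only
// supplies the request and a handler for the response body.
HttpGet request = new HttpGet("/hopsworks-api/api/endpoint"); // hypothetical path
String body = httpClient.execute(request, new BasicResponseHandler());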
use of com.logicalclocks.servicediscoverclient.service.Service in project hopsworks by logicalclocks.
the class JupyterConfigFilesGenerator method createJupyterNotebookConfig.
public void createJupyterNotebookConfig(Writer out, Project project, int port, JupyterSettings js,
    String hdfsUser, String certsDir, String allowOrigin)
    throws IOException, ServiceException, ServiceDiscoveryException {
  Service namenode = serviceDiscoveryController
      .getAnyAddressOfServiceWithDNS(ServiceDiscoveryController.HopsworksService.RPC_NAMENODE);
  String hopsworksRestEndpoint = "https://"
      + serviceDiscoveryController.constructServiceFQDNWithPort(ServiceDiscoveryController.HopsworksService.HOPSWORKS_APP);
  String remoteGitURL = "";
  String apiKey = "";
  String gitBackend = "";
  if (js.isGitBackend() && js.getGitConfig() != null) {
    remoteGitURL = js.getGitConfig().getRemoteGitURL();
    gitBackend = js.getGitConfig().getGitBackend().name();
    if (!Strings.isNullOrEmpty(js.getGitConfig().getApiKeyName())) {
      apiKey = jupyterNbVCSController.getGitApiKey(hdfsUser, js.getGitConfig().getApiKeyName());
    }
  }
  JupyterContentsManager jcm = jupyterNbVCSController.getJupyterContentsManagerClass(remoteGitURL);
  JupyterNotebookConfigTemplate template = JupyterNotebookConfigTemplateBuilder.newBuilder()
      .setProject(project)
      .setNamenodeIp(namenode.getAddress())
      .setNamenodePort(String.valueOf(namenode.getPort()))
      .setContentsManager(jcm.getClassName())
      .setHopsworksEndpoint(hopsworksRestEndpoint)
      .setElasticEndpoint(settings.getElasticEndpoint())
      .setPort(port)
      .setBaseDirectory(js.getBaseDir())
      .setHdfsUser(hdfsUser)
      .setWhiteListedKernels("'" + pythonKernelName(project.getPythonEnvironment().getPythonVersion())
          + "', 'pysparkkernel', 'sparkkernel', 'sparkrkernel'")
      .setHadoopHome(settings.getHadoopSymbolicLinkDir())
      .setJupyterCertsDirectory(certsDir)
      .setSecretDirectory(settings.getStagingDir() + Settings.PRIVATE_DIRS + js.getSecret())
      .setAllowOrigin(allowOrigin)
      .setWsPingInterval(settings.getJupyterWSPingInterval())
      .setApiKey(apiKey)
      .setGitBackend(gitBackend)
      .setFlinkConfDirectory(settings.getFlinkConfDir())
      .setFlinkLibDirectory(settings.getFlinkLibDir())
      .setHadoopClasspathGlob(settings.getHadoopClasspathGlob())
      .setRequestsVerify(settings.getRequestsVerify())
      .setDomainCATruststore(Paths.get(certsDir, hdfsUser + Settings.TRUSTSTORE_SUFFIX).toString())
      .setServiceDiscoveryDomain(settings.getServiceDiscoveryDomain())
      .setKafkaBrokers(kafkaBrokers.getKafkaBrokersString())
      .build();
  Map<String, Object> dataModel = new HashMap<>(1);
  dataModel.put("conf", template);
  try {
    templateEngine.template(JupyterNotebookConfigTemplate.TEMPLATE_NAME, dataModel, out);
  } catch (TemplateException ex) {
    throw new IOException(ex);
  }
}
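A hypothetical caller sketch: jupyterConfigFilesGenerator, confDirPath, and the remaining arguments are invented names assumed in scope, and the file name follows Jupyter's jupyter_notebook_config.py convention:

// Render the template straight into the notebook config file under confDirPath.
try (Writer out = new FileWriter(Paths.get(confDirPath, "jupyter_notebook_config.py").toFile())) {
  jupyterConfigFilesGenerator.createJupyterNotebookConfig(out, project, port, js, hdfsUser, certsDir, allowOrigin);
}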
use of com.logicalclocks.servicediscoverclient.service.Service in project hopsworks by logicalclocks.
the class JupyterConfigFilesGenerator method createSparkMagicConfig.
public void createSparkMagicConfig(Writer out, Project project, JupyterSettings js, String hdfsUser,
    Users hopsworksUser, String confDirPath)
    throws IOException, ServiceDiscoveryException, JobException, ApiKeyException {
  SparkJobConfiguration sparkJobConfiguration = (SparkJobConfiguration) js.getJobConfig();
  // If the user selected Python, we should use the default Spark configuration for the Spark/PySpark kernels
  if (js.isPythonKernel()) {
    sparkJobConfiguration = (SparkJobConfiguration) jobController.getConfiguration(project, JobType.SPARK, true);
  }
  SparkConfigurationUtil sparkConfigurationUtil = new SparkConfigurationUtil();
  Map<String, String> extraJavaOptions = new HashMap<>();
  extraJavaOptions.put(Settings.LOGSTASH_JOB_INFO, project.getName().toLowerCase() + ",jupyter,notebook,?");
  HashMap<String, String> finalSparkConfiguration = new HashMap<>();
  finalSparkConfiguration.put(Settings.SPARK_DRIVER_STAGINGDIR_ENV,
      "hdfs:///Projects/" + project.getName() + "/Resources/.sparkStaging");
  // Set the Hopsworks Consul service domain; don't use the address, use the name
  String hopsworksRestEndpoint = "https://"
      + serviceDiscoveryController.constructServiceFQDNWithPort(ServiceDiscoveryController.HopsworksService.HOPSWORKS_APP);
  finalSparkConfiguration.putAll(sparkConfigurationUtil.setFrameworkProperties(project, sparkJobConfiguration,
      settings, hdfsUser, hopsworksUser, extraJavaOptions, kafkaBrokers.getKafkaBrokersString(),
      hopsworksRestEndpoint, servingConfig, serviceDiscoveryController));
  StringBuilder sparkConfBuilder = new StringBuilder();
  ArrayList<String> keys = new ArrayList<>(finalSparkConfiguration.keySet());
  Collections.sort(keys);
  for (String configKey : keys) {
    sparkConfBuilder.append("\t\"" + configKey + "\":\"" + finalSparkConfiguration.get(configKey) + "\",\n");
  }
  sparkConfBuilder.deleteCharAt(sparkConfBuilder.lastIndexOf(","));
  try {
    Service livyService = serviceDiscoveryController
        .getAnyAddressOfServiceWithDNS(ServiceDiscoveryController.HopsworksService.LIVY);
    SparkMagicConfigTemplateBuilder templateBuilder = SparkMagicConfigTemplateBuilder.newBuilder()
        .setLivyIp(livyService.getAddress())
        .setJupyterHome(confDirPath)
        .setDriverCores(Integer.parseInt(finalSparkConfiguration.get(Settings.SPARK_DRIVER_CORES_ENV)))
        .setDriverMemory(finalSparkConfiguration.get(Settings.SPARK_DRIVER_MEMORY_ENV))
        .setLivyStartupTimeout(settings.getLivyStartupTimeout());
    if (sparkJobConfiguration.isDynamicAllocationEnabled() || sparkJobConfiguration.getExperimentType() != null) {
      templateBuilder.setNumExecutors(1);
    } else {
      templateBuilder.setNumExecutors(Integer.parseInt(finalSparkConfiguration.get(Settings.SPARK_NUMBER_EXECUTORS_ENV)));
    }
    templateBuilder
        .setExecutorCores(Integer.parseInt(finalSparkConfiguration.get(Settings.SPARK_EXECUTOR_CORES_ENV)))
        .setExecutorMemory(finalSparkConfiguration.get(Settings.SPARK_EXECUTOR_MEMORY_ENV))
        .setHdfsUser(hdfsUser)
        .setYarnQueue(sparkJobConfiguration.getAmQueue())
        .setHadoopHome(settings.getHadoopSymbolicLinkDir())
        .setHadoopVersion(settings.getHadoopVersion())
        .setSparkConfiguration(sparkConfBuilder.toString());
    Map<String, Object> dataModel = new HashMap<>(1);
    dataModel.put("conf", templateBuilder.build());
    templateEngine.template(SparkMagicConfigTemplate.TEMPLATE_NAME, dataModel, out);
  } catch (TemplateException | ServiceDiscoveryException ex) {
    throw new IOException(ex);
  }
}
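A matching caller sketch for the sparkmagic side, again hypothetical; config.json is sparkmagic's conventional config file name:

// Render the sparkmagic configuration into the Jupyter conf directory.
try (Writer out = new FileWriter(Paths.get(confDirPath, "config.json").toFile())) {
  jupyterConfigFilesGenerator.createSparkMagicConfig(out, project, js, hdfsUser, hopsworksUser, confDirPath);
}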
use of com.logicalclocks.servicediscoverclient.service.Service in project hopsworks by logicalclocks.
the class ServiceDiscoveryController method getAnyAddressOfServiceWithDNS.
@Lock(LockType.READ)
public Service getAnyAddressOfServiceWithDNS(HopsworksService serviceName) throws ServiceDiscoveryException {
  ServiceQuery serviceQuery = ServiceQuery.of(constructServiceFQDN(serviceName), Collections.emptySet());
  Optional<Service> serviceOpt = getService(Type.DNS, serviceQuery).findAny();
  return serviceOpt.orElseThrow(() -> new ServiceNotFoundException("Could not find service with: " + serviceQuery));
}
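Callers resolve one healthy instance and read the host and port off the returned Service, as the excerpts above do; a condensed sketch, assuming a serviceDiscoveryController in scope:

Service metastore = serviceDiscoveryController
    .getAnyAddressOfServiceWithDNS(ServiceDiscoveryController.HopsworksService.HIVE_METASTORE);
String endpoint = metastore.getAddress() + ":" + metastore.getPort();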