use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
the class JupyterConfigFilesGenerator method createJupyterKernelConfig.
public void createJupyterKernelConfig(Writer out, Project project, JupyterSettings js, String hdfsUser)
    throws IOException {
  try {
    KernelTemplate kernelTemplate = KernelTemplateBuilder.newBuilder()
        .setHdfsUser(hdfsUser)
        .setHadoopHome(settings.getHadoopSymbolicLinkDir())
        .setHadoopVersion(settings.getHadoopVersion())
        .setAnacondaHome(settings.getAnacondaProjectDir())
        .setSecretDirectory(settings.getStagingDir() + Settings.PRIVATE_DIRS + js.getSecret())
        .setProject(project)
        .setHiveEndpoints(hiveController.getHiveServerInternalEndpoint())
        .setLibHdfsOpts("-Xmx512m")
        .build();
    Map<String, Object> dataModel = new HashMap<>(1);
    dataModel.put("kernel", kernelTemplate);
    templateEngine.template(KernelTemplate.TEMPLATE_NAME, dataModel, out);
  } catch (TemplateException | ServiceDiscoveryException ex) {
    throw new IOException(ex);
  }
}
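A minimal usage sketch of the method above, assuming a caller that already holds the injected JupyterConfigFilesGenerator plus the project, JupyterSettings, HDFS user and config directory (the variable names below are hypothetical). Note that TemplateException and ServiceDiscoveryException never reach the caller directly; they arrive wrapped in the IOException.

// Hypothetical caller; configFilesGenerator, project, jupyterSettings, hdfsUser and confDirPath
// are assumed to be in scope.
File kernelJson = new File(confDirPath, "kernel.json");
try (Writer out = new FileWriter(kernelJson)) {
  configFilesGenerator.createJupyterKernelConfig(out, project, jupyterSettings, hdfsUser);
} catch (IOException e) {
  // TemplateException and ServiceDiscoveryException arrive here wrapped in an IOException
  LOGGER.log(Level.WARNING, "Could not generate the Jupyter kernel config", e);
}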
use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
the class JupyterConfigFilesGenerator method createSparkMagicConfig.
public void createSparkMagicConfig(Writer out, Project project, JupyterSettings js, String hdfsUser,
    Users hopsworksUser, String confDirPath)
    throws IOException, ServiceDiscoveryException, JobException, ApiKeyException {
  SparkJobConfiguration sparkJobConfiguration = (SparkJobConfiguration) js.getJobConfig();
  // If user selected Python we should use the default spark configuration for Spark/PySpark kernels
  if (js.isPythonKernel()) {
    sparkJobConfiguration = (SparkJobConfiguration) jobController.getConfiguration(project, JobType.SPARK, true);
  }
  SparkConfigurationUtil sparkConfigurationUtil = new SparkConfigurationUtil();
  Map<String, String> extraJavaOptions = new HashMap<>();
  extraJavaOptions.put(Settings.LOGSTASH_JOB_INFO, project.getName().toLowerCase() + ",jupyter,notebook,?");
  HashMap<String, String> finalSparkConfiguration = new HashMap<>();
  finalSparkConfiguration.put(Settings.SPARK_DRIVER_STAGINGDIR_ENV,
      "hdfs:///Projects/" + project.getName() + "/Resources/.sparkStaging");
  // Set Hopsworks consul service domain, don't use the address, use the name
  String hopsworksRestEndpoint = "https://" +
      serviceDiscoveryController.constructServiceFQDNWithPort(ServiceDiscoveryController.HopsworksService.HOPSWORKS_APP);
  finalSparkConfiguration.putAll(
      sparkConfigurationUtil.setFrameworkProperties(project, sparkJobConfiguration, settings, hdfsUser,
          hopsworksUser, extraJavaOptions, kafkaBrokers.getKafkaBrokersString(), hopsworksRestEndpoint,
          servingConfig, serviceDiscoveryController));
  StringBuilder sparkConfBuilder = new StringBuilder();
  ArrayList<String> keys = new ArrayList<>(finalSparkConfiguration.keySet());
  Collections.sort(keys);
  for (String configKey : keys) {
    sparkConfBuilder.append("\t\"" + configKey + "\":\"" + finalSparkConfiguration.get(configKey) + "\"," + "\n");
  }
  sparkConfBuilder.deleteCharAt(sparkConfBuilder.lastIndexOf(","));
  try {
    Service livyService =
        serviceDiscoveryController.getAnyAddressOfServiceWithDNS(ServiceDiscoveryController.HopsworksService.LIVY);
    SparkMagicConfigTemplateBuilder templateBuilder = SparkMagicConfigTemplateBuilder.newBuilder()
        .setLivyIp(livyService.getAddress())
        .setJupyterHome(confDirPath)
        .setDriverCores(Integer.parseInt(finalSparkConfiguration.get(Settings.SPARK_DRIVER_CORES_ENV)))
        .setDriverMemory(finalSparkConfiguration.get(Settings.SPARK_DRIVER_MEMORY_ENV))
        .setLivyStartupTimeout(settings.getLivyStartupTimeout());
    if (sparkJobConfiguration.isDynamicAllocationEnabled() || sparkJobConfiguration.getExperimentType() != null) {
      templateBuilder.setNumExecutors(1);
    } else {
      templateBuilder.setNumExecutors(
          Integer.parseInt(finalSparkConfiguration.get(Settings.SPARK_NUMBER_EXECUTORS_ENV)));
    }
    templateBuilder
        .setExecutorCores(Integer.parseInt(finalSparkConfiguration.get(Settings.SPARK_EXECUTOR_CORES_ENV)))
        .setExecutorMemory(finalSparkConfiguration.get(Settings.SPARK_EXECUTOR_MEMORY_ENV))
        .setHdfsUser(hdfsUser)
        .setYarnQueue(sparkJobConfiguration.getAmQueue())
        .setHadoopHome(settings.getHadoopSymbolicLinkDir())
        .setHadoopVersion(settings.getHadoopVersion())
        .setSparkConfiguration(sparkConfBuilder.toString());
    Map<String, Object> dataModel = new HashMap<>(1);
    dataModel.put("conf", templateBuilder.build());
    templateEngine.template(SparkMagicConfigTemplate.TEMPLATE_NAME, dataModel, out);
  } catch (TemplateException | ServiceDiscoveryException ex) {
    throw new IOException(ex);
  }
}
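For illustration, the sorted-key loop above emits one quoted "key":"value" pair per line and then strips the trailing comma; a standalone sketch with two made-up entries (the keys are standard Spark properties, the values are invented) shows the shape of the fragment that is passed to setSparkConfiguration:

// Standalone sketch of the sparkConfBuilder loop, with invented values.
Map<String, String> conf = new HashMap<>();
conf.put("spark.driver.memory", "2048m");
conf.put("spark.executor.cores", "4");

StringBuilder sb = new StringBuilder();
ArrayList<String> sortedKeys = new ArrayList<>(conf.keySet());
Collections.sort(sortedKeys);
for (String key : sortedKeys) {
  sb.append("\t\"" + key + "\":\"" + conf.get(key) + "\",\n");
}
sb.deleteCharAt(sb.lastIndexOf(","));  // drop the comma after the last pair
// sb now contains:
//     "spark.driver.memory":"2048m",
//     "spark.executor.cores":"4"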
use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
the class JupyterConfigFilesGenerator method generateConfiguration.
public JupyterPaths generateConfiguration(Project project, String secretConfig, String hdfsUser,
    Users hopsworksUser, JupyterSettings js, Integer port, String allowOrigin)
    throws ServiceException, JobException {
  boolean newDir = false;
  JupyterPaths jp = generateJupyterPaths(project, hdfsUser, secretConfig);
  try {
    newDir = createJupyterDirs(jp);
    createConfigFiles(jp, hdfsUser, hopsworksUser, project, port, js, allowOrigin);
  } catch (IOException | ServiceException | ServiceDiscoveryException | ApiKeyException e) {
    if (newDir) {
      // if the folder was newly created, delete it
      removeProjectUserDirRecursive(jp);
    }
    LOGGER.log(Level.SEVERE, "Error in initializing JupyterConfig for project: {0}. {1}",
        new Object[] { project.getName(), e });
    throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_ADD_FAILURE, Level.SEVERE, null, e.getMessage(), e);
  }
  return jp;
}
use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
the class ProjectController method addServiceFeaturestore.
/**
 * Add the featurestore service to the project:
 * 1. create the hive database for the featurestore
 * 2. insert featurestore metadata in the hopsworks db
 * 3. create a hopsworks dataset for the featurestore
 * 4. create a directory in resources to store json configurations for feature import jobs.
 *
 * @param project the project to add the featurestore service for
 * @param user the user adding the service
 * @param dfso dfso
 */
private void addServiceFeaturestore(Project project, Users user, DistributedFileSystemOps dfso,
    ProvTypeDTO datasetProvCore) throws FeaturestoreException, ProjectException, UserException {
  String featurestoreName = featurestoreController.getOfflineFeaturestoreDbName(project);
  try {
    // Create HiveDB for the featurestore
    hiveController.createDatabase(featurestoreName, "Featurestore database for project: " + project.getName());
    // Store featurestore metadata in Hopsworks
    Dataset trainingDatasets = datasetController.getByProjectAndDsName(project, null,
        project.getName() + "_" + Settings.ServiceDataset.TRAININGDATASETS.getName());
    Featurestore featurestore =
        featurestoreController.createProjectFeatureStore(project, user, featurestoreName, trainingDatasets);
    // Create Hopsworks Dataset of the HiveDb
    hiveController.createDatasetDb(project, user, dfso, featurestoreName, DatasetType.FEATURESTORE, featurestore,
        datasetProvCore);
    // Register built-in transformation function.
    transformationFunctionController.registerBuiltInTransformationFunctions(user, project, featurestore);
  } catch (SQLException | IOException | ServiceDiscoveryException ex) {
    LOGGER.log(Level.SEVERE, RESTCodes.FeaturestoreErrorCode.COULD_NOT_CREATE_FEATURESTORE.getMessage(), ex);
    throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.COULD_NOT_CREATE_FEATURESTORE, Level.SEVERE,
        "project: " + project.getName(), ex.getMessage(), ex);
  }
}
use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.
the class ProjectController method addServiceHive.
private void addServiceHive(Project project, Users user, DistributedFileSystemOps dfso, ProvTypeDTO datasetProvCore)
    throws ProjectException {
  try {
    hiveController.createDatabase(project.getName(), "Project general-purpose Hive database");
    hiveController.createDatasetDb(project, user, dfso, project.getName(), datasetProvCore);
  } catch (SQLException | IOException | ServiceDiscoveryException ex) {
    throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_HIVEDB_CREATE_ERROR, Level.SEVERE,
        "project: " + project.getName(), ex.getMessage(), ex);
  }
}
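Across all of these snippets the checked ServiceDiscoveryException is caught together with other infrastructure exceptions and rethrown as whatever type the caller already handles (IOException, ServiceException, FeaturestoreException, ProjectException). A minimal sketch of that pattern, reusing the getAnyAddressOfServiceWithDNS lookup shown above; the helper method itself is hypothetical:

// Hypothetical helper that resolves Livy's address and hides service discovery from its callers.
private String resolveLivyAddress() throws IOException {
  try {
    Service livy = serviceDiscoveryController
        .getAnyAddressOfServiceWithDNS(ServiceDiscoveryController.HopsworksService.LIVY);
    return livy.getAddress();
  } catch (ServiceDiscoveryException ex) {
    // rethrow as the checked type the rest of the call chain already expects
    throw new IOException("Could not resolve Livy via service discovery", ex);
  }
}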