
Example 16 with ServiceDiscoveryException

Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.

The class JupyterConfigFilesGenerator, method createJupyterKernelConfig:

public void createJupyterKernelConfig(Writer out, Project project, JupyterSettings js, String hdfsUser) throws IOException {
    try {
        KernelTemplate kernelTemplate = KernelTemplateBuilder.newBuilder()
            .setHdfsUser(hdfsUser)
            .setHadoopHome(settings.getHadoopSymbolicLinkDir())
            .setHadoopVersion(settings.getHadoopVersion())
            .setAnacondaHome(settings.getAnacondaProjectDir())
            .setSecretDirectory(settings.getStagingDir() + Settings.PRIVATE_DIRS + js.getSecret())
            .setProject(project)
            .setHiveEndpoints(hiveController.getHiveServerInternalEndpoint())
            .setLibHdfsOpts("-Xmx512m")
            .build();
        Map<String, Object> dataModel = new HashMap<>(1);
        dataModel.put("kernel", kernelTemplate);
        templateEngine.template(KernelTemplate.TEMPLATE_NAME, dataModel, out);
    } catch (TemplateException | ServiceDiscoveryException ex) {
        throw new IOException(ex);
    }
}
Also used: HashMap (java.util.HashMap), TemplateException (freemarker.template.TemplateException), ServiceDiscoveryException (com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException), IOException (java.io.IOException), KernelTemplate (io.hops.hopsworks.common.util.templates.jupyter.KernelTemplate)
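
The templateEngine call above renders a FreeMarker template (KernelTemplate.TEMPLATE_NAME) against the single-entry data model. As a minimal sketch of what such a wrapper might look like, assuming plain FreeMarker and a hypothetical on-disk template directory (Hopsworks hides these details behind its own TemplateEngine bean):

import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;

import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.util.Map;

// A minimal sketch of a FreeMarker-backed template engine; the directory
// and FreeMarker version are assumptions, not the Hopsworks implementation.
public class TemplateEngineSketch {

    private final Configuration cfg;

    public TemplateEngineSketch(File templateDir) throws IOException {
        cfg = new Configuration(Configuration.VERSION_2_3_30);
        cfg.setDirectoryForTemplateLoading(templateDir);
        cfg.setDefaultEncoding("UTF-8");
    }

    // Same call shape as templateEngine.template(name, dataModel, out) above.
    public void template(String templateName, Map<String, Object> dataModel, Writer out)
            throws IOException, TemplateException {
        Template template = cfg.getTemplate(templateName);
        template.process(dataModel, out);
    }
}

Inside the template itself, the bean registered under the "kernel" key would be referenced through its getters, e.g. ${kernel.hdfsUser}.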

Example 17 with ServiceDiscoveryException

Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.

The class JupyterConfigFilesGenerator, method createSparkMagicConfig:

public void createSparkMagicConfig(Writer out, Project project, JupyterSettings js, String hdfsUser, Users hopsworksUser, String confDirPath) throws IOException, ServiceDiscoveryException, JobException, ApiKeyException {
    SparkJobConfiguration sparkJobConfiguration = (SparkJobConfiguration) js.getJobConfig();
    // If the user selected a Python kernel, use the default Spark configuration for the Spark/PySpark kernels
    if (js.isPythonKernel()) {
        sparkJobConfiguration = (SparkJobConfiguration) jobController.getConfiguration(project, JobType.SPARK, true);
    }
    SparkConfigurationUtil sparkConfigurationUtil = new SparkConfigurationUtil();
    Map<String, String> extraJavaOptions = new HashMap<>();
    extraJavaOptions.put(Settings.LOGSTASH_JOB_INFO, project.getName().toLowerCase() + ",jupyter,notebook,?");
    HashMap<String, String> finalSparkConfiguration = new HashMap<>();
    finalSparkConfiguration.put(Settings.SPARK_DRIVER_STAGINGDIR_ENV, "hdfs:///Projects/" + project.getName() + "/Resources/.sparkStaging");
    // Use the Hopsworks Consul service domain name, not a resolved address
    String hopsworksRestEndpoint = "https://" + serviceDiscoveryController.constructServiceFQDNWithPort(ServiceDiscoveryController.HopsworksService.HOPSWORKS_APP);
    finalSparkConfiguration.putAll(sparkConfigurationUtil.setFrameworkProperties(
        project, sparkJobConfiguration, settings, hdfsUser, hopsworksUser, extraJavaOptions,
        kafkaBrokers.getKafkaBrokersString(), hopsworksRestEndpoint, servingConfig, serviceDiscoveryController));
    StringBuilder sparkConfBuilder = new StringBuilder();
    ArrayList<String> keys = new ArrayList<>(finalSparkConfiguration.keySet());
    Collections.sort(keys);
    for (String configKey : keys) {
        sparkConfBuilder.append("\t\"").append(configKey).append("\":\"")
            .append(finalSparkConfiguration.get(configKey)).append("\",\n");
    }
    sparkConfBuilder.deleteCharAt(sparkConfBuilder.lastIndexOf(","));
    try {
        Service livyService = serviceDiscoveryController.getAnyAddressOfServiceWithDNS(ServiceDiscoveryController.HopsworksService.LIVY);
        SparkMagicConfigTemplateBuilder templateBuilder = SparkMagicConfigTemplateBuilder.newBuilder()
            .setLivyIp(livyService.getAddress())
            .setJupyterHome(confDirPath)
            .setDriverCores(Integer.parseInt(finalSparkConfiguration.get(Settings.SPARK_DRIVER_CORES_ENV)))
            .setDriverMemory(finalSparkConfiguration.get(Settings.SPARK_DRIVER_MEMORY_ENV))
            .setLivyStartupTimeout(settings.getLivyStartupTimeout());
        if (sparkJobConfiguration.isDynamicAllocationEnabled() || sparkJobConfiguration.getExperimentType() != null) {
            templateBuilder.setNumExecutors(1);
        } else {
            templateBuilder.setNumExecutors(Integer.parseInt(finalSparkConfiguration.get(Settings.SPARK_NUMBER_EXECUTORS_ENV)));
        }
        templateBuilder
            .setExecutorCores(Integer.parseInt(finalSparkConfiguration.get(Settings.SPARK_EXECUTOR_CORES_ENV)))
            .setExecutorMemory(finalSparkConfiguration.get(Settings.SPARK_EXECUTOR_MEMORY_ENV))
            .setHdfsUser(hdfsUser)
            .setYarnQueue(sparkJobConfiguration.getAmQueue())
            .setHadoopHome(settings.getHadoopSymbolicLinkDir())
            .setHadoopVersion(settings.getHadoopVersion())
            .setSparkConfiguration(sparkConfBuilder.toString());
        Map<String, Object> dataModel = new HashMap<>(1);
        dataModel.put("conf", templateBuilder.build());
        templateEngine.template(SparkMagicConfigTemplate.TEMPLATE_NAME, dataModel, out);
    } catch (TemplateException | ServiceDiscoveryException ex) {
        throw new IOException(ex);
    }
}
Also used: HashMap (java.util.HashMap), TemplateException (freemarker.template.TemplateException), SparkJobConfiguration (io.hops.hopsworks.persistence.entity.jobs.configuration.spark.SparkJobConfiguration), SparkMagicConfigTemplateBuilder (io.hops.hopsworks.common.util.templates.jupyter.SparkMagicConfigTemplateBuilder), ArrayList (java.util.ArrayList), Service (com.logicalclocks.servicediscoverclient.service.Service), IOException (java.io.IOException), SparkConfigurationUtil (io.hops.hopsworks.common.util.SparkConfigurationUtil), ServiceDiscoveryException (com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException)
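
Two details of the method above are easy to miss: the configuration keys are sorted so the generated file is deterministic across runs, and the trailing comma is stripped so the block stays valid inside the sparkmagic JSON template. A self-contained sketch of just that serialization step, with sample values that are not Hopsworks defaults:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SparkConfSerializer {

    // Emits sorted, tab-indented "key":"value" lines without a trailing comma,
    // mirroring the loop in createSparkMagicConfig.
    static String serialize(Map<String, String> conf) {
        List<String> keys = new ArrayList<>(conf.keySet());
        Collections.sort(keys);
        StringBuilder sb = new StringBuilder();
        for (String key : keys) {
            sb.append("\t\"").append(key).append("\":\"").append(conf.get(key)).append("\",\n");
        }
        if (sb.length() > 0) {
            // Drop the comma after the last entry so the JSON stays valid.
            sb.deleteCharAt(sb.lastIndexOf(","));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Map<String, String> conf = new HashMap<>();
        conf.put("spark.executor.memory", "4g"); // sample values, not Hopsworks defaults
        conf.put("spark.driver.cores", "1");
        System.out.print(serialize(conf));
    }
}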

Example 18 with ServiceDiscoveryException

Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.

The class JupyterConfigFilesGenerator, method generateConfiguration:

public JupyterPaths generateConfiguration(Project project, String secretConfig, String hdfsUser, Users hopsworksUser, JupyterSettings js, Integer port, String allowOrigin) throws ServiceException, JobException {
    boolean newDir = false;
    JupyterPaths jp = generateJupyterPaths(project, hdfsUser, secretConfig);
    try {
        newDir = createJupyterDirs(jp);
        createConfigFiles(jp, hdfsUser, hopsworksUser, project, port, js, allowOrigin);
    } catch (IOException | ServiceException | ServiceDiscoveryException | ApiKeyException e) {
        if (newDir) {
            // if the folder was newly created delete it
            removeProjectUserDirRecursive(jp);
        }
        LOGGER.log(Level.SEVERE, "Error in initializing JupyterConfig for project: {0}. {1}", new Object[] { project.getName(), e });
        throw new ServiceException(RESTCodes.ServiceErrorCode.JUPYTER_ADD_FAILURE, Level.SEVERE, null, e.getMessage(), e);
    }
    return jp;
}
Also used: ApiKeyException (io.hops.hopsworks.exceptions.ApiKeyException), ServiceException (io.hops.hopsworks.exceptions.ServiceException), ServiceDiscoveryException (com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException), IOException (java.io.IOException)
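
generateConfiguration uses a create-then-rollback idiom: the newDir flag records whether this call created the directory tree, and the catch block only deletes it in that case, so a directory left by a previous session is never removed on failure. A generic sketch of the idiom, with hypothetical helpers standing in for createJupyterDirs, createConfigFiles, and removeProjectUserDirRecursive:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.stream.Stream;

public class RollbackOnFailure {

    // Returns true only if this call created the directory, which is what the
    // newDir flag above is assumed to capture.
    static boolean createDirIfMissing(Path dir) throws IOException {
        if (Files.exists(dir)) {
            return false;
        }
        Files.createDirectories(dir);
        return true;
    }

    // Hypothetical stand-in for removeProjectUserDirRecursive.
    static void removeRecursive(Path dir) throws IOException {
        try (Stream<Path> walk = Files.walk(dir)) {
            walk.sorted(Comparator.reverseOrder()).forEach(p -> p.toFile().delete());
        }
    }

    // Hypothetical stand-in for createConfigFiles.
    static void writeConfigFiles(Path dir) throws IOException {
        Files.writeString(dir.resolve("config.json"), "{}");
    }

    public static void generate(Path dir) throws IOException {
        boolean newDir = false;
        try {
            newDir = createDirIfMissing(dir);
            writeConfigFiles(dir);
        } catch (IOException e) {
            if (newDir) {
                removeRecursive(dir); // only roll back what this call created
            }
            throw e;
        }
    }
}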

Example 19 with ServiceDiscoveryException

Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.

The class ProjectController, method addServiceFeaturestore:

/**
 * Add the featurestore service to the project:
 * 1. create the Hive database for the featurestore
 * 2. insert featurestore metadata in the hopsworks db
 * 3. create a hopsworks dataset for the featurestore
 * 4. create a directory in Resources to store JSON configurations for feature import jobs
 *
 * @param project the project to add the featurestore service to
 * @param user the user adding the service
 * @param dfso the distributed file system operations handle
 */
private void addServiceFeaturestore(Project project, Users user, DistributedFileSystemOps dfso, ProvTypeDTO datasetProvCore) throws FeaturestoreException, ProjectException, UserException {
    String featurestoreName = featurestoreController.getOfflineFeaturestoreDbName(project);
    try {
        // Create HiveDB for the featurestore
        hiveController.createDatabase(featurestoreName, "Featurestore database for project: " + project.getName());
        // Store featurestore metadata in Hopsworks
        Dataset trainingDatasets = datasetController.getByProjectAndDsName(project, null, project.getName() + "_" + Settings.ServiceDataset.TRAININGDATASETS.getName());
        Featurestore featurestore = featurestoreController.createProjectFeatureStore(project, user, featurestoreName, trainingDatasets);
        // Create Hopsworks Dataset of the HiveDb
        hiveController.createDatasetDb(project, user, dfso, featurestoreName, DatasetType.FEATURESTORE, featurestore, datasetProvCore);
        // Register built-in transformation function.
        transformationFunctionController.registerBuiltInTransformationFunctions(user, project, featurestore);
    } catch (SQLException | IOException | ServiceDiscoveryException ex) {
        LOGGER.log(Level.SEVERE, RESTCodes.FeaturestoreErrorCode.COULD_NOT_CREATE_FEATURESTORE.getMessage(), ex);
        throw new FeaturestoreException(RESTCodes.FeaturestoreErrorCode.COULD_NOT_CREATE_FEATURESTORE, Level.SEVERE, "project: " + project.getName(), ex.getMessage(), ex);
    }
}
Also used: Featurestore (io.hops.hopsworks.persistence.entity.featurestore.Featurestore), SQLException (java.sql.SQLException), Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset), ServiceDiscoveryException (com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException), IOException (java.io.IOException), FeaturestoreException (io.hops.hopsworks.exceptions.FeaturestoreException)
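
The catch block shows the exception-translation pattern used throughout these examples: low-level SQLException, IOException, and ServiceDiscoveryException failures are logged once and rethrown as a single domain exception carrying a REST error code for the API layer. A stripped-down sketch of the pattern, with a hypothetical DomainException modeled on FeaturestoreException's (errorCode, level, userMsg, devMsg, cause) constructor shape:

import java.io.IOException;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;

// Hypothetical domain exception; not the Hopsworks class.
class DomainException extends Exception {

    enum ErrorCode { COULD_NOT_CREATE_FEATURESTORE }

    private final ErrorCode code;
    private final Level level;
    private final String userMsg;

    DomainException(ErrorCode code, Level level, String userMsg, String devMsg, Throwable cause) {
        super(devMsg, cause);
        this.code = code;
        this.level = level;
        this.userMsg = userMsg;
    }

    ErrorCode getCode() {
        return code;
    }
}

public class FeaturestoreProvisioner {

    private static final Logger LOGGER = Logger.getLogger(FeaturestoreProvisioner.class.getName());

    // Hypothetical stand-in for the Hive/metadata/dataset provisioning calls.
    void provision(String projectName) throws SQLException, IOException {
        // ... create Hive database, store metadata, create dataset ...
    }

    public void addServiceFeaturestore(String projectName) throws DomainException {
        try {
            provision(projectName);
        } catch (SQLException | IOException ex) {
            // Log the low-level cause once, then rethrow as a single domain
            // exception the REST layer can map to an error response.
            LOGGER.log(Level.SEVERE, "Could not create featurestore", ex);
            throw new DomainException(DomainException.ErrorCode.COULD_NOT_CREATE_FEATURESTORE,
                Level.SEVERE, "project: " + projectName, ex.getMessage(), ex);
        }
    }
}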

Example 20 with ServiceDiscoveryException

Use of com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException in project hopsworks by logicalclocks.

The class ProjectController, method addServiceHive:

private void addServiceHive(Project project, Users user, DistributedFileSystemOps dfso, ProvTypeDTO datasetProvCore) throws ProjectException {
    try {
        hiveController.createDatabase(project.getName(), "Project general-purpose Hive database");
        hiveController.createDatasetDb(project, user, dfso, project.getName(), datasetProvCore);
    } catch (SQLException | IOException | ServiceDiscoveryException ex) {
        throw new ProjectException(RESTCodes.ProjectErrorCode.PROJECT_HIVEDB_CREATE_ERROR, Level.SEVERE, "project: " + project.getName(), ex.getMessage(), ex);
    }
}
Also used: ProjectException (io.hops.hopsworks.exceptions.ProjectException), SQLException (java.sql.SQLException), ServiceDiscoveryException (com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException), IOException (java.io.IOException)
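
hiveController.createDatabase presumably issues Hive DDL over a HiveServer2 connection, with the second argument becoming the database COMMENT. A minimal JDBC sketch of such a call, assuming the Hive JDBC driver is on the classpath and the URL is resolved elsewhere (Hopsworks obtains the endpoint via service discovery, which is why ServiceDiscoveryException appears in the catch):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveDbSketch {

    // jdbcUrl is an assumption, e.g. "jdbc:hive2://host:10000/default";
    // Hopsworks resolves the HiveServer2 endpoint through service discovery.
    static void createDatabase(String jdbcUrl, String dbName, String comment) throws SQLException {
        // Identifiers cannot be bound as JDBC parameters, so dbName and comment
        // must be validated before being spliced into the DDL string.
        String sql = "CREATE DATABASE IF NOT EXISTS " + dbName + " COMMENT '" + comment + "'";
        try (Connection conn = DriverManager.getConnection(jdbcUrl);
             Statement stmt = conn.createStatement()) {
            stmt.execute(sql);
        }
    }
}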

Aggregations

ServiceDiscoveryException (com.logicalclocks.servicediscoverclient.exceptions.ServiceDiscoveryException): 32
IOException (java.io.IOException): 16
ServiceException (io.hops.hopsworks.exceptions.ServiceException): 11
Service (com.logicalclocks.servicediscoverclient.service.Service): 7
FeaturestoreException (io.hops.hopsworks.exceptions.FeaturestoreException): 6
SQLException (java.sql.SQLException): 4
TransactionAttribute (javax.ejb.TransactionAttribute): 4
FeaturestoreStorageConnectorDTO (io.hops.hopsworks.common.featurestore.storageconnectors.FeaturestoreStorageConnectorDTO): 3
ServiceDiscoveryController (io.hops.hopsworks.common.hosts.ServiceDiscoveryController): 3
Dataset (io.hops.hopsworks.persistence.entity.dataset.Dataset): 3
Featurestore (io.hops.hopsworks.persistence.entity.featurestore.Featurestore): 3
ArrayList (java.util.ArrayList): 3
Date (java.util.Date): 3
List (java.util.List): 3
Level (java.util.logging.Level): 3
HttpHost (org.apache.http.HttpHost): 3
TemplateException (freemarker.template.TemplateException): 2
FeaturestoreHopsfsConnectorDTO (io.hops.hopsworks.common.featurestore.storageconnectors.hopsfs.FeaturestoreHopsfsConnectorDTO): 2
Settings (io.hops.hopsworks.common.util.Settings): 2
ProjectException (io.hops.hopsworks.exceptions.ProjectException): 2