Use of io.hops.hopsworks.persistence.entity.jobs.configuration.flink.FlinkJobConfiguration in project hopsworks by logicalclocks.
The class JobController, method getConfiguration:
@TransactionAttribute(TransactionAttributeType.NEVER)
public JobConfiguration getConfiguration(Project project, JobType jobType, boolean useDefaultConfig) {
  Optional<DefaultJobConfiguration> defaultConfig;
  if (jobType.equals(JobType.SPARK) || jobType.equals(JobType.PYSPARK)) {
    /*
     * Spark and PySpark share the same DefaultJobConfiguration entry in the database,
     * stored as a PySpark configuration. The JobType is inferred from whether a .jar or a
     * .py file is set. However, when creating the DefaultJobConfiguration the JobType must
     * be set as part of the primary key, so for now Spark and PySpark share one configuration.
     */
    defaultConfig = project.getDefaultJobConfigurationCollection().stream()
      .filter(conf -> conf.getDefaultJobConfigurationPK().getType().equals(JobType.PYSPARK))
      .findFirst();
    defaultConfig.ifPresent(defaultJobConfiguration ->
      ((SparkJobConfiguration) defaultJobConfiguration.getJobConfig()).setMainClass(null));
  } else {
    defaultConfig = project.getDefaultJobConfigurationCollection().stream()
      .filter(conf -> conf.getDefaultJobConfigurationPK().getType().equals(jobType))
      .findFirst();
  }
  if (defaultConfig.isPresent()) {
    return defaultConfig.get().getJobConfig();
  } else if (useDefaultConfig) {
    switch (jobType) {
      case SPARK:
      case PYSPARK:
        return new SparkJobConfiguration();
      case FLINK:
        return new FlinkJobConfiguration();
      default:
        throw new IllegalArgumentException("Job type not supported: " + jobType);
    }
  } else {
    return null;
  }
}
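For context, a minimal caller sketch under stated assumptions: an injected JobController instance (jobController) and a project entity are available, and the chosen main class is illustrative only, not taken from the snippet above.

// Minimal sketch (assumed caller, not part of the snippet above): resolve the effective
// configuration for a new Spark job; with no stored DefaultJobConfiguration and
// useDefaultConfig=true, this returns a fresh SparkJobConfiguration.
JobConfiguration config = jobController.getConfiguration(project, JobType.SPARK, true);
if (config instanceof SparkJobConfiguration) {
  SparkJobConfiguration sparkConfig = (SparkJobConfiguration) config;
  sparkConfig.setMainClass("org.example.StreamingJob"); // hypothetical main class
}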
Use of io.hops.hopsworks.persistence.entity.jobs.configuration.flink.FlinkJobConfiguration in project hopsworks by logicalclocks.
The class FlinkConfigurationUtil, method setFrameworkProperties:
@Override
public Map<String, String> setFrameworkProperties(Project project, JobConfiguration jobConfiguration,
    Settings settings, String hdfsUser, Users hopsworksUser, Map<String, String> extraJavaOptions,
    String kafkaBrokersString, String hopsworksRestEndpoint, ServingConfig servingConfig,
    ServiceDiscoveryController serviceDiscoveryController) throws IOException {
  FlinkJobConfiguration flinkJobConfiguration = (FlinkJobConfiguration) jobConfiguration;
  Map<String, ConfigProperty> flinkProps = new HashMap<>();
  flinkProps.put(Settings.JOB_LOG4J_CONFIG,
    new ConfigProperty(Settings.JOB_LOG4J_CONFIG, HopsUtils.IGNORE,
      settings.getFlinkConfDir() + Settings.JOB_LOG4J_PROPERTIES));
  flinkProps.put(Settings.JOB_LOG4J_PROPERTIES,
    new ConfigProperty(Settings.JOB_LOG4J_PROPERTIES, HopsUtils.IGNORE,
      settings.getFlinkConfDir() + Settings.JOB_LOG4J_PROPERTIES));
  flinkProps.put(Settings.FLINK_STATE_CHECKPOINTS_DIR,
    new ConfigProperty(Settings.FLINK_STATE_CHECKPOINTS_DIR, HopsUtils.OVERWRITE,
      "hdfs://" + Utils.getProjectPath(project.getName()) + Settings.PROJECT_STAGING_DIR + "/flink"));
  if (extraJavaOptions == null) {
    extraJavaOptions = new HashMap<>();
  }
  extraJavaOptions.put(Settings.JOB_LOG4J_CONFIG, settings.getFlinkConfDir() + Settings.JOB_LOG4J_PROPERTIES);
  extraJavaOptions.put(Settings.JOB_LOG4J_PROPERTIES, settings.getFlinkConfDir() + Settings.JOB_LOG4J_PROPERTIES);
  extraJavaOptions.put(Settings.HOPSWORKS_REST_ENDPOINT_PROPERTY, hopsworksRestEndpoint);
  extraJavaOptions.put(Settings.HOPSUTIL_INSECURE_PROPERTY, String.valueOf(settings.isHopsUtilInsecure()));
  extraJavaOptions.put(Settings.SERVER_TRUSTSTORE_PROPERTY, Settings.SERVER_TRUSTSTORE_PROPERTY);
  extraJavaOptions.put(Settings.HOPSWORKS_ELASTIC_ENDPOINT_PROPERTY, settings.getElasticRESTEndpoint());
  extraJavaOptions.put(Settings.HOPSWORKS_PROJECTID_PROPERTY, Integer.toString(project.getId()));
  extraJavaOptions.put(Settings.HOPSWORKS_PROJECTNAME_PROPERTY, project.getName());
  extraJavaOptions.put(Settings.HOPSWORKS_PROJECTUSER_PROPERTY, hdfsUser);
  extraJavaOptions.put(Settings.KAFKA_BROKERADDR_PROPERTY, kafkaBrokersString);
  extraJavaOptions.put(Settings.HOPSWORKS_JOBTYPE_PROPERTY, jobConfiguration.getJobType().name());
  if (jobConfiguration.getAppName() != null) {
    extraJavaOptions.put(Settings.HOPSWORKS_JOBNAME_PROPERTY, jobConfiguration.getAppName());
  }
  // Flatten the extra Java options into a single " -Dkey=value ..." string for env.java.opts
  StringBuilder extraJavaOptionsSb = new StringBuilder();
  for (Map.Entry<String, String> entry : extraJavaOptions.entrySet()) {
    extraJavaOptionsSb.append(" -D").append(entry.getKey()).append("=").append(entry.getValue());
  }
  flinkProps.put(Settings.FLINK_ENV_JAVA_OPTS,
    new ConfigProperty(Settings.FLINK_ENV_JAVA_OPTS, HopsUtils.APPEND_SPACE, extraJavaOptionsSb.toString()));
  Map<String, String> validatedFlinkProperties = HopsUtils.parseUserProperties(flinkJobConfiguration.getProperties());
  // Merge system and user-defined properties
  return HopsUtils.mergeHopsworksAndUserParams(flinkProps, validatedFlinkProperties);
}
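For context, a minimal caller sketch under stated assumptions: an injected FlinkConfigurationUtil instance (flinkConfigurationUtil) and the surrounding entities are in scope, and rendering the merged map as "key: value" lines is illustrative only, not part of the Hopsworks API.

// Minimal sketch (assumed caller, not Hopsworks code): build the merged framework
// properties and render them as "key: value" lines, the form used by flink-conf.yaml.
// Assumes java.util.Map and java.util.stream.Collectors are imported; the call may throw IOException.
Map<String, String> props = flinkConfigurationUtil.setFrameworkProperties(
    project, flinkJobConfiguration, settings, hdfsUser, hopsworksUser,
    null /* extraJavaOptions populated inside */, kafkaBrokersString,
    hopsworksRestEndpoint, servingConfig, serviceDiscoveryController);
String renderedConf = props.entrySet().stream()
    .map(e -> e.getKey() + ": " + e.getValue())
    .collect(Collectors.joining("\n"));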