Use of org.talend.hadoop.distribution.ESparkVersion in project tbd-studio-se by Talend.
The class SparkVersionUtil, method getSparkVersion.
/**
 * This method returns the {@link ESparkVersion} based on parameter values coming from the
 * tSparkConfiguration component in a job.
 *
 * @param process a job that contains Spark configuration information.
 * @return the Spark version if one can be found from the process, otherwise null.
 */
public static ESparkVersion getSparkVersion(IProcess process) {
    if (process == null) {
        return null;
    }
    // Try to get the version directly from the node.
    ESparkVersion sparkVersion = getSparkVersionFromElementParameters(process);
    if (sparkVersion != null) {
        return sparkVersion;
    }
    // Try to get the version from any compatible node in the process.
    List<? extends INode> sparkConfigs = process.getNodesOfType("tSparkConfiguration"); //$NON-NLS-1$
    if (sparkConfigs != null && sparkConfigs.size() > 0) {
        return getSparkVersion(sparkConfigs.get(0), false);
    }
    return null;
}
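For illustration only, here is a minimal caller sketch (not taken from the Talend sources): it resolves the Spark version of a job and branches on it. The IProcess variable and the SPARK_2_0 constant name are assumptions; use whichever constants your ESparkVersion enum actually defines.

ESparkVersion version = SparkVersionUtil.getSparkVersion(process); // process is the IProcess of the job
if (version == null) {
    // no tSparkConfiguration found in the job, or its version parameter is not set
} else if (version.compareTo(ESparkVersion.valueOf("SPARK_2_0")) >= 0) { // assumed constant name
    // enable behaviour that requires Spark 2.x or newer
}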
Use of org.talend.hadoop.distribution.ESparkVersion in project tdi-studio-se by Talend.
The class Expression, method evaluateSparkVersion.
// should be private, but needs to be accessible for unit tests
public static boolean evaluateSparkVersion(String simpleExpression, List<? extends IElementParameter> listParam,
        ElementParameter currentParam) {
    INode node = retrieveNodeElementFromParameter(currentParam, listParam);
    ESparkVersion version = SparkVersionUtil.getSparkVersion(node);
    if (version == null) {
        return false;
    }
    Pattern p = java.util.regex.Pattern.compile("(lt|le|gt|ge|eq|ne)\\s*'(SPARK_.*)'"); //$NON-NLS-1$
    Matcher m = p.matcher(simpleExpression);
    if (m.find()) {
        ESparkVersion versionToTest = ESparkVersion.valueOf(m.group(2));
        switch (m.group(1)) {
        case "lt": //$NON-NLS-1$
            return version.compareTo(versionToTest) < 0;
        case "le": //$NON-NLS-1$
            return version.compareTo(versionToTest) <= 0;
        case "gt": //$NON-NLS-1$
            return version.compareTo(versionToTest) > 0;
        case "ge": //$NON-NLS-1$
            return version.compareTo(versionToTest) >= 0;
        case "eq": //$NON-NLS-1$
            return version.compareTo(versionToTest) == 0;
        case "ne": //$NON-NLS-1$
            return version.compareTo(versionToTest) != 0;
        }
    }
    return false;
}
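The regular expression above means the method reacts to expression fragments of the form <operator> '<ESparkVersion constant>', where the operator is one of lt, le, gt, ge, eq, ne. A small standalone sketch of what the pattern extracts (the SPARK_2_0 label in the sample string is an assumption; only the SPARK_ prefix is required by the pattern):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SparkVersionExpressionDemo {

    public static void main(String[] args) {
        Pattern p = Pattern.compile("(lt|le|gt|ge|eq|ne)\\s*'(SPARK_.*)'");
        Matcher m = p.matcher("ge 'SPARK_2_0'");
        if (m.find()) {
            System.out.println(m.group(1)); // "ge"        -> the comparison operator
            System.out.println(m.group(2)); // "SPARK_2_0" -> the ESparkVersion constant to compare against
        }
    }
}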
Use of org.talend.hadoop.distribution.ESparkVersion in project tbd-studio-se by Talend.
The class DynamicModuleAdapter, method adapt.
public List<IDynamicConfiguration> adapt(IDynamicMonitor monitor, boolean multiThread) throws Exception {
    DynamicDistributionUtils.checkCancelOrNot(monitor);
    resolve();
    if (monitor != null) {
        String mvnUri = moduleBean.getMvnUri();
        if (StringUtils.isEmpty(mvnUri)) {
            mvnUri = getMvnUri();
        }
        monitor.setTaskName(Messages.getString("DynamicModuleAdapter.monitor.buildModule", moduleBean.getId(), mvnUri)); //$NON-NLS-1$
    }
    TemplateBean templateBean = getTemplateBean();
    DynamicConfiguration configuration = getConfiguration();
    String distribution = configuration.getDistribution();
    String hadoopVersion = configuration.getVersion();
    String id = configuration.getId();
    List<ESparkVersion> selectedSparkVersions = configuration.getSelectedSparkVersions();
    List<IDynamicConfiguration> librariesNeeded = new ArrayList<>();
    List<String> sparkVersions = moduleBean.getSupportedSparkVersions();
    if (sparkVersions != null && !sparkVersions.isEmpty()) {
        boolean isSupport = false;
        for (String sparkVersion : sparkVersions) {
            try {
                ESparkVersion eSparkVersion = ESparkVersion.valueOf(sparkVersion);
                if (selectedSparkVersions.contains(eSparkVersion)) {
                    isSupport = true;
                    break;
                }
            } catch (Exception e) {
                ExceptionHandler.process(e);
            }
        }
        if (!isSupport) {
            this.isSkipped = true;
            return librariesNeeded;
        }
    }
    String type = moduleBean.getType();
    if (ModuleBean.TYPE_BASE.equalsIgnoreCase(type)) {
        String groupId = moduleBean.getGroupId();
        String artifactId = moduleBean.getArtifactId();
        String scope = moduleBean.getScope();
        String extension = moduleBean.getExtension();
        String classifier = moduleBean.getClassifier();
        String moduleVersion = moduleBean.getVersion();
        String useStudioRepository = moduleBean.getUseStudioRepository();
        if (StringUtils.isNotEmpty(useStudioRepository)) {
            ExceptionHandler.process(
                    new Exception("Currently useStudioRepository is only supported by STANDARD type, will be ignored"), //$NON-NLS-1$
                    Priority.WARN);
        }
        boolean useLatest = Boolean.valueOf(moduleBean.getUseLatest());
        if (StringUtils.isBlank(extension)) {
            extension = null;
        }
        List<ExclusionBean> exclusionBeans = moduleBean.getExclusions();
        List<ExclusionNode> exclusions = null;
        if (exclusionBeans != null && !exclusionBeans.isEmpty()) {
            exclusions = adaptExclusions(exclusionBeans, monitor);
        }
        DependencyNode dependencyNode = null;
        DependencyNode baseNode = new DependencyNode();
        baseNode.setGroupId(groupId);
        baseNode.setArtifactId(artifactId);
        baseNode.setClassifier(classifier);
        baseNode.setExtension(extension);
        baseNode.setScope(scope);
        if (useLatest) {
            moduleVersion = dependencyResolver.getLatestVersion(groupId, artifactId, null, null, monitor);
        }
        baseNode.setVersion(moduleVersion);
        if (exclusions != null && !exclusions.isEmpty()) {
            baseNode.setExclusions(exclusions);
        }
        try {
            dependencyNode = dependencyResolver.collectDependencies(baseNode, monitor, multiThread);
            librariesNeeded = createLibrariesNeeded(dependencyNode, distribution, hadoopVersion, id, moduleBean, runtimeIds, templateBean);
        } catch (VersionNotFoundException e) {
            ExceptionHandler.process(e);
        }
    } else if (ModuleBean.TYPE_REFERENCE.equalsIgnoreCase(type)) {
        List<ExclusionBean> exclusions = moduleBean.getExclusions();
        if (exclusions != null && !exclusions.isEmpty()) {
            throw new UnsupportedOperationException(Messages.getString("DynamicModuleAdapter.exception.exclusion.unsupport", type)); //$NON-NLS-1$
        }
        String jarName = moduleBean.getJarName();
        ModuleNeeded moduleNeeded = existingModuleMap.get(jarName);
        if (moduleNeeded == null) {
            throw new UnsupportedOperationException(Messages.getString("DynamicModuleAdapter.exception.reference.notFound", jarName)); //$NON-NLS-1$
        }
        runtimeIds.add(moduleNeeded.getId());
    } else if (ModuleBean.TYPE_STANDARD.equalsIgnoreCase(type)) {
        List<ExclusionBean> exclusions = moduleBean.getExclusions();
        if (exclusions != null && !exclusions.isEmpty()) {
            throw new UnsupportedOperationException(Messages.getString("DynamicModuleAdapter.exception.exclusion.unsupport", type)); //$NON-NLS-1$
        }
        String beanId = moduleBean.getId();
        String runtimeId = DynamicDistributionUtils.getPluginKey(distribution, hadoopVersion, id, beanId);
        if (!registedModules.contains(runtimeId)) {
            IDynamicConfiguration libraryNeeded = createLibraryNeeded(moduleBean);
            libraryNeeded.setAttribute(ATTR_ID, runtimeId);
            librariesNeeded.add(libraryNeeded);
            List<String> registedRuntimeIds = new ArrayList<>();
            registedRuntimeIds.add(runtimeId);
            registedModules.add(runtimeId);
        }
        runtimeIds.add(runtimeId);
    }
    return librariesNeeded;
}
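This adapter, and the DynamicModuleGroupAdapter shown next, apply the same filter: a bean is only processed when at least one of the Spark versions it declares matches a Spark version selected in the dynamic distribution configuration. Below is a sketch of that check extracted into a stand-alone helper; the helper itself is hypothetical and not part of the Talend API.

// Returns true when the bean declares no restriction, or when at least one declared
// version is among the versions selected for the dynamic distribution.
static boolean supportsSelectedSparkVersion(List<String> supportedSparkVersions,
        List<ESparkVersion> selectedSparkVersions) {
    if (supportedSparkVersions == null || supportedSparkVersions.isEmpty()) {
        return true; // no restriction declared on the bean
    }
    for (String sparkVersion : supportedSparkVersions) {
        try {
            if (selectedSparkVersions.contains(ESparkVersion.valueOf(sparkVersion))) {
                return true;
            }
        } catch (IllegalArgumentException e) {
            // unknown version label in the module definition; skip it
        }
    }
    return false;
}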
Use of org.talend.hadoop.distribution.ESparkVersion in project tbd-studio-se by Talend.
The class DynamicModuleGroupAdapter, method adapt.
public IDynamicConfiguration adapt(IDynamicMonitor monitor) throws Exception {
    DynamicDistributionUtils.checkCancelOrNot(monitor);
    resolve();
    if (monitor != null) {
        monitor.setTaskName(Messages.getString("DynamicModuleGroupAdapter.monitor.buildModuleGroup", moduleGroupBean.getId())); //$NON-NLS-1$
    }
    DynamicConfiguration configuration = getConfiguration();
    String distribution = configuration.getDistribution();
    String version = configuration.getVersion();
    String id = configuration.getId();
    String moduleGroupId = moduleGroupBean.getId();
    String description = moduleGroupBean.getDescription();
    List<ESparkVersion> selectedSparkVersions = configuration.getSelectedSparkVersions();
    List<String> sparkVersions = moduleGroupBean.getSupportedSparkVersions();
    if (sparkVersions != null && !sparkVersions.isEmpty()) {
        boolean isSupport = false;
        for (String sparkVersion : sparkVersions) {
            try {
                ESparkVersion eSparkVersion = ESparkVersion.valueOf(sparkVersion);
                if (selectedSparkVersions.contains(eSparkVersion)) {
                    isSupport = true;
                    break;
                }
            } catch (Exception e) {
                ExceptionHandler.process(e);
            }
        }
        if (!isSupport) {
            return null;
        }
    }
    runtimeId = DynamicDistributionUtils.getPluginKey(distribution, version, id, moduleGroupId);
    IDynamicConfiguration dynamicModuleGroup = DynamicFactory.getInstance().createDynamicConfiguration();
    dynamicModuleGroup.setConfigurationName(TAG_NAME);
    dynamicModuleGroup.setAttribute(ATTR_ID, runtimeId);
    dynamicModuleGroup.setAttribute(ATTR_GROUP_TEMPLATE_ID, moduleGroupId);
    dynamicModuleGroup.setAttribute(ATTR_DESCRIPTION, description);
    List<String> modules = moduleGroupBean.getModules();
    Set<String> runtimeModulesSet = new HashSet<>();
    if (modules != null) {
        for (String module : modules) {
            DynamicModuleAdapter moduleAdapter = moduleBeanAdapterMap.get(module);
            if (moduleAdapter == null) {
                throw new Exception(Messages.getString("DynamicModuleGroupAdapter.exception.noModuleAdapterFound", //$NON-NLS-1$
                        module, DynamicModuleAdapter.class.getName()));
            }
            if (moduleAdapter.isSkipped()) {
                continue;
            }
            List<String> runtimeIds = moduleAdapter.getRuntimeIds();
            if (runtimeIds == null || runtimeIds.isEmpty()) {
                // means it is using existing modules of studio
                runtimeModulesSet.add(module);
            } else {
                runtimeModulesSet.addAll(runtimeIds);
            }
        }
        if (runtimeModulesSet != null) {
            for (String runtimeModule : runtimeModulesSet) {
                IDynamicConfiguration createDynamicLibrary = createDynamicLibrary(runtimeModule);
                dynamicModuleGroup.addChildConfiguration(createDynamicLibrary);
            }
        }
    }
    this.runtimeModules.addAll(runtimeModulesSet);
    return dynamicModuleGroup;
}
Use of org.talend.hadoop.distribution.ESparkVersion in project tbd-studio-se by Talend.
The class DynamicDistributionUtils, method convert2SparkVersions.
public static List<String> convert2SparkVersions(List<ESparkVersion> eSparkVersions) {
    List<String> sparkVersions = new ArrayList<>();
    for (ESparkVersion eSparkVersion : eSparkVersions) {
        sparkVersions.add(eSparkVersion.name());
    }
    Collections.sort(sparkVersions, Collections.reverseOrder());
    return sparkVersions;
}
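A minimal usage sketch (assumes java.util.Arrays is imported; the two version labels are examples, use constants your ESparkVersion enum actually defines):

List<String> names = DynamicDistributionUtils.convert2SparkVersions(
        Arrays.asList(ESparkVersion.valueOf("SPARK_2_1"), ESparkVersion.valueOf("SPARK_1_6")));
// names -> [SPARK_2_1, SPARK_1_6], because the list is sorted in reverse lexicographic order of the enum names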