Use of org.talend.core.hadoop.IHadoopClusterService in project tdi-studio-se by Talend.
From the class ComponentChooseDialog, the method isEnabled:
@Override
public boolean isEnabled(DropTargetEvent e) {
    Object obj = getSelection().getFirstElement();
    if (obj instanceof RepositoryNode) {
        RepositoryNode sourceNode = (RepositoryNode) obj;
        // Reject drops of CDC subscriber tables.
        if (PluginChecker.isCDCPluginLoaded()) {
            ICDCProviderService service = (ICDCProviderService) GlobalServiceRegister.getDefault()
                    .getService(ICDCProviderService.class);
            if (service != null
                    && (service.isSubscriberTableNode(sourceNode) || service.isSystemSubscriberTable(sourceNode))) {
                return false;
            }
        }
        // Reject drops of Hadoop cluster nodes.
        IHadoopClusterService hadoopClusterService = HadoopRepositoryUtil.getHadoopClusterService();
        if (hadoopClusterService != null && hadoopClusterService.isHadoopClusterNode(sourceNode)) {
            return false;
        }
        // Reject drops of Oozie nodes when the Oozie service is registered.
        IOozieService oozieService = null;
        if (GlobalServiceRegister.getDefault().isServiceRegistered(IOozieService.class)) {
            oozieService = (IOozieService) GlobalServiceRegister.getDefault().getService(IOozieService.class);
        }
        if (oozieService != null && oozieService.isOozieNode(sourceNode)) {
            return false;
        }
        // Reject drops of SAP nodes when the SAP provider service is registered.
        ISAPProviderService sapService = null;
        if (GlobalServiceRegister.getDefault().isServiceRegistered(ISAPProviderService.class)) {
            sapService = (ISAPProviderService) GlobalServiceRegister.getDefault().getService(ISAPProviderService.class);
        }
        if (sapService != null && sapService.isSAPNode(sourceNode)) {
            return false;
        }
    }
    // Otherwise the drop is allowed as long as the process is writable.
    return !this.editor.getProcess().isReadOnly();
}
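The pattern worth noting is the guarded lookup: optional services such as IOozieService and ISAPProviderService are fetched only after isServiceRegistered confirms the plugin is present, and every result is null-checked before use. A minimal sketch of that pattern, reusing only the registry calls visible above (the helper name isDropVetoedByOozie is hypothetical):

private boolean isDropVetoedByOozie(RepositoryNode sourceNode) {
    // Fetch the optional service only when its plugin is registered.
    IOozieService oozieService = null;
    if (GlobalServiceRegister.getDefault().isServiceRegistered(IOozieService.class)) {
        oozieService = (IOozieService) GlobalServiceRegister.getDefault().getService(IOozieService.class);
    }
    // Veto the drop only when the plugin is installed and flags the node.
    return oozieService != null && oozieService.isOozieNode(sourceNode);
}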
Use of org.talend.core.hadoop.IHadoopClusterService in project tdi-studio-se by Talend.
From the class ComponentChooseDialog, the method propaHadoopCfgChanges:
/**
 * DOC ycbai Comment method "propaHadoopCfgChanges".
 *
 * <P>
 * Propagates the changes from the hadoop cluster to the M/R process when a hadoop subnode is dragged from the
 * repository view and dropped onto the M/R process.
 * </P>
 *
 * @param repositoryNode
 */
private void propaHadoopCfgChanges(IRepositoryNode repositoryNode) {
    if (repositoryNode == null || repositoryNode.getObject() == null) {
        return;
    }
    IHadoopClusterService hadoopClusterService = HadoopRepositoryUtil.getHadoopClusterService();
    if (hadoopClusterService == null || !hadoopClusterService.isHadoopSubnode(repositoryNode)) {
        return;
    }
    // Only Map/Reduce, Spark and Spark Streaming processes are concerned.
    IProcess process = editor.getProcess();
    if (!ComponentCategory.CATEGORY_4_MAPREDUCE.getName().equals(process.getComponentsType())
            && !ComponentCategory.CATEGORY_4_SPARK.getName().equals(process.getComponentsType())
            && !ComponentCategory.CATEGORY_4_SPARKSTREAMING.getName().equals(process.getComponentsType())) {
        return;
    }
    if ((process instanceof IProcess2) && (((IProcess2) process).getProperty().getItem() instanceof JobletProcessItem)) {
        return;
    }
    Item subItem = repositoryNode.getObject().getProperty().getItem();
    String propertyParamName = MR_PROPERTY_PREFIX + EParameterName.PROPERTY_TYPE.getName();
    String propertyRepTypeParamName = MR_PROPERTY_PREFIX + EParameterName.REPOSITORY_PROPERTY_TYPE.getName();
    IElementParameter propertyParam = process.getElementParameter(propertyParamName);
    if (propertyParam == null) {
        return;
    }
    String repositoryValue = propertyParam.getRepositoryValue();
    if (repositoryValue == null) {
        return;
    }
    // The repository value is a pipe-delimited list of supported repository types.
    String[] supportedRepositoryTypes = repositoryValue.split("\\|"); //$NON-NLS-1$
    String repositoryType = hadoopClusterService.getRepositoryTypeOfHadoopSubItem(subItem);
    if (!ArrayUtils.contains(supportedRepositoryTypes, repositoryType)) {
        return;
    }
    Item hadoopClusterItem = hadoopClusterService.getHadoopClusterBySubitemId(
            new Project(ProjectManager.getInstance().getProject(subItem)), subItem.getProperty().getId());
    String hadoopClusterId = hadoopClusterItem.getProperty().getId();
    if (EmfComponent.REPOSITORY.equals(propertyParam.getValue())) {
        // Do nothing when the same hadoop cluster is selected.
        String propertyId = (String) process.getElementParameter(propertyRepTypeParamName).getValue();
        if (hadoopClusterId.equals(propertyId)) {
            return;
        }
    }
    Connection connection = ((ConnectionItem) subItem).getConnection();
    if (hadoopClusterService.hasDiffsFromClusterToProcess(subItem, process)) {
        boolean confirmUpdate = MessageDialog.openConfirm(editor.getSite().getShell(),
                Messages.getString("TalendEditorDropTargetListener.updateHadoopCfgDialog.title"), //$NON-NLS-1$
                Messages.getString("TalendEditorDropTargetListener.updateHadoopCfgDialog.msg")); //$NON-NLS-1$
        if (confirmUpdate) {
            // Update the Spark mode to YARN_CLIENT when switching to the repository configuration.
            if (ComponentCategory.CATEGORY_4_SPARK.getName().equals(process.getComponentsType())
                    || ComponentCategory.CATEGORY_4_SPARKSTREAMING.getName().equals(process.getComponentsType())) {
                IElementParameter sparkLocalParam = process.getElementParameter(HadoopConstants.SPARK_LOCAL_MODE);
                IElementParameter sparkParam = process.getElementParameter(HadoopConstants.SPARK_MODE);
                if (sparkLocalParam != null && (Boolean) sparkLocalParam.getValue()) {
                    sparkLocalParam.setValue(false);
                }
                if (sparkParam != null && !HadoopConstants.SPARK_MODE_YARN_CLIENT.equals(sparkParam.getValue())) {
                    sparkParam.setValue(HadoopConstants.SPARK_MODE_YARN_CLIENT);
                }
            }
            propertyParam.setValue(EmfComponent.REPOSITORY);
            ChangeValuesFromRepository command = new ChangeValuesFromRepository(process, connection,
                    propertyRepTypeParamName, subItem.getProperty().getId());
            execCommandStack(command);
        }
    }
}
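Before propagating anything, the method filters on the parameter's repository value, a pipe-delimited list of repository types. A standalone sketch of that membership test, assuming Apache Commons Lang 3 for ArrayUtils; the sample values are illustrative, not taken from Talend sources:

import org.apache.commons.lang3.ArrayUtils;

public class RepositoryTypeFilterSketch {

    public static void main(String[] args) {
        // Hypothetical repository value; the real one comes from the element parameter.
        String repositoryValue = "HADOOP_CLUSTER|HDFS|HIVE";
        // Split on the pipe separator, exactly as the method does.
        String[] supportedRepositoryTypes = repositoryValue.split("\\|");
        System.out.println(ArrayUtils.contains(supportedRepositoryTypes, "HIVE")); // true
        System.out.println(ArrayUtils.contains(supportedRepositoryTypes, "SAP")); // false
    }
}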
Use of org.talend.core.hadoop.IHadoopClusterService in project tbd-studio-se by Talend.
From the class AbstractCheckedServiceProvider, the method addCustomConfsJarIfNeeded:
protected ClassLoader addCustomConfsJarIfNeeded(ClassLoader baseLoader, HadoopServiceProperties serviceProperties,
        EHadoopCategory category) {
    ClassLoader classLoader = baseLoader;
    if (serviceProperties.isUseCustomConfs()) {
        if (classLoader instanceof DynamicClassLoader) {
            String customConfsJarName;
            IHadoopClusterService hadoopClusterService = getHadoopClusterService();
            if (hadoopClusterService != null) {
                customConfsJarName = hadoopClusterService.getCustomConfsJar(serviceProperties.getItem(), true, true)
                        .map(b -> b.getCustomConfJarName()).orElse(null);
            } else {
                customConfsJarName = HadoopParameterUtil
                        .getConfsJarDefaultName(serviceProperties.getRelativeHadoopClusterLabel());
            }
            // Check whether the custom configuration jar is already on the loader.
            boolean confFileExist = false;
            Set<String> libraries = ((DynamicClassLoader) classLoader).getLibraries();
            for (String lib : libraries) {
                if (customConfsJarName.equals(lib)) {
                    confFileExist = true;
                    break;
                }
            }
            boolean rebuildClassloader = false;
            Set<String> addedJarSet = new HashSet<>();
            Set<String> excludedJarSet = new HashSet<>();
            Consumer<DynamicClassLoader> afterLoaded = null;
            if (serviceProperties.isSetHadoopConf()) {
                String hadoopConfSpecificJarPath = serviceProperties.getHadoopConfSpecificJar();
                boolean jarInvalid = StringUtils.isBlank(hadoopConfSpecificJarPath)
                        || !new File(hadoopConfSpecificJarPath).exists();
                if (jarInvalid) {
                    ExceptionHandler.process(new Exception("Hadoop configuration JAR path invalid: " + hadoopConfSpecificJarPath));
                } else {
                    afterLoaded = t -> t.addLibrary(hadoopConfSpecificJarPath);
                }
                excludedJarSet.add(customConfsJarName);
                // Remove the default jars, since they would conflict with the new ones.
                excludedJarSet.addAll(Arrays.asList(HadoopClassLoaderFactory2.getSecurityJars(category)));
                rebuildClassloader = true;
            } else if (!confFileExist) {
                addedJarSet.add(customConfsJarName);
                // Remove the default jars, since they would conflict with the new ones.
                excludedJarSet.addAll(Arrays.asList(HadoopClassLoaderFactory2.getSecurityJars(category)));
                rebuildClassloader = true;
            }
            if (rebuildClassloader) {
                try {
                    classLoader = DynamicClassLoader.createNewOneBaseLoader((DynamicClassLoader) baseLoader,
                            addedJarSet.toArray(new String[0]), excludedJarSet.toArray(new String[0]));
                    if (afterLoaded != null) {
                        afterLoaded.accept((DynamicClassLoader) classLoader);
                    }
                } catch (MalformedURLException e) {
                    ExceptionHandler.process(e);
                }
            }
        }
    }
    return classLoader;
}
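The method follows a collect-then-rebuild shape: jar names are gathered into addedJarSet and excludedJarSet, the classloader is rebuilt once, and an optional afterLoaded hook runs against the new loader. A generic sketch of that shape with a plain URLClassLoader standing in for Talend's DynamicClassLoader (all names here are illustrative, not Talend APIs):

import java.net.URL;
import java.net.URLClassLoader;
import java.util.function.Consumer;

public class LoaderRebuildSketch {

    // Build a fresh loader from the collected jar URLs, then run the
    // optional post-load hook, mirroring the afterLoaded consumer above.
    public static ClassLoader rebuild(URL[] jarUrls, ClassLoader parent, Consumer<URLClassLoader> afterLoaded) {
        URLClassLoader loader = new URLClassLoader(jarUrls, parent);
        if (afterLoaded != null) {
            afterLoaded.accept(loader);
        }
        return loader;
    }
}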
Use of org.talend.core.hadoop.IHadoopClusterService in project tbd-studio-se by Talend.
From the class HadoopClusterImportHandler, the method findRelatedImportItems:
@Override
public List<ImportItem> findRelatedImportItems(IProgressMonitor monitor, ResourcesManager resManager,
        ImportItem importItem, ImportItem[] allImportItemRecords) throws Exception {
    List<ImportItem> relatedItemRecords = new ArrayList<ImportItem>();
    relatedItemRecords.addAll(super.findRelatedImportItems(monitor, resManager, importItem, allImportItemRecords));
    if (GlobalServiceRegister.getDefault().isServiceRegistered(IHadoopClusterService.class)) {
        IHadoopClusterService hadoopClusterService = (IHadoopClusterService) GlobalServiceRegister.getDefault()
                .getService(IHadoopClusterService.class);
        final Item item = importItem.getItem();
        if (hadoopClusterService != null && hadoopClusterService.isHadoopClusterItem(item)) {
            resolveItem(resManager, importItem);
            HadoopClusterConnection hcConnection = (HadoopClusterConnection) ((HadoopClusterConnectionItem) item)
                    .getConnection();
            String clusterId = item.getProperty().getId();
            for (ImportItem ir : allImportItemRecords) {
                resolveItem(resManager, ir);
                Item subItem = ir.getItem();
                String hcId = null;
                if (subItem instanceof HadoopSubConnectionItem) {
                    hcId = ((HadoopSubConnection) ((HadoopSubConnectionItem) subItem).getConnection())
                            .getRelativeHadoopClusterId();
                } else if (subItem instanceof DatabaseConnectionItem) {
                    hcId = ((DatabaseConnection) ((DatabaseConnectionItem) subItem).getConnection()).getParameters()
                            .get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CLUSTER_ID);
                }
                if (clusterId.equals(hcId)) {
                    if (subItem instanceof HadoopSubConnectionItem) {
                        EList<String> connectionList = hcConnection.getConnectionList();
                        String subItemId = subItem.getProperty().getId();
                        if (!connectionList.contains(subItemId)) {
                            connectionList.add(subItemId);
                        }
                    }
                    relatedItemRecords.add(ir);
                }
            }
        }
    }
    return relatedItemRecords;
}
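Two details carry the logic: items are considered related when the cluster id stored on them matches the imported cluster's id, and matching sub-connections are re-attached to the cluster's connection list without creating duplicates. A minimal sketch of that duplicate-free re-attachment, using plain collections in place of EMF's EList (the id is illustrative):

import java.util.ArrayList;
import java.util.List;

public class ClusterLinkSketch {

    // Add the sub-connection id exactly once, as the import handler does.
    static void linkSubItem(List<String> connectionList, String subItemId) {
        if (!connectionList.contains(subItemId)) {
            connectionList.add(subItemId);
        }
    }

    public static void main(String[] args) {
        List<String> connectionList = new ArrayList<>();
        linkSubItem(connectionList, "subitem-1"); // hypothetical id
        linkSubItem(connectionList, "subitem-1"); // duplicate is ignored
        System.out.println(connectionList); // [subitem-1]
    }
}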
Use of org.talend.core.hadoop.IHadoopClusterService in project tbd-studio-se by Talend.
From the class HadoopServerUtil, the method getConfiguration:
public static Object getConfiguration(HDFSConnectionBean connection, ClassLoader currentClassLoader)
        throws HadoopServerException {
    Object conf = null;
    String userName = StringUtils.trimToNull(connection.getUserName());
    String namenodePrincipal = StringUtils.trimToNull(connection.getPrincipal());
    String group = StringUtils.trimToNull(connection.getGroup());
    boolean enableKerberos = connection.isEnableKerberos();
    boolean useKeytab = connection.isUseKeytab();
    String keytabPrincipal = StringUtils.trimToNull(connection.getKeytabPrincipal());
    String keytab = StringUtils.trimToNull(connection.getKeytab());
    String nameNodeURI = TalendQuoteUtils.removeQuotesIfExist(connection.getNameNodeURI());
    if (userName != null) {
        userName = TalendQuoteUtils.removeQuotesIfExist(userName);
    }
    if (namenodePrincipal != null) {
        namenodePrincipal = TalendQuoteUtils.removeQuotesIfExist(namenodePrincipal);
    }
    if (group != null) {
        group = TalendQuoteUtils.removeQuotesIfExist(group);
    }
    if (keytabPrincipal != null) {
        keytabPrincipal = TalendQuoteUtils.removeQuotesIfExist(keytabPrincipal);
    }
    if (keytab != null) {
        keytab = TalendQuoteUtils.removeQuotesIfExist(keytab);
    }
    // For WebHDFS, pull the SSL settings from the relative hadoop cluster connection.
    if (HadoopClassLoaderUtil.isWebHDFS(nameNodeURI)) {
        IHadoopClusterService hadoopClusterService = HadoopRepositoryUtil.getHadoopClusterService();
        String hcId = connection.getRelativeHadoopClusterId();
        if (StringUtils.isNotBlank(hcId) && hadoopClusterService != null) {
            Map<String, String> parameters = hadoopClusterService.getHadoopDbParameters(hcId);
            if (parameters.size() > 0) {
                ContextType contextType = hadoopClusterService.getHadoopClusterContextType(hcId);
                if (contextType != null) {
                    connection.setParentContextType(contextType);
                }
                boolean isUseSSL = Boolean.parseBoolean(parameters.get(ConnParameterKeys.CONN_PARA_KEY_USE_WEBHDFS_SSL));
                String trustStorePath = connection
                        .getRealValue(parameters.get(ConnParameterKeys.CONN_PARA_KEY_WEBHDFS_SSL_TRUST_STORE_PATH), true);
                String trustStorePassword = connection
                        .getRealValue(parameters.get(ConnParameterKeys.CONN_PARA_KEY_WEBHDFS_SSL_TRUST_STORE_PASSWORD), true);
                HadoopRepositoryUtil.setSSLSystemProperty(isUseSSL, nameNodeURI, trustStorePath, trustStorePassword);
            }
        }
    }
    ClassLoader classLoader = currentClassLoader;
    ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        if (classLoader == null) {
            classLoader = getClassLoader(connection);
        }
        Thread.currentThread().setContextClassLoader(classLoader);
        // Instantiate org.apache.hadoop.conf.Configuration reflectively so the
        // Hadoop classes stay inside the dedicated classloader.
        conf = Class.forName("org.apache.hadoop.conf.Configuration", true, classLoader).newInstance(); //$NON-NLS-1$
        EHadoopConfProperties.FS_DEFAULT_URI.set(conf, nameNodeURI);
        if (enableKerberos) {
            assert namenodePrincipal != null;
            userName = null;
            EHadoopConfProperties.KERBEROS_PRINCIPAL.set(conf, namenodePrincipal);
            EHadoopConfProperties.AUTHENTICATION.set(conf, "KERBEROS"); //$NON-NLS-1$
        }
        if (group != null) {
            assert userName != null;
            EHadoopConfProperties.JOB_UGI.set(conf, userName + GROUP_SEPARATOR + group);
        }
        if (useKeytab) {
            assert keytabPrincipal != null;
            assert keytab != null;
            ReflectionUtils.invokeStaticMethod("org.apache.hadoop.security.UserGroupInformation", classLoader, //$NON-NLS-1$
                    "loginUserFromKeytab", new String[] { keytabPrincipal, keytab }); //$NON-NLS-1$
        }
        // Copy every user-defined property onto the Configuration object.
        Map<String, Object> configurations = connection.getConfigurations();
        Iterator<Entry<String, Object>> configsIterator = configurations.entrySet().iterator();
        while (configsIterator.hasNext()) {
            Entry<String, Object> configEntry = configsIterator.next();
            String key = configEntry.getKey();
            Object value = configEntry.getValue();
            if (key == null) {
                continue;
            }
            ReflectionUtils.invokeMethod(conf, "set", new Object[] { key, String.valueOf(value) }, //$NON-NLS-1$
                    String.class, String.class);
        }
        ReflectionUtils.invokeMethod(conf, "set", new Object[] { "dfs.client.use.datanode.hostname", "true" }, //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                String.class, String.class);
    } catch (Exception e) {
        throw new HadoopServerException(e);
    } finally {
        Thread.currentThread().setContextClassLoader(oldClassLoader);
    }
    return conf;
}
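Everything Hadoop-specific is reached through reflection against the supplied classloader, so no Hadoop type has to be on the Studio's own classpath. A compact sketch of that pattern with plain java.lang.reflect in place of Talend's ReflectionUtils helper; the key/value pair mirrors the hard-coded one above:

import java.lang.reflect.Method;

public class ReflectiveConfSketch {

    // Load org.apache.hadoop.conf.Configuration from the given loader,
    // instantiate it, and call set(key, value) on it reflectively.
    public static Object newConfiguration(ClassLoader classLoader) throws Exception {
        Class<?> confClass = Class.forName("org.apache.hadoop.conf.Configuration", true, classLoader);
        Object conf = confClass.getDeclaredConstructor().newInstance();
        Method set = confClass.getMethod("set", String.class, String.class);
        set.invoke(conf, "dfs.client.use.datanode.hostname", "true");
        return conf;
    }
}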