Search in sources:

Example 1 with IHadoopClusterService

use of org.talend.core.hadoop.IHadoopClusterService in project tdi-studio-se by Talend.

In class ComponentChooseDialog, method isEnabled.

/**
 * Decides whether the current repository selection may be dropped onto the process editor.
 * <p>
 * Dropping is rejected for CDC subscriber tables, hadoop cluster nodes, Oozie nodes and SAP
 * nodes; for every other selection it is allowed as long as the target process is editable.
 *
 * @param e the drop target event (not inspected; the decision is based on the current selection)
 * @return {@code true} if the drop is allowed, {@code false} otherwise
 */
@Override
public boolean isEnabled(DropTargetEvent e) {
    Object obj = getSelection().getFirstElement();
    if (obj instanceof RepositoryNode) {
        RepositoryNode sourceNode = (RepositoryNode) obj;
        // CDC subscriber/system-subscriber tables are never valid drop sources.
        // BUGFIX: guard with isServiceRegistered, consistent with the IOozieService and
        // ISAPProviderService lookups below, instead of fetching the service unconditionally.
        if (PluginChecker.isCDCPluginLoaded()
                && GlobalServiceRegister.getDefault().isServiceRegistered(ICDCProviderService.class)) {
            ICDCProviderService service = (ICDCProviderService) GlobalServiceRegister.getDefault()
                    .getService(ICDCProviderService.class);
            if (service != null
                    && (service.isSubscriberTableNode(sourceNode) || service.isSystemSubscriberTable(sourceNode))) {
                return false;
            }
        }
        // Hadoop cluster nodes themselves cannot be dropped (only their sub-nodes can).
        IHadoopClusterService hadoopClusterService = HadoopRepositoryUtil.getHadoopClusterService();
        if (hadoopClusterService != null && hadoopClusterService.isHadoopClusterNode(sourceNode)) {
            return false;
        }
        // Oozie nodes are not valid drop sources.
        IOozieService oozieService = null;
        if (GlobalServiceRegister.getDefault().isServiceRegistered(IOozieService.class)) {
            oozieService = (IOozieService) GlobalServiceRegister.getDefault().getService(IOozieService.class);
        }
        if (oozieService != null && oozieService.isOozieNode(sourceNode)) {
            return false;
        }
        // SAP nodes are not valid drop sources.
        ISAPProviderService sapService = null;
        if (GlobalServiceRegister.getDefault().isServiceRegistered(ISAPProviderService.class)) {
            sapService = (ISAPProviderService) GlobalServiceRegister.getDefault().getService(ISAPProviderService.class);
        }
        if (sapService != null && sapService.isSAPNode(sourceNode)) {
            return false;
        }
    }
    // Anything else may be dropped, provided the target process is not read-only.
    return !this.editor.getProcess().isReadOnly();
}
Also used : IHadoopClusterService(org.talend.core.hadoop.IHadoopClusterService) IOozieService(org.talend.core.hadoop.IOozieService) ISAPProviderService(org.talend.core.service.ISAPProviderService) SAPFunctionRepositoryObject(org.talend.core.repository.model.repositoryObject.SAPFunctionRepositoryObject) IRepositoryViewObject(org.talend.core.model.repository.IRepositoryViewObject) QueryRepositoryObject(org.talend.core.repository.model.repositoryObject.QueryRepositoryObject) MetadataTableRepositoryObject(org.talend.core.repository.model.repositoryObject.MetadataTableRepositoryObject) SalesforceModuleRepositoryObject(org.talend.core.repository.model.repositoryObject.SalesforceModuleRepositoryObject) MetadataColumnRepositoryObject(org.talend.core.repository.model.repositoryObject.MetadataColumnRepositoryObject) SAPIDocRepositoryObject(org.talend.core.repository.model.repositoryObject.SAPIDocRepositoryObject) RepositoryNode(org.talend.repository.model.RepositoryNode) IRepositoryNode(org.talend.repository.model.IRepositoryNode) ICDCProviderService(org.talend.core.ui.ICDCProviderService)

Example 2 with IHadoopClusterService

use of org.talend.core.hadoop.IHadoopClusterService in project tdi-studio-se by Talend.

In class ComponentChooseDialog, method propaHadoopCfgChanges.

/**
 * Propagates configuration changes from a hadoop cluster to a Big Data process when a hadoop
 * sub-node is dragged from the repository view onto a M/R, Spark or Spark Streaming process.
 * <p>
 * If the dropped node's owning cluster differs from the one currently configured on the process,
 * the user is asked to confirm; on confirmation the process property is switched to REPOSITORY
 * mode (and Spark is forced to YARN_CLIENT) and the cluster values are re-applied via an undoable
 * command.
 *
 * @param repositoryNode the dropped repository node; the method is a no-op when it is null, has
 *            no backing object, is not a hadoop sub-node, or the target process is not a
 *            Big Data process
 */
private void propaHadoopCfgChanges(IRepositoryNode repositoryNode) {
    // Nothing to propagate without a resolvable repository object.
    if (repositoryNode == null || repositoryNode.getObject() == null) {
        return;
    }
    // Only hadoop sub-nodes (children of a hadoop cluster) trigger propagation.
    IHadoopClusterService hadoopClusterService = HadoopRepositoryUtil.getHadoopClusterService();
    if (hadoopClusterService == null || !hadoopClusterService.isHadoopSubnode(repositoryNode)) {
        return;
    }
    // Only M/R, Spark and Spark Streaming processes carry a cluster configuration to update.
    IProcess process = editor.getProcess();
    if (!ComponentCategory.CATEGORY_4_MAPREDUCE.getName().equals(process.getComponentsType()) && !ComponentCategory.CATEGORY_4_SPARK.getName().equals(process.getComponentsType()) && !ComponentCategory.CATEGORY_4_SPARKSTREAMING.getName().equals(process.getComponentsType())) {
        return;
    }
    // Joblets do not own cluster configuration; skip them.
    if ((process instanceof IProcess2) && (((IProcess2) process).getProperty().getItem() instanceof JobletProcessItem)) {
        return;
    }
    Item subItem = repositoryNode.getObject().getProperty().getItem();
    String propertyParamName = MR_PROPERTY_PREFIX + EParameterName.PROPERTY_TYPE.getName();
    String propertyRepTypeParamName = MR_PROPERTY_PREFIX + EParameterName.REPOSITORY_PROPERTY_TYPE.getName();
    IElementParameter propertyParam = process.getElementParameter(propertyParamName);
    if (propertyParam == null) {
        return;
    }
    String repositoryValue = propertyParam.getRepositoryValue();
    if (repositoryValue == null) {
        return;
    }
    // The parameter's repository value is a '|'-separated list of supported repository types;
    // the dropped item's type must be one of them.
    //$NON-NLS-1$
    String[] supportedRepositoryTypes = repositoryValue.split("\\|");
    String repositoryType = hadoopClusterService.getRepositoryTypeOfHadoopSubItem(subItem);
    if (!ArrayUtils.contains(supportedRepositoryTypes, repositoryType)) {
        return;
    }
    // Resolve the cluster that owns the dropped sub-item.
    Item hadoopClusterItem = hadoopClusterService.getHadoopClusterBySubitemId(new Project(ProjectManager.getInstance().getProject(subItem)), subItem.getProperty().getId());
    String hadoopClusterId = hadoopClusterItem.getProperty().getId();
    if (EmfComponent.REPOSITORY.equals(propertyParam.getValue())) {
        // do nothing when select the same hadoop cluster.
        String propertyId = (String) process.getElementParameter(propertyRepTypeParamName).getValue();
        if (hadoopClusterId.equals(propertyId)) {
            return;
        }
    }
    Connection connection = ((ConnectionItem) subItem).getConnection();
    // Only prompt/update when the cluster settings actually differ from the process's.
    if (hadoopClusterService.hasDiffsFromClusterToProcess(subItem, process)) {
        boolean confirmUpdate = MessageDialog.openConfirm(editor.getSite().getShell(), //$NON-NLS-1$
        Messages.getString("TalendEditorDropTargetListener.updateHadoopCfgDialog.title"), //$NON-NLS-1$
        Messages.getString("TalendEditorDropTargetListener.updateHadoopCfgDialog.msg"));
        if (confirmUpdate) {
            // Update spark mode to YARN_CLIENT if repository
            if (ComponentCategory.CATEGORY_4_SPARK.getName().equals(process.getComponentsType()) || ComponentCategory.CATEGORY_4_SPARKSTREAMING.getName().equals(process.getComponentsType())) {
                IElementParameter sparkLocalParam = process.getElementParameter(HadoopConstants.SPARK_LOCAL_MODE);
                IElementParameter sparkParam = process.getElementParameter(HadoopConstants.SPARK_MODE);
                // Local mode is incompatible with a repository cluster; switch it off.
                if (sparkLocalParam != null && (Boolean) (sparkLocalParam.getValue())) {
                    sparkLocalParam.setValue(false);
                }
                if (sparkParam != null && !HadoopConstants.SPARK_MODE_YARN_CLIENT.equals(sparkParam.getValue())) {
                    sparkParam.setValue(HadoopConstants.SPARK_MODE_YARN_CLIENT);
                }
            }
            propertyParam.setValue(EmfComponent.REPOSITORY);
            // Re-apply the repository values through the command stack so the change is undoable.
            ChangeValuesFromRepository command = new ChangeValuesFromRepository(process, connection, propertyRepTypeParamName, subItem.getProperty().getId());
            execCommandStack(command);
        }
    }
}
Also used : ChangeValuesFromRepository(org.talend.designer.core.ui.editor.cmd.ChangeValuesFromRepository) ValidationRulesConnectionItem(org.talend.core.model.properties.ValidationRulesConnectionItem) ConnectionItem(org.talend.core.model.properties.ConnectionItem) MDMConnectionItem(org.talend.core.model.properties.MDMConnectionItem) SAPConnectionItem(org.talend.core.model.properties.SAPConnectionItem) DatabaseConnectionItem(org.talend.core.model.properties.DatabaseConnectionItem) EbcdicConnectionItem(org.talend.core.model.properties.EbcdicConnectionItem) HL7ConnectionItem(org.talend.core.model.properties.HL7ConnectionItem) PolylineConnection(org.eclipse.draw2d.PolylineConnection) MDMConnection(org.talend.core.model.metadata.builder.connection.MDMConnection) DatabaseConnection(org.talend.core.model.metadata.builder.connection.DatabaseConnection) CDCConnection(org.talend.core.model.metadata.builder.connection.CDCConnection) Connection(org.talend.core.model.metadata.builder.connection.Connection) ValidationRulesConnectionItem(org.talend.core.model.properties.ValidationRulesConnectionItem) JobletProcessItem(org.talend.core.model.properties.JobletProcessItem) ConnectionItem(org.talend.core.model.properties.ConnectionItem) MDMConnectionItem(org.talend.core.model.properties.MDMConnectionItem) SQLPatternItem(org.talend.core.model.properties.SQLPatternItem) LinkRulesItem(org.talend.core.model.properties.LinkRulesItem) SAPConnectionItem(org.talend.core.model.properties.SAPConnectionItem) ProcessItem(org.talend.core.model.properties.ProcessItem) ContextItem(org.talend.core.model.properties.ContextItem) Item(org.talend.core.model.properties.Item) DatabaseConnectionItem(org.talend.core.model.properties.DatabaseConnectionItem) EbcdicConnectionItem(org.talend.core.model.properties.EbcdicConnectionItem) RulesItem(org.talend.core.model.properties.RulesItem) HL7ConnectionItem(org.talend.core.model.properties.HL7ConnectionItem) FileItem(org.talend.core.model.properties.FileItem) 
Project(org.talend.core.model.general.Project) IHadoopClusterService(org.talend.core.hadoop.IHadoopClusterService) JobletProcessItem(org.talend.core.model.properties.JobletProcessItem) IProcess2(org.talend.core.model.process.IProcess2) IElementParameter(org.talend.core.model.process.IElementParameter) IProcess(org.talend.core.model.process.IProcess)

Example 3 with IHadoopClusterService

use of org.talend.core.hadoop.IHadoopClusterService in project tbd-studio-se by Talend.

In class AbstractCheckedServiceProvider, method addCustomConfsJarIfNeeded.

/**
 * Rebuilds the given class loader so that it contains the custom hadoop configuration jar
 * (or a user-specified configuration jar) when the service properties require custom confs.
 * <p>
 * Returns the original loader unchanged when custom confs are disabled, the loader is not a
 * {@link DynamicClassLoader}, or no rebuild is needed.
 *
 * @param baseLoader the loader to start from
 * @param serviceProperties properties describing the hadoop service configuration
 * @param category hadoop category used to look up the default security jars to exclude
 * @return a class loader containing the appropriate configuration jars
 */
protected ClassLoader addCustomConfsJarIfNeeded(ClassLoader baseLoader, HadoopServiceProperties serviceProperties, EHadoopCategory category) {
    ClassLoader classLoader = baseLoader;
    if (serviceProperties.isUseCustomConfs()) {
        if (classLoader instanceof DynamicClassLoader) {
            // Resolve the custom confs jar name; falls back to the default name derived from
            // the cluster label when no hadoop cluster service is available.
            String customConfsJarName;
            IHadoopClusterService hadoopClusterService = getHadoopClusterService();
            if (hadoopClusterService != null) {
                // BUGFIX: this may legitimately be null (Optional.orElse(null)); all uses
                // below are now null-safe instead of risking an NPE.
                customConfsJarName = hadoopClusterService.getCustomConfsJar(serviceProperties.getItem(), true, true).map(b -> b.getCustomConfJarName()).orElse(null);
            } else {
                customConfsJarName = HadoopParameterUtil.getConfsJarDefaultName(serviceProperties.getRelativeHadoopClusterLabel());
            }
            // Set.contains replaces the original manual scan, which NPE'd when
            // customConfsJarName was null.
            Set<String> libraries = ((DynamicClassLoader) classLoader).getLibraries();
            boolean confFileExist = customConfsJarName != null && libraries.contains(customConfsJarName);
            boolean rebuildClassloader = false;
            Set<String> addedJarSet = new HashSet<>();
            Set<String> excludedJarSet = new HashSet<>();
            Consumer<DynamicClassLoader> afterLoaded = null;
            if (serviceProperties.isSetHadoopConf()) {
                // A user-specified configuration jar takes precedence over the custom confs jar.
                String hadoopConfSpecificJarPath = serviceProperties.getHadoopConfSpecificJar();
                boolean jarInvalid = false;
                if (StringUtils.isBlank(hadoopConfSpecificJarPath) || !new File(hadoopConfSpecificJarPath).exists()) {
                    jarInvalid = true;
                }
                if (jarInvalid) {
                    ExceptionHandler.process(new Exception("Hadoop configuration JAR path invalid: " + hadoopConfSpecificJarPath));
                } else {
                    // The specific jar is added after the loader has been rebuilt.
                    afterLoaded = (t) -> t.addLibrary(hadoopConfSpecificJarPath);
                }
                if (customConfsJarName != null) {
                    excludedJarSet.add(customConfsJarName);
                }
                // remove the default jars, since it will be conflict with the new jars
                excludedJarSet.addAll(Arrays.asList(HadoopClassLoaderFactory2.getSecurityJars(category)));
                rebuildClassloader = true;
            } else {
                if (!confFileExist && customConfsJarName != null) {
                    addedJarSet.add(customConfsJarName);
                    // remove the default jars, since it will be conflict with the new jars
                    excludedJarSet.addAll(Arrays.asList(HadoopClassLoaderFactory2.getSecurityJars(category)));
                    rebuildClassloader = true;
                }
            }
            if (rebuildClassloader) {
                try {
                    classLoader = DynamicClassLoader.createNewOneBaseLoader((DynamicClassLoader) baseLoader, addedJarSet.toArray(new String[0]), excludedJarSet.toArray(new String[0]));
                    if (afterLoaded != null) {
                        afterLoaded.accept((DynamicClassLoader) classLoader);
                    }
                } catch (MalformedURLException e) {
                    ExceptionHandler.process(e);
                }
            }
        }
    }
    return classLoader;
}
Also used : DynamicClassLoader(org.talend.core.classloader.DynamicClassLoader) IHadoopClusterService(org.talend.core.hadoop.IHadoopClusterService) MalformedURLException(java.net.MalformedURLException) DynamicClassLoader(org.talend.core.classloader.DynamicClassLoader) File(java.io.File) MalformedURLException(java.net.MalformedURLException) HadoopServerException(org.talend.designer.hdfsbrowse.exceptions.HadoopServerException) HashSet(java.util.HashSet)

Example 4 with IHadoopClusterService

use of org.talend.core.hadoop.IHadoopClusterService in project tbd-studio-se by Talend.

In class HadoopClusterImportHandler, method findRelatedImportItems.

@Override
/**
 * Collects the import items related to the given one, extending the superclass result with every
 * item (hadoop sub-connections and hadoop-linked database connections) that belongs to the same
 * hadoop cluster. Sub-connection ids are also registered on the cluster's connection list.
 *
 * @param monitor progress monitor forwarded to the superclass
 * @param resManager resource manager used to resolve items
 * @param importItem the item whose relatives are looked up
 * @param allImportItemRecords the full set of candidate import items
 * @return the superclass relatives plus all items owned by the same hadoop cluster
 * @throws Exception propagated from the superclass lookup or item resolution
 */
@Override
public List<ImportItem> findRelatedImportItems(IProgressMonitor monitor, ResourcesManager resManager, ImportItem importItem, ImportItem[] allImportItemRecords) throws Exception {
    List<ImportItem> related = new ArrayList<ImportItem>(
            super.findRelatedImportItems(monitor, resManager, importItem, allImportItemRecords));
    // Without the hadoop cluster service there is nothing extra to correlate.
    if (!GlobalServiceRegister.getDefault().isServiceRegistered(IHadoopClusterService.class)) {
        return related;
    }
    IHadoopClusterService clusterService = (IHadoopClusterService) GlobalServiceRegister.getDefault()
            .getService(IHadoopClusterService.class);
    Item clusterItem = importItem.getItem();
    if (clusterService == null || !clusterService.isHadoopClusterItem(clusterItem)) {
        return related;
    }
    resolveItem(resManager, importItem);
    HadoopClusterConnection clusterConnection = (HadoopClusterConnection) ((HadoopClusterConnectionItem) clusterItem)
            .getConnection();
    String clusterId = clusterItem.getProperty().getId();
    for (ImportItem candidate : allImportItemRecords) {
        resolveItem(resManager, candidate);
        Item candidateItem = candidate.getItem();
        // Determine which cluster (if any) the candidate item claims as its owner.
        String ownerClusterId = null;
        if (candidateItem instanceof HadoopSubConnectionItem) {
            ownerClusterId = ((HadoopSubConnection) ((HadoopSubConnectionItem) candidateItem).getConnection())
                    .getRelativeHadoopClusterId();
        } else if (candidateItem instanceof DatabaseConnectionItem) {
            ownerClusterId = ((DatabaseConnection) ((DatabaseConnectionItem) candidateItem).getConnection())
                    .getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CLUSTER_ID);
        }
        if (!clusterId.equals(ownerClusterId)) {
            continue;
        }
        // Sub-connections must also be registered on the cluster's connection list.
        if (candidateItem instanceof HadoopSubConnectionItem) {
            EList<String> connectionList = clusterConnection.getConnectionList();
            String candidateId = candidateItem.getProperty().getId();
            if (!connectionList.contains(candidateId)) {
                connectionList.add(candidateId);
            }
        }
        related.add(candidate);
    }
    return related;
}
Also used : ImportItem(org.talend.repository.items.importexport.handlers.model.ImportItem) HadoopClusterConnectionItem(org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem) HadoopSubConnectionItem(org.talend.repository.model.hadoopcluster.HadoopSubConnectionItem) Item(org.talend.core.model.properties.Item) ImportItem(org.talend.repository.items.importexport.handlers.model.ImportItem) DatabaseConnectionItem(org.talend.core.model.properties.DatabaseConnectionItem) IHadoopClusterService(org.talend.core.hadoop.IHadoopClusterService) ArrayList(java.util.ArrayList) HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection) HadoopSubConnectionItem(org.talend.repository.model.hadoopcluster.HadoopSubConnectionItem) DatabaseConnectionItem(org.talend.core.model.properties.DatabaseConnectionItem)

Example 5 with IHadoopClusterService

use of org.talend.core.hadoop.IHadoopClusterService in project tbd-studio-se by Talend.

In class HadoopServerUtil, method getConfiguration.

/**
 * Builds a hadoop {@code org.apache.hadoop.conf.Configuration} object (loaded reflectively via
 * the hadoop class loader) from the given HDFS connection bean, applying user/group, Kerberos,
 * keytab, WebHDFS SSL and any extra per-connection configuration entries.
 * <p>
 * The thread's context class loader is temporarily swapped to the hadoop loader and restored in
 * the finally block, so callers are unaffected.
 *
 * @param connection the HDFS connection settings
 * @param currentClassLoader the class loader to use for hadoop classes; when null, one is
 *            resolved from the connection
 * @return the populated Configuration instance (typed Object since hadoop classes are loaded
 *         reflectively)
 * @throws HadoopServerException wrapping any reflection/instantiation/login failure
 */
public static Object getConfiguration(HDFSConnectionBean connection, ClassLoader currentClassLoader) throws HadoopServerException {
    Object conf = null;
    String userName = StringUtils.trimToNull(connection.getUserName());
    String namenodePrincipal = StringUtils.trimToNull(connection.getPrincipal());
    String group = StringUtils.trimToNull(connection.getGroup());
    boolean enableKerberos = connection.isEnableKerberos();
    boolean useKeytab = connection.isUseKeytab();
    String keytabPrincipal = StringUtils.trimToNull(connection.getKeytabPrincipal());
    String keytab = StringUtils.trimToNull(connection.getKeytab());
    String nameNodeURI = connection.getNameNodeURI();
    // Connection values may be stored quoted (studio convention); strip quotes before use.
    nameNodeURI = TalendQuoteUtils.removeQuotesIfExist(nameNodeURI);
    if (userName != null) {
        userName = TalendQuoteUtils.removeQuotesIfExist(userName);
    }
    if (namenodePrincipal != null) {
        namenodePrincipal = TalendQuoteUtils.removeQuotesIfExist(namenodePrincipal);
    }
    if (group != null) {
        group = TalendQuoteUtils.removeQuotesIfExist(group);
    }
    if (keytabPrincipal != null) {
        keytabPrincipal = TalendQuoteUtils.removeQuotesIfExist(keytabPrincipal);
    }
    if (keytab != null) {
        keytab = TalendQuoteUtils.removeQuotesIfExist(keytab);
    }
    // For WebHDFS over SSL, pull the trust-store settings from the owning hadoop cluster and
    // install them as system properties before hadoop classes are touched.
    if (HadoopClassLoaderUtil.isWebHDFS(nameNodeURI)) {
        IHadoopClusterService hadoopClusterService = HadoopRepositoryUtil.getHadoopClusterService();
        String hcId = connection.getRelativeHadoopClusterId();
        if (StringUtils.isNotBlank(hcId) && hadoopClusterService != null) {
            Map<String, String> parameters = hadoopClusterService.getHadoopDbParameters(hcId);
            if (parameters.size() > 0) {
                ContextType contextType = hadoopClusterService.getHadoopClusterContextType(hcId);
                if (contextType != null) {
                    connection.setParentContextType(contextType);
                }
                boolean isUseSSL = Boolean.parseBoolean(parameters.get(ConnParameterKeys.CONN_PARA_KEY_USE_WEBHDFS_SSL));
                String trustStorePath = connection.getRealValue(parameters.get(ConnParameterKeys.CONN_PARA_KEY_WEBHDFS_SSL_TRUST_STORE_PATH), true);
                String trustStorePassword = connection.getRealValue(parameters.get(ConnParameterKeys.CONN_PARA_KEY_WEBHDFS_SSL_TRUST_STORE_PASSWORD), true);
                HadoopRepositoryUtil.setSSLSystemProperty(isUseSSL, nameNodeURI, trustStorePath, trustStorePassword);
            }
        }
    }
    ClassLoader classLoader = currentClassLoader;
    ClassLoader oldClassLoaderLoader = Thread.currentThread().getContextClassLoader();
    try {
        if (classLoader == null) {
            classLoader = getClassLoader(connection);
        }
        // Hadoop resolves resources via the context class loader; restored in finally.
        Thread.currentThread().setContextClassLoader(classLoader);
        // $NON-NLS-1$
        conf = Class.forName("org.apache.hadoop.conf.Configuration", true, classLoader).newInstance();
        EHadoopConfProperties.FS_DEFAULT_URI.set(conf, nameNodeURI);
        if (enableKerberos) {
            assert namenodePrincipal != null;
            // With Kerberos, authentication is principal-based; the plain user name is dropped.
            userName = null;
            EHadoopConfProperties.KERBEROS_PRINCIPAL.set(conf, namenodePrincipal);
            // $NON-NLS-1$
            EHadoopConfProperties.AUTHENTICATION.set(conf, "KERBEROS");
        }
        if (group != null) {
            assert userName != null;
            EHadoopConfProperties.JOB_UGI.set(conf, userName + GROUP_SEPARATOR + group);
        }
        if (useKeytab) {
            assert keytabPrincipal != null;
            assert keytab != null;
            // Performs the Kerberos keytab login reflectively against the hadoop loader.
            // $NON-NLS-1$
            ReflectionUtils.invokeStaticMethod(// $NON-NLS-1$
            "org.apache.hadoop.security.UserGroupInformation", // $NON-NLS-1$
            classLoader, "loginUserFromKeytab", // $NON-NLS-1$
            new String[] { keytabPrincipal, keytab });
        }
        // Apply any extra per-connection configuration entries via Configuration.set(key, value).
        Map<String, Object> configurations = connection.getConfigurations();
        Iterator<Entry<String, Object>> configsIterator = configurations.entrySet().iterator();
        while (configsIterator.hasNext()) {
            Entry<String, Object> configEntry = configsIterator.next();
            String key = configEntry.getKey();
            Object value = configEntry.getValue();
            if (key == null) {
                continue;
            }
            ReflectionUtils.invokeMethod(conf, "set", new Object[] { key, String.valueOf(value) }, String.class, // $NON-NLS-1$
            String.class);
        }
        // Force datanode access by hostname (needed when datanode IPs are not routable).
        // $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        ReflectionUtils.invokeMethod(// $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        conf, // $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        "set", // $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        new Object[] { "dfs.client.use.datanode.hostname", "true" }, // $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        String.class, String.class);
    } catch (Exception e) {
        throw new HadoopServerException(e);
    } finally {
        // Always restore the caller's context class loader.
        Thread.currentThread().setContextClassLoader(oldClassLoaderLoader);
    }
    return conf;
}
Also used : ContextType(org.talend.designer.core.model.utils.emf.talendfile.ContextType) HadoopServerException(org.talend.designer.hdfsbrowse.exceptions.HadoopServerException) HadoopServerException(org.talend.designer.hdfsbrowse.exceptions.HadoopServerException) IOException(java.io.IOException) InvocationTargetException(java.lang.reflect.InvocationTargetException) IHadoopClusterService(org.talend.core.hadoop.IHadoopClusterService) Entry(java.util.Map.Entry) DynamicClassLoader(org.talend.core.classloader.DynamicClassLoader)

Aggregations

IHadoopClusterService (org.talend.core.hadoop.IHadoopClusterService)10 Item (org.talend.core.model.properties.Item)6 ProcessItem (org.talend.core.model.properties.ProcessItem)4 IElementParameter (org.talend.core.model.process.IElementParameter)3 ConnectionItem (org.talend.core.model.properties.ConnectionItem)3 ContextItem (org.talend.core.model.properties.ContextItem)3 FileItem (org.talend.core.model.properties.FileItem)3 JobletProcessItem (org.talend.core.model.properties.JobletProcessItem)3 SQLPatternItem (org.talend.core.model.properties.SQLPatternItem)3 ArrayList (java.util.ArrayList)2 HashSet (java.util.HashSet)2 DynamicClassLoader (org.talend.core.classloader.DynamicClassLoader)2 IProcess (org.talend.core.model.process.IProcess)2 IProcess2 (org.talend.core.model.process.IProcess2)2 BusinessProcessItem (org.talend.core.model.properties.BusinessProcessItem)2 DatabaseConnectionItem (org.talend.core.model.properties.DatabaseConnectionItem)2 FolderItem (org.talend.core.model.properties.FolderItem)2 LinkDocumentationItem (org.talend.core.model.properties.LinkDocumentationItem)2 ReferenceFileItem (org.talend.core.model.properties.ReferenceFileItem)2 RoutineItem (org.talend.core.model.properties.RoutineItem)2