Search in sources :

Example 46 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

This example is taken from the class HCRepositoryUtil, method removeFromHadoopCluster.

/**
 * DOC ycbai Comment method "removeFromHadoopCluster".
 *
 * Removes every occurrence of the given connection id from the hadoop cluster's
 * connection list, and persists the cluster item only if something was actually removed.
 *
 * @param clusterConnectionItem the hadoop cluster item whose connection list is edited
 * @param connectionID the id of the connection to detach from the cluster
 * @throws PersistenceException if saving the updated cluster item fails
 */
public static void removeFromHadoopCluster(HadoopClusterConnectionItem clusterConnectionItem, String connectionID) throws PersistenceException {
    IProxyRepositoryFactory factory = ProxyRepositoryFactory.getInstance();
    List<String> connectionList = ((HadoopClusterConnection) clusterConnectionItem.getConnection()).getConnectionList();
    // removeIf replaces the manual Iterator loop; the predicate mirrors the original
    // check exactly (null ids are kept, even when connectionID is null).
    boolean updated = connectionList.removeIf(id -> id != null && id.equals(connectionID));
    if (updated) {
        // Avoid a needless repository write when the id was not referenced.
        factory.save(clusterConnectionItem);
    }
}
Also used : HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection) IProxyRepositoryFactory(org.talend.repository.model.IProxyRepositoryFactory)

Example 47 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

the class CreateHBaseAction method initConnectionParameters.

@Override
protected void initConnectionParameters(Map<String, String> initMap, HadoopClusterConnectionItem hcConnectionItem) {
    // Populate the common hadoop cluster parameters first, then layer the
    // HBase-specific entries on top.
    super.initConnectionParameters(initMap, hcConnectionItem);
    final HadoopClusterConnection clusterConnection = (HadoopClusterConnection) hcConnectionItem.getConnection();
    final String hbaseCustomJars = clusterConnection.getParameters().get(ECustomVersionGroup.HBASE.getName());
    initMap.put(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CUSTOM_JARS, hbaseCustomJars);
    initMap.put(ConnParameterKeys.CONN_PARA_KEY_DB_TYPE, EDatabaseConnTemplate.HBASE.getDBTypeName());
    initMap.put(ConnParameterKeys.CONN_PARA_KEY_DB_PRODUCT, EDatabaseTypeName.HBASE.getProduct());
    initMap.put(ConnParameterKeys.CONN_PARA_KEY_DB_PORT, EDatabaseConnTemplate.HBASE.getDefaultPort());
}
Also used : HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection)

Example 48 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

the class CreateHBaseAction method hideAction.

@Override
protected boolean hideAction(RepositoryNode node) {
    // Hide the action unless the node resolves to a hadoop cluster whose
    // distribution AND exact HBase version are both known.
    HadoopClusterConnectionItem hcConnectionItem = HCRepositoryUtil.getHCConnectionItemFromRepositoryNode(node);
    if (hcConnectionItem == null) {
        return true;
    }
    HadoopClusterConnection hcConnection = (HadoopClusterConnection) hcConnectionItem.getConnection();
    DistributionBean hbaseDistribution = HadoopDistributionsHelper.HBASE.getDistribution(hcConnection.getDistribution(), false);
    if (hbaseDistribution == null) {
        return true;
    }
    // Show (return false) only when the specific HBase version is resolvable.
    IHDistributionVersion hdVersion = hbaseDistribution.getHDVersion(hcConnection.getDfVersion(), false);
    return hdVersion == null;
}
Also used : IHDistributionVersion(org.talend.core.runtime.hd.IHDistributionVersion) HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection) HadoopClusterConnectionItem(org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem) DistributionBean(org.talend.hadoop.distribution.model.DistributionBean)

Example 49 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

the class HDFSDragAndDropHandler method setHDFSRepositoryValue.

/**
 * Copies a single component parameter value back into the repository metadata.
 *
 * Cluster-level settings (distribution, kerberos, MapR ticket, ...) are written to the
 * parent {@code HadoopClusterConnection}; HDFS-level settings (username, separators,
 * header) are written to the {@code HDFSConnection} itself.
 *
 * @param connection the HDFS connection being updated
 * @param node the component node the value is read from
 * @param param the element parameter identifying which repository value to set
 */
private void setHDFSRepositoryValue(HDFSConnection connection, INode node, IElementParameter param) {
    HadoopClusterConnection hcConnection = HCRepositoryUtil.getRelativeHadoopClusterConnection(connection);
    if (hcConnection == null) {
        // No parent cluster: nothing to map the parameter onto.
        return;
    }
    // Hoisted: the original evaluated param.getRepositoryValue() once per branch.
    final String repositoryValue = param.getRepositoryValue();
    if (EHDFSRepositoryToComponent.DISTRIBUTION.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setDistribution(value);
        }
    } else if (EHDFSRepositoryToComponent.DB_VERSION.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setDfVersion(value);
        }
    } else if (EHDFSRepositoryToComponent.HADOOP_CUSTOM_JARS.getRepositoryValue().equals(repositoryValue)) {
        Object obj = param.getValue();
        if (obj != null) {
            // Custom jars arrive as a serialized string and are merged into the cluster's version map.
            Map<String, Set<String>> customVersionMap = HCVersionUtil.getRepCustomJarsParamFromComp((String) obj, ECustomVersionGroup.COMMON);
            HCVersionUtil.injectCustomVersionMap(hcConnection, customVersionMap);
        }
    } else if (EHDFSRepositoryToComponent.USE_YARN.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setUseYarn(Boolean.valueOf(value));
        }
    } else if (EHDFSRepositoryToComponent.AUTHENTICATION_MODE.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setAuthMode(value);
        }
    } else if (EHDFSRepositoryToComponent.FS_DEFAULT_NAME.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setNameNodeURI(value);
        }
    } else if (EHDFSRepositoryToComponent.USE_KRB.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setEnableKerberos(Boolean.valueOf(value));
        }
    } else if (EHDFSRepositoryToComponent.NAMENODE_PRINCIPAL.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setPrincipal(value);
        }
    } else if (EHDFSRepositoryToComponent.USE_KEYTAB.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setUseKeytab(Boolean.valueOf(value));
        }
    } else if (EHDFSRepositoryToComponent.KEYTAB_PRINCIPAL.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setKeytabPrincipal(value);
        }
    } else if (EHDFSRepositoryToComponent.KEYTAB_PATH.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setKeytab(value);
        }
    } else if (EHDFSRepositoryToComponent.USERNAME.getRepositoryValue().equals(repositoryValue)) {
        // NOTE: username belongs to the HDFS connection, not the parent cluster.
        String value = ComponentToRepositoryProperty.getParameterValue(connection, node, param);
        if (value != null) {
            connection.setUserName(value);
        }
    } else if (EHDFSRepositoryToComponent.GROUP.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setGroup(value);
        }
    } else if (EHDFSRepositoryToComponent.ROWSEPARATOR.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(connection, node, param);
        if (value != null) {
            connection.setRowSeparator(value);
        }
    } else if (EHDFSRepositoryToComponent.FIELDSEPARATOR.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(connection, node, param);
        if (value != null) {
            connection.setFieldSeparator(value);
        }
    } else if (EHDFSRepositoryToComponent.HEADER.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(connection, node, param);
        if (value != null) {
            connection.setHeaderValue(value);
        }
    } else if (EHDFSRepositoryToComponent.USE_MAPRTICKET.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setEnableMaprT(Boolean.valueOf(value));
        }
    } else if (EHDFSRepositoryToComponent.MAPRTICKET_PASSWORD.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setMaprTPassword(value);
        }
    } else if (EHDFSRepositoryToComponent.MAPRTICKET_CLUSTER.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setMaprTCluster(value);
        }
    } else if (EHDFSRepositoryToComponent.MAPRTICKET_DURATION.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setMaprTDuration(value);
        }
    } else if (EHDFSRepositoryToComponent.SET_MAPR_HOME_DIR.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setSetMaprTHomeDir(Boolean.valueOf(value));
        }
    } else if (EHDFSRepositoryToComponent.MAPR_HOME_DIR.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setMaprTHomeDir(value);
        }
    } else if (EHDFSRepositoryToComponent.SET_HADOOP_LOGIN.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setSetHadoopLogin(Boolean.valueOf(value));
        }
    } else if (EHDFSRepositoryToComponent.HADOOP_LOGIN.getRepositoryValue().equals(repositoryValue)) {
        String value = ComponentToRepositoryProperty.getParameterValue(hcConnection, node, param);
        if (value != null) {
            hcConnection.setMaprTHadoopLogin(value);
        }
    }
}
Also used : Set(java.util.Set) MetadataTableRepositoryObject(org.talend.core.repository.model.repositoryObject.MetadataTableRepositoryObject) IRepositoryViewObject(org.talend.core.model.repository.IRepositoryViewObject) HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection)

Example 50 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

This example is taken from the class HDFSModelUtil, method convert2HDFSConnectionBean.

/**
 * DOC ycbai Comment method "convert2HDFSConnectionBean".
 *
 * Converts an {@link HDFSConnection} (plus its parent hadoop cluster, if any) into a
 * flat {@code HDFSConnectionBean} used by the HDFS browser.
 *
 * @param connection the repository HDFS connection to convert
 * @return the populated bean; on conversion failure a partially-filled bean is
 *         returned and the error is logged (best-effort, never throws)
 */
public static HDFSConnectionBean convert2HDFSConnectionBean(HDFSConnection connection) {
    ContextType contextType = null;
    if (connection.isContextMode()) {
        contextType = ConnectionContextHelper.getContextTypeForContextMode(connection, true);
    }
    HDFSConnectionBean bean = new HDFSConnectionBean();
    bean.setContextType(contextType);
    try {
        HadoopClusterConnection hcConnection = HCRepositoryUtil.getRelativeHadoopClusterConnection(connection);
        if (hcConnection != null) {
            ContextType parentContextType = null;
            if (hcConnection.isContextMode()) {
                parentContextType = ConnectionContextHelper.getContextTypeForContextMode(hcConnection, true);
            }
            bean.setParentContextType(parentContextType);
            // Bulk-copy matching cluster properties onto the bean (commons-beanutils).
            BeanUtils.copyProperties(bean, hcConnection);
            Map<String, Object> properties = bean.getAdditionalProperties();
            Map<String, Set<String>> customVersionMap = HCVersionUtil.getCustomVersionMap(hcConnection);
            // Enhanced for-loop replaces the explicit Iterator of the original.
            for (Map.Entry<String, Set<String>> entry : customVersionMap.entrySet()) {
                Set<String> jars = entry.getValue();
                // Only carry over groups that actually declare custom jars.
                if (jars != null && !jars.isEmpty()) {
                    properties.put(entry.getKey(), jars);
                }
            }
        }
        // HDFS-level fields override/augment whatever the bulk copy produced.
        bean.setUserName(connection.getUserName());
        bean.setFieldSeparator(connection.getFieldSeparator());
        bean.setRowSeparator(connection.getRowSeparator());
        bean.setRelativeHadoopClusterId(connection.getRelativeHadoopClusterId());
        Map<String, Object> configurations = bean.getConfigurations();
        List<Map<String, Object>> hadoopProperties = HadoopRepositoryUtil.getHadoopPropertiesFullList(connection, connection.getHadoopProperties(), false, true);
        for (Map<String, Object> propMap : hadoopProperties) {
            String key = TalendQuoteUtils.removeQuotesIfExist(String.valueOf(propMap.get("PROPERTY"))); // $NON-NLS-1$
            String value = TalendQuoteUtils.removeQuotesIfExist(String.valueOf(propMap.get("VALUE"))); // $NON-NLS-1$
            if (StringUtils.isNotEmpty(key) && value != null) {
                configurations.put(key, value);
            }
        }
    } catch (Exception e) {
        // Best-effort conversion: log and return whatever was populated so far.
        log.error("Convert failure from HDFSConnection to HDFSConnectionBean", e);
    }
    return bean;
}
Also used : ContextType(org.talend.designer.core.model.utils.emf.talendfile.ContextType) Set(java.util.Set) Entry(java.util.Map.Entry) HDFSConnectionBean(org.talend.designer.hdfsbrowse.model.HDFSConnectionBean) HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection) Map(java.util.Map)

Aggregations

HadoopClusterConnection (org.talend.repository.model.hadoopcluster.HadoopClusterConnection)78 HadoopClusterConnectionItem (org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem)30 Test (org.junit.Test)15 ContextItem (org.talend.core.model.properties.ContextItem)14 ContextType (org.talend.designer.core.model.utils.emf.talendfile.ContextType)13 DatabaseConnectionItem (org.talend.core.model.properties.DatabaseConnectionItem)9 ArrayList (java.util.ArrayList)7 Map (java.util.Map)7 Item (org.talend.core.model.properties.Item)7 IRepositoryViewObject (org.talend.core.model.repository.IRepositoryViewObject)7 DistributionBean (org.talend.hadoop.distribution.model.DistributionBean)7 ConnectionItem (org.talend.core.model.properties.ConnectionItem)6 HadoopSubConnectionItem (org.talend.repository.model.hadoopcluster.HadoopSubConnectionItem)6 File (java.io.File)5 IHDistributionVersion (org.talend.core.runtime.hd.IHDistributionVersion)5 HashMap (java.util.HashMap)4 HashSet (java.util.HashSet)4 List (java.util.List)4 PersistenceException (org.talend.commons.exception.PersistenceException)4 DatabaseConnection (org.talend.core.model.metadata.builder.connection.DatabaseConnection)4