
Example 61 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

In the class HadoopClusterService, the method getHadoopCustomLibraries.

/*
 * (non-Javadoc)
 *
 * @see org.talend.core.hadoop.IHadoopClusterService#getHadoopCustomLibraries()
 */
@Override
public Map<String, String> getHadoopCustomLibraries(String clusterId) {
    Map<String, String> customLibraries = new HashMap<>();
    HadoopClusterConnection hadoopClusterConnection = HCRepositoryUtil.getRelativeHadoopClusterConnection(clusterId);
    if (hadoopClusterConnection != null) {
        // Copy the connection's EMF EMap into a detached HashMap so callers
        // cannot modify the underlying cluster model.
        EMap<String, String> parameters = hadoopClusterConnection.getParameters();
        for (String key : parameters.keySet()) {
            customLibraries.put(key, parameters.get(key));
        }
    }
    return customLibraries;
}
Also used: HashMap (java.util.HashMap) HadoopClusterConnection (org.talend.repository.model.hadoopcluster.HadoopClusterConnection)
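
The method above is essentially a null-safe defensive copy of the connection's parameter map. A minimal, self-contained sketch of that pattern in plain Java (a plain Map stands in for EMF's EMap; all names here are illustrative, not Talend APIs):

import java.util.HashMap;
import java.util.Map;

public final class SnapshotDemo {

    // Null-safe snapshot: mirrors getHadoopCustomLibraries, which returns an
    // empty map when no connection matches the given cluster id.
    static Map<String, String> snapshot(Map<String, String> source) {
        Map<String, String> copy = new HashMap<>();
        if (source != null) {
            copy.putAll(source); // equivalent to the key-by-key loop above
        }
        return copy;
    }

    public static void main(String[] args) {
        System.out.println(snapshot(null)); // {}
        System.out.println(snapshot(Map.of("hive-site.xml", "/tmp/hive-site.xml")));
    }
}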

Example 62 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

In the class HadoopClusterService, the method copyHadoopCluster.

@Override
public void copyHadoopCluster(final Item sourceItem, final IPath path, String newName) throws PersistenceException, BusinessException {
    if (isHadoopClusterItem(sourceItem)) {
        IProxyRepositoryFactory factory = ProxyRepositoryFactory.getInstance();
        HadoopClusterConnectionItem sourceClusterItem = (HadoopClusterConnectionItem) sourceItem;
        HadoopClusterConnectionItem targetClusterItem = null;
        if (StringUtils.isNotBlank(newName)) {
            targetClusterItem = (HadoopClusterConnectionItem) factory.copy(sourceClusterItem, path, newName);
        } else {
            targetClusterItem = (HadoopClusterConnectionItem) factory.copy(sourceClusterItem, path, true);
        }
        HadoopClusterConnection targetClusterConnection = (HadoopClusterConnection) targetClusterItem.getConnection();
        targetClusterConnection.getConnectionList().clear();
        String targetClusterId = targetClusterItem.getProperty().getId();
        Set<Item> sourceSubitems = HCRepositoryUtil.getSubitemsOfHadoopCluster(sourceClusterItem);
        for (Item subitem : sourceSubitems) {
            Item newSubitem = factory.copy(subitem, path, true);
            if (newSubitem instanceof HadoopSubConnectionItem) {
                // Re-point the copied sub-connection at the new cluster and
                // register it in the new cluster's connection list.
                ((HadoopSubConnection) ((HadoopSubConnectionItem) newSubitem).getConnection()).setRelativeHadoopClusterId(targetClusterId);
                targetClusterConnection.getConnectionList().add(newSubitem.getProperty().getId());
            } else if (newSubitem instanceof DatabaseConnectionItem) {
                // Database connections reference their cluster through a
                // connection parameter instead.
                ((DatabaseConnection) ((DatabaseConnectionItem) newSubitem).getConnection()).getParameters().put(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CLUSTER_ID, targetClusterId);
            }
            factory.save(newSubitem);
        }
        factory.save(targetClusterItem);
    }
}
Also used: ContextItem (org.talend.core.model.properties.ContextItem) HadoopClusterConnectionItem (org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem) Item (org.talend.core.model.properties.Item) DatabaseConnectionItem (org.talend.core.model.properties.DatabaseConnectionItem) HadoopSubConnectionItem (org.talend.repository.model.hadoopcluster.HadoopSubConnectionItem) ConnectionItem (org.talend.core.model.properties.ConnectionItem) DatabaseConnection (org.talend.core.model.metadata.builder.connection.DatabaseConnection) HadoopClusterConnection (org.talend.repository.model.hadoopcluster.HadoopClusterConnection) HadoopSubConnection (org.talend.repository.model.hadoopcluster.HadoopSubConnection) IProxyRepositoryFactory (org.talend.repository.model.IProxyRepositoryFactory)
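
The core of copyHadoopCluster is a copy-and-rewire loop: duplicate each sub-item, point the duplicate at the new cluster id, and register it in the new cluster. A self-contained sketch of that pattern, with plain classes standing in for the Talend repository types (all names here are illustrative, not Talend APIs):

import java.util.ArrayList;
import java.util.List;

public final class CopyRewireDemo {

    static class Cluster {
        final String id;
        final List<String> connectionList = new ArrayList<>();
        Cluster(String id) { this.id = id; }
    }

    static class SubConnection {
        final String id;
        String relativeClusterId;
        SubConnection(String id, String relativeClusterId) {
            this.id = id;
            this.relativeClusterId = relativeClusterId;
        }
    }

    // Copy each sub-connection, re-point the copy at the new cluster id, and
    // register it, mirroring the loop in copyHadoopCluster above.
    static Cluster copyCluster(Cluster source, List<SubConnection> subitems) {
        Cluster target = new Cluster(source.id + "_copy");
        for (SubConnection subitem : subitems) {
            SubConnection copy = new SubConnection(subitem.id + "_copy", target.id);
            target.connectionList.add(copy.id);
        }
        return target;
    }

    public static void main(String[] args) {
        Cluster source = new Cluster("hc1");
        List<SubConnection> subs = List.of(new SubConnection("hdfs1", "hc1"));
        System.out.println(copyCluster(source, subs).connectionList); // [hdfs1_copy]
    }
}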

Example 63 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

In the class AdaptDeprecatedHadoopVersionsMigrationTask, the method execute.

@Override
public ExecutionResult execute(Item item) {
    if (item instanceof HadoopClusterConnectionItem) {
        DistributionBean[] distributions = HadoopDistributionsHelper.HADOOP.getDistributions();
        if (distributions != null && distributions.length > 0) {
            boolean modified = false;
            HadoopClusterConnectionItem hcItem = (HadoopClusterConnectionItem) item;
            HadoopClusterConnection hcConnection = (HadoopClusterConnection) hcItem.getConnection();
            // If the stored distribution is no longer supported, fall back to
            // the first available one.
            DistributionBean distributionBean = HadoopDistributionsHelper.HADOOP.getDistribution(hcConnection.getDistribution(), false);
            if (distributionBean == null) {
                distributionBean = distributions[0];
                hcConnection.setDistribution(distributionBean.getName());
                modified = true;
            }
            // Likewise, replace a deprecated version with the distribution's
            // first supported version.
            String version = hcConnection.getDfVersion();
            DistributionVersion distributionVersion = distributionBean.getVersion(version, false);
            if (distributionVersion == null) {
                DistributionVersion[] versions = distributionBean.getVersions();
                if (versions != null && versions.length > 0) {
                    hcConnection.setDfVersion(versions[0].getVersion());
                    modified = true;
                }
            }
            if (modified) {
                try {
                    ProxyRepositoryFactory.getInstance().save(hcItem, true);
                    return ExecutionResult.SUCCESS_NO_ALERT;
                } catch (PersistenceException e) {
                    ExceptionHandler.process(e);
                    return ExecutionResult.FAILURE;
                }
            }
        }
    }
    return ExecutionResult.NOTHING_TO_DO;
}
Also used: DistributionVersion (org.talend.hadoop.distribution.model.DistributionVersion) PersistenceException (org.talend.commons.exception.PersistenceException) HadoopClusterConnection (org.talend.repository.model.hadoopcluster.HadoopClusterConnection) HadoopClusterConnectionItem (org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem) DistributionBean (org.talend.hadoop.distribution.model.DistributionBean)
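
The migration rule reduces to: keep the stored distribution or version if it is still known, otherwise fall back to the first available entry and mark the item modified. A self-contained sketch of that rule (string arrays stand in for DistributionBean and DistributionVersion; names are illustrative):

public final class FallbackDemo {

    // Returns the stored value if still supported, else the first available
    // fallback, mirroring how the task rewrites deprecated entries.
    static String pickSupported(String stored, String[] available) {
        for (String candidate : available) {
            if (candidate.equals(stored)) {
                return stored; // still supported: nothing to migrate
            }
        }
        return available.length > 0 ? available[0] : stored; // deprecated: fall back
    }

    public static void main(String[] args) {
        String[] supported = { "CDH6", "CDH7" };
        System.out.println(pickSupported("CDH4", supported)); // CDH6
        System.out.println(pickSupported("CDH7", supported)); // CDH7
    }
}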

Example 64 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

In the class HadoopClusterContextHandler, the method matchContextForAttribues.

@Override
protected void matchContextForAttribues(Connection conn, IConnParamName paramName, String hadoopVariableName) {
    HadoopClusterConnection hadoopConn = (HadoopClusterConnection) conn;
    EHadoopParamName hadoopParam = (EHadoopParamName) paramName;
    switch(hadoopParam) {
        case NameNodeUri:
            hadoopConn.setNameNodeURI(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case JobTrackerUri:
        case ResourceManager:
            hadoopConn.setJobTrackerURI(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ResourceManagerScheduler:
            hadoopConn.setRmScheduler(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case JobHistory:
            hadoopConn.setJobHistory(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case StagingDirectory:
            hadoopConn.setStagingDirectory(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case NameNodePrin:
            hadoopConn.setPrincipal(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case JTOrRMPrin:
            hadoopConn.setJtOrRmPrincipal(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case JobHistroyPrin:
            hadoopConn.setJobHistoryPrincipal(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case User:
            hadoopConn.setUserName(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case Group:
            hadoopConn.setGroup(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case Principal:
            hadoopConn.setKeytabPrincipal(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyTab:
            hadoopConn.setKeytab(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebHostName:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_HOSTNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebPort:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_PORT, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebUser:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_USERNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebJobResFolder:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_JOB_RESULT_FOLDER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case HDIUser:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_HDI_USERNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case HDIPassword:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_HDI_PASSWORD, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzureHost:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_HOSTNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzureContainer:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_CONTAINER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzuresUser:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_USERNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzurePassword:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_PASSWORD, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzureDeployBlob:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_DEPLOY_BLOB, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseHostName:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_HOST, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseAuthToken:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_AUTH_TOKEN, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseSparkPools:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_SPARK_POOLS, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseFsHostName:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_HOSTNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseFsContainer:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_CONTAINER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseFsUserName:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_USERNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseFsPassword:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_PASSWORD, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseDeployBlob:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_DEPLOY_BLOB, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseDriverMemory:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DRIVER_MEMORY, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseDriverCores:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DRIVER_CORES, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseExecutorMemory:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_EXECUTOR_MEMORY, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ClouderaNavigatorUsername:
            hadoopConn.setClouderaNaviUserName(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ClouderaNavigatorPassword:
            hadoopConn.setClouderaNaviPassword(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ClouderaNavigatorUrl:
            hadoopConn.setClouderaNaviUrl(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ClouderaNavigatorMetadataUrl:
            hadoopConn.setClouderaNaviMetadataUrl(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTPassword:
            hadoopConn.setMaprTPassword(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTCluster:
            hadoopConn.setMaprTCluster(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTDuration:
            hadoopConn.setMaprTDuration(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTHomeDir:
            hadoopConn.setMaprTHomeDir(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTHadoopLogin:
            hadoopConn.setMaprTHadoopLogin(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case GoogleProjectId:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_PROJECT_ID, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case GoogleClusterId:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_CLUSTER_ID, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case GoogleRegion:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_REGION, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case GoogleJarsBucket:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_JARS_BUCKET, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case PathToGoogleCredentials:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_PATH_TO_GOOGLE_CREDENTIALS, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksEndpoint:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_ENDPOINT, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksCloudProvider:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLOUD_PROVIDER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DatabricksRunMode:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_RUN_MODE, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksClusterId:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLUSTER_ID, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksToken:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_TOKEN, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksDBFSDepFolder:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_DBFS_DEP_FOLDER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebHDFSSSLTrustStorePath:
            hadoopConn.setWebHDFSSSLTrustStorePath(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebHDFSSSLTrustStorePassword:
            hadoopConn.setWebHDFSSSLTrustStorePassword(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case setHadoopConf:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SET_HADOOP_CONF, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case hadoopConfSpecificJar:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CONF_SPECIFIC_JAR, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case UseKnox:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_USE_KNOX, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SparkMode:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SPARK_MODE, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KnoxUrl:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_KNOX_URL, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KnoxUsername:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_KNOX_USER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KnoxPassword:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_KNOX_PASSWORD, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KnoxDirectory:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_KNOX_DIRECTORY, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        default:
            // Remaining parameters have no context support; nothing to do.
            break;
    }
}
Also used: EHadoopParamName (org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName) HadoopClusterConnection (org.talend.repository.model.hadoopcluster.HadoopClusterConnection)
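
Every branch above stores the same thing: a script-code reference to a context variable, produced by ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE). A sketch of the effect on the parameter map, assuming the script code takes the usual Talend Java form "context.<name>" (the exact shape is an assumption here, and the literal key stands in for a ConnParameterKeys constant):

import java.util.HashMap;
import java.util.Map;

public final class ContextualizeDemo {

    // Assumed shape of getNewScriptCode(name, "java"): a context reference.
    static String toScriptCode(String variableName) {
        return "context." + variableName;
    }

    public static void main(String[] args) {
        Map<String, String> parameters = new HashMap<>();
        // Stands in for hadoopConn.getParameters().put(CONN_PARA_KEY_KNOX_URL, ...).
        parameters.put("CONN_PARA_KEY_KNOX_URL", toScriptCode("knoxUrl"));
        System.out.println(parameters); // {CONN_PARA_KEY_KNOX_URL=context.knoxUrl}
    }
}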

Example 65 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

In the class HadoopClusterContextHandler, the method setPropertiesForExistContextMode.

@Override
public void setPropertiesForExistContextMode(Connection connection, Set<IConnParamName> paramSet, Map<ContextItem, List<ConectionAdaptContextVariableModel>> adaptMap) {
    if (connection == null) {
        return;
    }
    if (connection instanceof HadoopClusterConnection) {
        HadoopClusterConnection hadoopConn = (HadoopClusterConnection) connection;
        ContextItem currentContext = null;
        for (IConnParamName param : paramSet) {
            if (param instanceof EHadoopParamName) {
                String hadoopVariableName = null;
                EHadoopParamName hadoopParam = (EHadoopParamName) param;
                if (adaptMap != null && !adaptMap.isEmpty()) {
                    // Find the context variable that was mapped to this
                    // parameter; the first match wins.
                    for (Map.Entry<ContextItem, List<ConectionAdaptContextVariableModel>> entry : adaptMap.entrySet()) {
                        currentContext = entry.getKey();
                        List<ConectionAdaptContextVariableModel> modelList = entry.getValue();
                        for (ConectionAdaptContextVariableModel model : modelList) {
                            if (model.getValue().equals(hadoopParam.name())) {
                                hadoopVariableName = model.getName();
                                break;
                            }
                        }
                    }
                }
                if (hadoopVariableName != null) {
                    hadoopVariableName = getCorrectVariableName(currentContext, hadoopVariableName, hadoopParam);
                    matchContextForAttribues(hadoopConn, hadoopParam, hadoopVariableName);
                }
            }
        }
        matchAdditionProperties(hadoopConn, adaptMap);
    }
}
Also used: ContextItem (org.talend.core.model.properties.ContextItem) EHadoopParamName (org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName) IConnParamName (org.talend.metadata.managment.ui.model.IConnParamName) ArrayList (java.util.ArrayList) List (java.util.List) HadoopClusterConnection (org.talend.repository.model.hadoopcluster.HadoopClusterConnection) ConectionAdaptContextVariableModel (org.talend.core.ui.context.model.table.ConectionAdaptContextVariableModel) Map (java.util.Map)
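
Before delegating to matchContextForAttribues, the handler resolves which context variable was mapped to each parameter name. A self-contained sketch of that lookup (simplified stand-in types, not Talend APIs):

import java.util.List;
import java.util.Map;

public final class VariableLookupDemo {

    static class VariableModel {
        final String name;   // context variable name
        final String value;  // parameter it was mapped to
        VariableModel(String name, String value) { this.name = name; this.value = value; }
    }

    // Scan the adapt map for the variable whose mapped value equals the
    // parameter's name; the first match wins, as in the handler above.
    static String findVariable(String paramName, Map<String, List<VariableModel>> adaptMap) {
        for (List<VariableModel> models : adaptMap.values()) {
            for (VariableModel model : models) {
                if (model.value.equals(paramName)) {
                    return model.name;
                }
            }
        }
        return null; // unmatched parameters are skipped by the handler
    }

    public static void main(String[] args) {
        Map<String, List<VariableModel>> adaptMap =
                Map.of("context1", List.of(new VariableModel("knoxUrl", "KnoxUrl")));
        System.out.println(findVariable("KnoxUrl", adaptMap)); // knoxUrl
    }
}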

Aggregations

HadoopClusterConnection (org.talend.repository.model.hadoopcluster.HadoopClusterConnection): 78 uses
HadoopClusterConnectionItem (org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem): 30 uses
Test (org.junit.Test): 15 uses
ContextItem (org.talend.core.model.properties.ContextItem): 14 uses
ContextType (org.talend.designer.core.model.utils.emf.talendfile.ContextType): 13 uses
DatabaseConnectionItem (org.talend.core.model.properties.DatabaseConnectionItem): 9 uses
ArrayList (java.util.ArrayList): 7 uses
Map (java.util.Map): 7 uses
Item (org.talend.core.model.properties.Item): 7 uses
IRepositoryViewObject (org.talend.core.model.repository.IRepositoryViewObject): 7 uses
DistributionBean (org.talend.hadoop.distribution.model.DistributionBean): 7 uses
ConnectionItem (org.talend.core.model.properties.ConnectionItem): 6 uses
HadoopSubConnectionItem (org.talend.repository.model.hadoopcluster.HadoopSubConnectionItem): 6 uses
File (java.io.File): 5 uses
IHDistributionVersion (org.talend.core.runtime.hd.IHDistributionVersion): 5 uses
HashMap (java.util.HashMap): 4 uses
HashSet (java.util.HashSet): 4 uses
List (java.util.List): 4 uses
PersistenceException (org.talend.commons.exception.PersistenceException): 4 uses
DatabaseConnection (org.talend.core.model.metadata.builder.connection.DatabaseConnection): 4 uses