Search in sources :

Example 31 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

the class HadoopClusterService method getHadoopClusterContextType.

@Override
public ContextType getHadoopClusterContextType(String id) {
    // Resolve the repository item for the given id; only Hadoop cluster
    // connection items in context mode yield a context type.
    Item item = getHadoopClusterItemById(id);
    if (!(item instanceof HadoopClusterConnectionItem)) {
        return null;
    }
    HadoopClusterConnectionItem clusterItem = (HadoopClusterConnectionItem) item;
    HadoopClusterConnection clusterConnection = (HadoopClusterConnection) clusterItem.getConnection();
    if (clusterConnection == null || !clusterConnection.isContextMode()) {
        return null;
    }
    return ConnectionContextHelper.getContextTypeForContextMode(clusterConnection, true);
}
Also used : ContextItem(org.talend.core.model.properties.ContextItem) HadoopClusterConnectionItem(org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem) Item(org.talend.core.model.properties.Item) DatabaseConnectionItem(org.talend.core.model.properties.DatabaseConnectionItem) HadoopSubConnectionItem(org.talend.repository.model.hadoopcluster.HadoopSubConnectionItem) ConnectionItem(org.talend.core.model.properties.ConnectionItem) HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection)

Example 32 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

the class HadoopClusterService method isInContextMode.

@Override
public boolean isInContextMode(Connection connection) {
    // A missing connection is never in context mode.
    if (connection == null) {
        return false;
    }
    // Cluster connections answer for themselves.
    if (connection instanceof HadoopClusterConnection) {
        return connection.isContextMode();
    }
    // A sub-connection is in context mode when either it or its owning
    // cluster connection (if any) is in context mode.
    Connection clusterConnection = getHadoopClusterConnectionBySubConnection(connection);
    return connection.isContextMode() || (clusterConnection != null && clusterConnection.isContextMode());
}
Also used : DatabaseConnection(org.talend.core.model.metadata.builder.connection.DatabaseConnection) HadoopSubConnection(org.talend.repository.model.hadoopcluster.HadoopSubConnection) Connection(org.talend.core.model.metadata.builder.connection.Connection) HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection)

Example 33 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

the class ClouderaNavigatorOptionPage method finish.

/**
 * Copies the Cloudera Navigator settings entered in the form widgets back
 * into the underlying Hadoop cluster connection model.
 */
public void finish() {
    HadoopClusterConnection hcConnection = (HadoopClusterConnection) connectionItem.getConnection();
    // Credentials and endpoints.
    hcConnection.setClouderaNaviUserName(navigator_usernameText.getText());
    hcConnection.setClouderaNaviPassword(navigator_passwordText.getText());
    hcConnection.setClouderaNaviUrl(navigator_urlText.getText());
    hcConnection.setClouderaNaviClientUrl(navigator_client_urlText.getText());
    hcConnection.setClouderaNaviMetadataUrl(navigator_metatata_urlText.getText());
    // Behaviour flags.
    hcConnection.setClouderaAutoCommit(navigator_autocommitBtn.getSelection());
    hcConnection.setClouderaDisableSSL(navigator_disable_sslBtn.getSelection());
    hcConnection.setClouderaDieNoError(navigator_die_on_errorBtn.getSelection());
}
Also used : HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection)

Example 34 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

the class HadoopContextConfConfigDialog method open.

@Override
public int open() {
    HadoopClusterConnection connection = (HadoopClusterConnection) connectionItem.getConnection();
    if (!connection.isContextMode()) {
        // Non-context connections are configured directly through the wizard.
        HadoopConfsUtils.openHadoopConfsWizard(parentForm, connectionItem, false);
        return IDialogConstants.OK_ID;
    }
    EMap<String, byte[]> confFiles = connection.getConfFiles();
    ContextItem contextItem = ContextUtils.getContextItemById2(connection.getContextId());
    if (contextItem == null) {
        // Context mode but the referenced context item is gone: nothing to show.
        return IDialogConstants.OK_ID;
    }
    // Map each context to its configuration jar name; when no conf bytes are
    // stored for a context, show a prompt asking the user to import a jar.
    EList<ContextType> contexts = contextItem.getContext();
    for (ContextType contextType : contexts) {
        String contextName = contextType.getName();
        String jarName = HadoopConfsUtils.getConfsJarDefaultName(connectionItem, false, contextName);
        if (confFiles.get(contextName) == null) {
            jarName = Messages.getString("HadoopContextConfConfigDialog.prompt.importJar"); // $NON-NLS-1$
        }
        context2Jar.put(contextName, jarName);
    }
    return super.open();
}
Also used : ContextItem(org.talend.core.model.properties.ContextItem) ContextType(org.talend.designer.core.model.utils.emf.talendfile.ContextType) HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection)

Example 35 with HadoopClusterConnection

use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.

the class StandardHCInfoForm method init.

/**
 * Populates every widget of the form from the current
 * {@link HadoopClusterConnection}, then applies the SPARK-distribution
 * special case (forced custom confs plus Databricks/Spark-mode fields).
 */
@Override
public void init() {
    if (isNeedFillDefaults()) {
        fillDefaults();
    }
    // In context mode the widgets are switched to their context-aware state.
    if (isContextMode()) {
        adaptFormToEditable();
    }
    // Authentication combo: select the stored mode, or the first entry
    // when the stored name does not resolve to a known mode.
    EAuthenticationMode authMode = EAuthenticationMode.getAuthenticationByName(getConnection().getAuthMode(), false);
    if (authMode != null) {
        authenticationCombo.setText(authMode.getDisplayName());
    } else {
        authenticationCombo.select(0);
    }
    HadoopClusterConnection connection = getConnection();
    // Core cluster endpoints.
    namenodeUriText.setText(connection.getNameNodeURI());
    jobtrackerUriText.setText(connection.getJobTrackerURI());
    rmSchedulerText.setText(StringUtils.trimToEmpty(connection.getRmScheduler()));
    jobHistoryText.setText(StringUtils.trimToEmpty(connection.getJobHistory()));
    stagingDirectoryText.setText(StringUtils.trimToEmpty(connection.getStagingDirectory()));
    useDNHostBtn.setSelection(connection.isUseDNHost());
    useSparkPropertiesBtn.setSelection(connection.isUseSparkProperties());
    useCustomConfBtn.setSelection(connection.isUseCustomConfs());
    // This button only exists for distributions supporting Cloudera Navigator.
    if (useClouderaNaviBtn != null) {
        useClouderaNaviBtn.setSelection(connection.isUseClouderaNavi());
    }
    // Kerberos / keytab authentication settings.
    kerberosBtn.setSelection(connection.isEnableKerberos());
    namenodePrincipalText.setText(connection.getPrincipal());
    jtOrRmPrincipalText.setText(connection.getJtOrRmPrincipal());
    jobHistoryPrincipalText.setText(connection.getJobHistoryPrincipal());
    keytabBtn.setSelection(connection.isUseKeytab());
    keytabPrincipalText.setText(connection.getKeytabPrincipal());
    keytabText.setText(connection.getKeytab());
    userNameText.setText(connection.getUserName());
    groupText.setText(connection.getGroup());
    // MapR ticket authentication settings.
    maprTBtn.setSelection(connection.isEnableMaprT());
    maprTPasswordText.setText(connection.getMaprTPassword());
    maprTClusterText.setText(connection.getMaprTCluster());
    maprTDurationText.setText(connection.getMaprTDuration());
    setMaprTHomeDirBtn.setSelection(connection.isSetMaprTHomeDir());
    setHadoopLoginBtn.setSelection(connection.isSetHadoopLogin());
    preloadAuthentificationBtn.setSelection(connection.isPreloadAuthentification());
    maprTHomeDirText.setText(connection.getMaprTHomeDir());
    maprTHadoopLoginText.setText(connection.getMaprTHadoopLogin());
    // WebHDFS SSL encryption settings.
    useWebHDFSSSLEncryptionBtn.setSelection(connection.isUseWebHDFSSSL());
    webHDFSSSLTrustStorePath.setText(connection.getWebHDFSSSLTrustStorePath());
    webHDFSSSLTrustStorePassword.setText(connection.getWebHDFSSSLTrustStorePassword());
    // Hadoop configuration override: checkbox plus optional specific jar name.
    setHadoopConfBtn.setSelection(Boolean.valueOf(HCParameterUtil.isOverrideHadoopConfs(connection)));
    hadoopConfSpecificJarText.setText(Optional.ofNullable(connection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CONF_SPECIFIC_JAR)).orElse(""));
    needInitializeContext = true;
    updateStatus(IStatus.OK, EMPTY_STRING);
    // Re-run the selection handlers so dependent widgets enable/disable
    // consistently with the states restored above.
    onUseCustomConfBtnSelected(null);
    onOverrideHadoopConfBtnSelected(null);
    // SPARK distributions force custom/overridden confs and expose extra
    // Spark-mode and Databricks fields, each falling back to a default
    // when the stored parameter is absent.
    if ("SPARK".equals(((HadoopClusterConnectionImpl) this.connectionItem.getConnection()).getDistribution())) {
        useCustomConfBtn.setEnabled(false);
        useCustomConfBtn.setSelection(true);
        setHadoopConfBtn.setEnabled(false);
        setHadoopConfBtn.setSelection(true);
        hadoopConfSpecificJarText.setEditable(true);
        String sparkModeValue = getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SPARK_MODE);
        if (sparkModeValue != null) {
            sparkModeCombo.setText(getSparkModeByValue(sparkModeValue).getLabel());
        } else {
            sparkModeCombo.setText(ESparkMode.KUBERNETES.getLabel());
        }
        String providerValue = getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLOUD_PROVIDER);
        if (providerValue != null) {
            cloudProviderCombo.setText(getDatabriksCloudProviderByVaule(providerValue).getProviderLableName());
        } else {
            cloudProviderCombo.setText(EDatabriksCloudProvider.AWS.getProviderLableName());
        }
        String runModeValue = getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_RUN_MODE);
        if (runModeValue != null) {
            runSubmitCombo.setText(getDatabriksRunModeByValue(runModeValue).getRunModeLabel());
        } else {
            runSubmitCombo.setText(EDatabriksSubmitMode.CREATE_RUN_JOB.getRunModeLabel());
        }
        String endPoint = StringUtils.trimToEmpty(getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_ENDPOINT));
        endpointText.setText(endPoint);
        String clusterId = StringUtils.trimToEmpty(getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLUSTER_ID));
        clusterIDText.setText(clusterId);
        // Token is stored encrypted; decrypt before showing it in the widget.
        String token = StringUtils.trimToEmpty(EncryptionUtil.getValue(getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_TOKEN), false));
        tokenText.setText(token);
        String folder = StringUtils.trimToEmpty(getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_DBFS_DEP_FOLDER));
        dbfsDepFolderText.setText(folder);
    }
}
Also used : EAuthenticationMode(org.talend.core.hadoop.version.EAuthenticationMode) HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection)

Aggregations

HadoopClusterConnection (org.talend.repository.model.hadoopcluster.HadoopClusterConnection)78 HadoopClusterConnectionItem (org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem)30 Test (org.junit.Test)15 ContextItem (org.talend.core.model.properties.ContextItem)14 ContextType (org.talend.designer.core.model.utils.emf.talendfile.ContextType)13 DatabaseConnectionItem (org.talend.core.model.properties.DatabaseConnectionItem)9 ArrayList (java.util.ArrayList)7 Map (java.util.Map)7 Item (org.talend.core.model.properties.Item)7 IRepositoryViewObject (org.talend.core.model.repository.IRepositoryViewObject)7 DistributionBean (org.talend.hadoop.distribution.model.DistributionBean)7 ConnectionItem (org.talend.core.model.properties.ConnectionItem)6 HadoopSubConnectionItem (org.talend.repository.model.hadoopcluster.HadoopSubConnectionItem)6 File (java.io.File)5 IHDistributionVersion (org.talend.core.runtime.hd.IHDistributionVersion)5 HashMap (java.util.HashMap)4 HashSet (java.util.HashSet)4 List (java.util.List)4 PersistenceException (org.talend.commons.exception.PersistenceException)4 DatabaseConnection (org.talend.core.model.metadata.builder.connection.DatabaseConnection)4