Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class HadoopClusterService, method getHadoopClusterContextType.
@Override
public ContextType getHadoopClusterContextType(String id) {
    Item item = getHadoopClusterItemById(id);
    if (item instanceof HadoopClusterConnectionItem) {
        HadoopClusterConnectionItem hcItem = (HadoopClusterConnectionItem) item;
        HadoopClusterConnection hcConnection = (HadoopClusterConnection) hcItem.getConnection();
        if (hcConnection != null && hcConnection.isContextMode()) {
            return ConnectionContextHelper.getContextTypeForContextMode(hcConnection, true);
        }
    }
    return null;
}
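A non-null return value doubles as a signal that the cluster definition is context-driven. A minimal caller sketch, assuming the service handle and the cluster id are obtained elsewhere (neither is shown in the snippet above):

// Sketch, not Talend code: clusterService and clusterId are assumptions of this example.
ContextType contextType = clusterService.getHadoopClusterContextType(clusterId);
if (contextType != null) {
    // The cluster connection is in context mode: its stored parameter values may be
    // context variables that must be resolved against this ContextType.
} else {
    // Either the id does not resolve to a Hadoop cluster item, or the connection
    // stores literal values and needs no context resolution.
}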
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class HadoopClusterService, method isInContextMode.
@Override
public boolean isInContextMode(Connection connection) {
    if (connection == null) {
        return false;
    }
    boolean isContextMode = false;
    if (connection instanceof HadoopClusterConnection) {
        isContextMode = connection.isContextMode();
    } else {
        Connection hcConnection = getHadoopClusterConnectionBySubConnection(connection);
        isContextMode = connection.isContextMode() || (hcConnection != null && hcConnection.isContextMode());
    }
    return isContextMode;
}
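The check walks up from a sub-connection (for example a Hive or HDFS connection created under the cluster) to its parent cluster connection, so context mode on the parent alone is enough. A hedged usage sketch, with the variable names as assumptions:

// Sketch: before using stored values literally, ask whether either the sub-connection
// or its parent Hadoop cluster connection is context-driven.
if (hadoopClusterService.isInContextMode(subConnection)) {
    // Values such as the NameNode URI may be context variables (e.g. "context.xyz")
    // and must be resolved through the selected context before use.
}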
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class ClouderaNavigatorOptionPage, method finish.
public void finish() {
    HadoopClusterConnection connection = (HadoopClusterConnection) connectionItem.getConnection();
    connection.setClouderaNaviUserName(navigator_usernameText.getText());
    connection.setClouderaNaviPassword(navigator_passwordText.getText());
    connection.setClouderaNaviUrl(navigator_urlText.getText());
    connection.setClouderaNaviClientUrl(navigator_client_urlText.getText());
    connection.setClouderaNaviMetadataUrl(navigator_metatata_urlText.getText());
    connection.setClouderaAutoCommit(navigator_autocommitBtn.getSelection());
    connection.setClouderaDisableSSL(navigator_disable_sslBtn.getSelection());
    connection.setClouderaDieNoError(navigator_die_on_errorBtn.getSelection());
}
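finish() simply copies the widget values into the EMF connection model when the option page is closed. Reading them back should be symmetric; a sketch under the assumption that the generated model exposes matching getters (the getter names below are inferred from the setters above, not verified against the model):

// Assumed symmetric getters on the generated EMF model (names inferred, illustrative only).
String naviUser = connection.getClouderaNaviUserName();
String naviUrl = connection.getClouderaNaviUrl();
boolean autoCommit = connection.isClouderaAutoCommit();
boolean dieOnError = connection.isClouderaDieNoError();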
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class HadoopContextConfConfigDialog, method open.
@Override
public int open() {
    HadoopClusterConnection connection = (HadoopClusterConnection) connectionItem.getConnection();
    if (connection.isContextMode()) {
        EMap<String, byte[]> confFiles = connection.getConfFiles();
        ContextItem contextItem = ContextUtils.getContextItemById2(connection.getContextId());
        if (contextItem != null) {
            EList<ContextType> contexts = contextItem.getContext();
            for (ContextType contextType : contexts) {
                String contextName = contextType.getName();
                String jarName = HadoopConfsUtils.getConfsJarDefaultName(connectionItem, false, contextName);
                byte[] bs = confFiles.get(contextName);
                if (bs == null) {
                    jarName = Messages.getString("HadoopContextConfConfigDialog.prompt.importJar"); //$NON-NLS-1$
                }
                context2Jar.put(contextName, jarName);
            }
            return super.open();
        }
    } else {
        HadoopConfsUtils.openHadoopConfsWizard(parentForm, connectionItem, false);
    }
    return IDialogConstants.OK_ID;
}
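In context mode the dialog builds one entry per context: the default jar name when a configuration jar has already been stored in confFiles, otherwise a prompt asking the user to import one. A self-contained sketch of that mapping logic using plain collections (all names and strings below are illustrative, not taken from a real repository):

// Illustrative stand-in for the loop above, using plain java.util collections.
Map<String, byte[]> confFiles = new HashMap<>();
confFiles.put("Default", new byte[] { 1 }); // a configuration jar was already imported for "Default"

Map<String, String> context2Jar = new HashMap<>();
for (String contextName : Arrays.asList("Default", "Prod")) {
    String jarName = "hadoop-conf-mycluster_" + contextName + ".jar"; // default per-context jar name
    if (confFiles.get(contextName) == null) {
        jarName = "Import the configuration jar"; // stands in for the Messages prompt string
    }
    context2Jar.put(contextName, jarName);
}
// context2Jar: {Default=hadoop-conf-mycluster_Default.jar, Prod=Import the configuration jar}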
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class StandardHCInfoForm, method init.
@Override
public void init() {
    if (isNeedFillDefaults()) {
        fillDefaults();
    }
    if (isContextMode()) {
        adaptFormToEditable();
    }
    EAuthenticationMode authMode = EAuthenticationMode.getAuthenticationByName(getConnection().getAuthMode(), false);
    if (authMode != null) {
        authenticationCombo.setText(authMode.getDisplayName());
    } else {
        authenticationCombo.select(0);
    }
    HadoopClusterConnection connection = getConnection();
    namenodeUriText.setText(connection.getNameNodeURI());
    jobtrackerUriText.setText(connection.getJobTrackerURI());
    rmSchedulerText.setText(StringUtils.trimToEmpty(connection.getRmScheduler()));
    jobHistoryText.setText(StringUtils.trimToEmpty(connection.getJobHistory()));
    stagingDirectoryText.setText(StringUtils.trimToEmpty(connection.getStagingDirectory()));
    useDNHostBtn.setSelection(connection.isUseDNHost());
    useSparkPropertiesBtn.setSelection(connection.isUseSparkProperties());
    useCustomConfBtn.setSelection(connection.isUseCustomConfs());
    if (useClouderaNaviBtn != null) {
        useClouderaNaviBtn.setSelection(connection.isUseClouderaNavi());
    }
    kerberosBtn.setSelection(connection.isEnableKerberos());
    namenodePrincipalText.setText(connection.getPrincipal());
    jtOrRmPrincipalText.setText(connection.getJtOrRmPrincipal());
    jobHistoryPrincipalText.setText(connection.getJobHistoryPrincipal());
    keytabBtn.setSelection(connection.isUseKeytab());
    keytabPrincipalText.setText(connection.getKeytabPrincipal());
    keytabText.setText(connection.getKeytab());
    userNameText.setText(connection.getUserName());
    groupText.setText(connection.getGroup());
    //
    maprTBtn.setSelection(connection.isEnableMaprT());
    maprTPasswordText.setText(connection.getMaprTPassword());
    maprTClusterText.setText(connection.getMaprTCluster());
    maprTDurationText.setText(connection.getMaprTDuration());
    setMaprTHomeDirBtn.setSelection(connection.isSetMaprTHomeDir());
    setHadoopLoginBtn.setSelection(connection.isSetHadoopLogin());
    preloadAuthentificationBtn.setSelection(connection.isPreloadAuthentification());
    maprTHomeDirText.setText(connection.getMaprTHomeDir());
    maprTHadoopLoginText.setText(connection.getMaprTHadoopLogin());
    //
    useWebHDFSSSLEncryptionBtn.setSelection(connection.isUseWebHDFSSSL());
    webHDFSSSLTrustStorePath.setText(connection.getWebHDFSSSLTrustStorePath());
    webHDFSSSLTrustStorePassword.setText(connection.getWebHDFSSSLTrustStorePassword());
    setHadoopConfBtn.setSelection(Boolean.valueOf(HCParameterUtil.isOverrideHadoopConfs(connection)));
    hadoopConfSpecificJarText.setText(Optional.ofNullable(connection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CONF_SPECIFIC_JAR)).orElse(""));
    needInitializeContext = true;
    updateStatus(IStatus.OK, EMPTY_STRING);
    onUseCustomConfBtnSelected(null);
    onOverrideHadoopConfBtnSelected(null);
    if ("SPARK".equals(((HadoopClusterConnectionImpl) this.connectionItem.getConnection()).getDistribution())) {
        useCustomConfBtn.setEnabled(false);
        useCustomConfBtn.setSelection(true);
        setHadoopConfBtn.setEnabled(false);
        setHadoopConfBtn.setSelection(true);
        hadoopConfSpecificJarText.setEditable(true);
        String sparkModeValue = getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SPARK_MODE);
        if (sparkModeValue != null) {
            sparkModeCombo.setText(getSparkModeByValue(sparkModeValue).getLabel());
        } else {
            sparkModeCombo.setText(ESparkMode.KUBERNETES.getLabel());
        }
        String providerValue = getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLOUD_PROVIDER);
        if (providerValue != null) {
            cloudProviderCombo.setText(getDatabriksCloudProviderByVaule(providerValue).getProviderLableName());
        } else {
            cloudProviderCombo.setText(EDatabriksCloudProvider.AWS.getProviderLableName());
        }
        String runModeValue = getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_RUN_MODE);
        if (runModeValue != null) {
            runSubmitCombo.setText(getDatabriksRunModeByValue(runModeValue).getRunModeLabel());
        } else {
            runSubmitCombo.setText(EDatabriksSubmitMode.CREATE_RUN_JOB.getRunModeLabel());
        }
        String endPoint = StringUtils.trimToEmpty(getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_ENDPOINT));
        endpointText.setText(endPoint);
        String clusterId = StringUtils.trimToEmpty(getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLUSTER_ID));
        clusterIDText.setText(clusterId);
        String token = StringUtils.trimToEmpty(EncryptionUtil.getValue(getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_TOKEN), false));
        tokenText.setText(token);
        String folder = StringUtils.trimToEmpty(getConnection().getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_DBFS_DEP_FOLDER));
        dbfsDepFolderText.setText(folder);
    }
}
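The SPARK branch repeats one pattern: read a connection parameter and fall back to a default when it is absent. A small standalone helper capturing that pattern (this helper is not part of the Talend code; the key constant in the usage comment is illustrative):

// Not Talend API: a stand-in helper that mirrors the "read parameter or fall back" pattern above.
static String paramOrDefault(Map<String, String> parameters, String key, String defaultValue) {
    String value = parameters.get(key);
    return value != null ? value : defaultValue;
}
// e.g. sparkModeCombo.setText(paramOrDefault(parameters, SPARK_MODE_KEY, ESparkMode.KUBERNETES.getLabel()));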