Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
Class HadoopClusterContextHandler, method revertPropertiesForContextMode: resolves each exported context variable back to its original literal value on the cluster connection.
@Override
public void revertPropertiesForContextMode(Connection hadoopConn, ContextType contextType) {
    if (hadoopConn instanceof HadoopClusterConnection) {
        HadoopClusterConnection conn = (HadoopClusterConnection) hadoopConn;
        // Resolve every context reference back to its original literal value and strip the quotes.
        String nameNodeUri = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getNameNodeURI()));
        String jobTrackerUri = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getJobTrackerURI()));
        String rmScheduler = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getRmScheduler()));
        String jobHistory = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getJobHistory()));
        String stagingDir = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getStagingDirectory()));
        String nameNodePrin = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getPrincipal()));
        String jtOrRmPrin = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getValue(conn.getJtOrRmPrincipal(), false)));
        String jobHisPrin = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getJobHistoryPrincipal()));
        String userName = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getUserName()));
        String group = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getGroup()));
        String principal = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getKeytabPrincipal()));
        String keyTab = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getKeytab()));
        String cnUserName = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getClouderaNaviUserName()));
        String cnPassword = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getClouderaNaviPassword()));
        String cnUrl = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getClouderaNaviUrl()));
        String cnMetadataUrl = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getClouderaNaviMetadataUrl()));
        String cnClientUrl = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getClouderaNaviClientUrl()));
        String maprTPassword = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getMaprTPassword()));
        String maprTCluster = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getMaprTCluster()));
        String maprTDuration = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getMaprTDuration()));
        String maprTHomeDir = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getMaprTHomeDir()));
        String maprTHadoopLogin = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getMaprTHadoopLogin()));
        String webHDFSSSLTrustStorePath = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getWebHDFSSSLTrustStorePath()));
        String webHDFSSSLTrustStorePassword = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getWebHDFSSSLTrustStorePassword()));
        // Revert the generic parameter map in place.
        for (String paramKey : conn.getParameters().keySet()) {
            String originalValue = ContextParameterUtils.getOriginalValue(contextType, conn.getParameters().get(paramKey));
            conn.getParameters().put(paramKey, originalValue);
        }
        // Revert the Hadoop and Spark property lists, which are stored as JSON strings.
        String hadoopProperties = conn.getHadoopProperties();
        List<Map<String, Object>> propertiesAfterRevert = transformContextModeToHadoopProperties(HadoopRepositoryUtil.getHadoopPropertiesList(hadoopProperties), contextType);
        conn.setHadoopProperties(HadoopRepositoryUtil.getHadoopPropertiesJsonStr(propertiesAfterRevert));
        String sparkProperties = conn.getSparkProperties();
        List<Map<String, Object>> sparkPropertiesAfterRevert = transformContextModeToHadoopProperties(HadoopRepositoryUtil.getHadoopPropertiesList(sparkProperties), contextType);
        conn.setSparkProperties(HadoopRepositoryUtil.getHadoopPropertiesJsonStr(sparkPropertiesAfterRevert));
        // Write the resolved values back onto the connection.
        conn.setNameNodeURI(nameNodeUri);
        conn.setJobTrackerURI(jobTrackerUri);
        conn.setRmScheduler(rmScheduler);
        conn.setJobHistory(jobHistory);
        conn.setStagingDirectory(stagingDir);
        conn.setPrincipal(nameNodePrin);
        conn.setJtOrRmPrincipal(jtOrRmPrin);
        conn.setJobHistoryPrincipal(jobHisPrin);
        conn.setUserName(userName);
        conn.setGroup(group);
        conn.setKeytab(keyTab);
        conn.setKeytabPrincipal(principal);
        conn.setClouderaNaviUserName(cnUserName);
        conn.setClouderaNaviPassword(cnPassword);
        conn.setClouderaNaviUrl(cnUrl);
        conn.setClouderaNaviMetadataUrl(cnMetadataUrl);
        conn.setClouderaNaviClientUrl(cnClientUrl);
        conn.setMaprTPassword(maprTPassword);
        conn.setMaprTCluster(maprTCluster);
        conn.setMaprTDuration(maprTDuration);
        conn.setMaprTHomeDir(maprTHomeDir);
        conn.setMaprTHadoopLogin(maprTHadoopLogin);
        conn.setWebHDFSSSLTrustStorePath(webHDFSSSLTrustStorePath);
        conn.setWebHDFSSSLTrustStorePassword(webHDFSSSLTrustStorePassword);
    }
}
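As a minimal sketch of the pattern above, reverting a single field boils down to resolving the stored context reference and stripping the surrounding quotes before writing the literal back. The helper name revertNameNodeUri is hypothetical, and the snippet assumes the same imports and classpath as the handler (HadoopClusterConnection, ContextType, ContextParameterUtils, TalendQuoteUtils).
// Minimal sketch (hypothetical helper): revert one field from a "context.xxx" reference back to its literal value.
private static void revertNameNodeUri(HadoopClusterConnection conn, ContextType contextType) {
    // getOriginalValue resolves the context reference against the given context type;
    // removeQuotes strips the quotes that the exported value may carry.
    String nameNodeUri = TalendQuoteUtils.removeQuotes(ContextParameterUtils.getOriginalValue(contextType, conn.getNameNodeURI()));
    conn.setNameNodeURI(nameNodeUri);
}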
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
Class HadoopClusterContextHandler, method createContextParameters: builds one context variable per exported EHadoopParamName entry from the connection's settings, plus variables for the Hadoop and Spark properties.
@Override
public List<IContextParameter> createContextParameters(String prefixName, Connection connection, Set<IConnParamName> paramSet) {
    List<IContextParameter> varList = new ArrayList<IContextParameter>();
    if (connection instanceof HadoopClusterConnection) {
        HadoopClusterConnection conn = (HadoopClusterConnection) connection;
        String paramPrefix = prefixName + ConnectionContextHelper.LINE;
        String paramName = null;
        for (IConnParamName param : paramSet) {
            if (param instanceof EHadoopParamName) {
                EHadoopParamName hadoopParam = (EHadoopParamName) param;
                paramName = paramPrefix + hadoopParam;
                switch (hadoopParam) {
                case NameNodeUri:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getNameNodeURI());
                    break;
                case JobTrackerUri:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getJobTrackerURI());
                    break;
                case ResourceManager:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getJobTrackerURI());
                    break;
                case ResourceManagerScheduler:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getRmScheduler());
                    break;
                case JobHistory:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getJobHistory());
                    break;
                case StagingDirectory:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getStagingDirectory());
                    break;
                case NameNodePrin:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getPrincipal());
                    break;
                case JTOrRMPrin:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getJtOrRmPrincipal());
                    break;
                case JobHistroyPrin:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getJobHistoryPrincipal());
                    break;
                case User:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getUserName());
                    break;
                case Group:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getGroup());
                    break;
                case Principal:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getKeytabPrincipal());
                    break;
                case KeyTab:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getKeytab());
                    break;
                case maprTPassword:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getMaprTPassword(), JavaTypesManager.PASSWORD);
                    break;
                case maprTCluster:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getMaprTCluster());
                    break;
                case maprTDuration:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getMaprTDuration(), JavaTypesManager.LONG);
                    break;
                case maprTHomeDir:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getMaprTHomeDir());
                    break;
                case maprTHadoopLogin:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getMaprTHadoopLogin());
                    break;
                case WebHostName:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_HOSTNAME));
                    break;
                case WebPort:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_PORT));
                    break;
                case WebUser:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_USERNAME));
                    break;
                case WebJobResFolder:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_JOB_RESULT_FOLDER));
                    break;
                case HDIUser:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HDI_USERNAME));
                    break;
                case HDIPassword:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HDI_PASSWORD), JavaTypesManager.PASSWORD);
                    break;
                case KeyAzureHost:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_AZURE_HOSTNAME));
                    break;
                case KeyAzureContainer:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_AZURE_CONTAINER));
                    break;
                case KeyAzuresUser:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_AZURE_USERNAME));
                    break;
                case KeyAzurePassword:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_AZURE_PASSWORD), JavaTypesManager.PASSWORD);
                    break;
                case KeyAzureDeployBlob:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_AZURE_DEPLOY_BLOB));
                    break;
                case GoogleProjectId:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_PROJECT_ID));
                    break;
                case GoogleClusterId:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_CLUSTER_ID));
                    break;
                case GoogleRegion:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_REGION));
                    break;
                case GoogleJarsBucket:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_JARS_BUCKET));
                    break;
                case PathToGoogleCredentials:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_PATH_TO_GOOGLE_CREDENTIALS));
                    break;
                case DataBricksEndpoint:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_ENDPOINT));
                    break;
                case DataBricksCloudProvider:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLOUD_PROVIDER));
                    break;
                case DatabricksRunMode:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_RUN_MODE));
                    break;
                case DataBricksClusterId:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLUSTER_ID));
                    break;
                case DataBricksToken:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_TOKEN), JavaTypesManager.PASSWORD);
                    break;
                case DataBricksDBFSDepFolder:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_DBFS_DEP_FOLDER));
                    break;
                case setHadoopConf:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SET_HADOOP_CONF), JavaTypesManager.BOOLEAN);
                    break;
                case hadoopConfSpecificJar:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CONF_SPECIFIC_JAR));
                    break;
                case WebHDFSSSLTrustStorePath:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getWebHDFSSSLTrustStorePath());
                    break;
                case WebHDFSSSLTrustStorePassword:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getWebHDFSSSLTrustStorePassword(), JavaTypesManager.PASSWORD);
                    break;
                case UseKnox:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_USE_KNOX));
                    break;
                case SparkMode:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SPARK_MODE));
                    break;
                case KnoxUrl:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_KNOX_URL));
                    break;
                case KnoxUsername:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_KNOX_USER));
                    break;
                case KnoxPassword:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_KNOX_PASSWORD), JavaTypesManager.PASSWORD);
                    break;
                case KnoxDirectory:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_KNOX_DIRECTORY));
                    break;
                case SynapseHostName:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_HOST));
                    break;
                case SynapseAuthToken:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_AUTH_TOKEN), JavaTypesManager.PASSWORD);
                    break;
                case SynapseSparkPools:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_SPARK_POOLS));
                    break;
                case SynapseFsHostName:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_HOSTNAME));
                    break;
                case SynapseFsContainer:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_CONTAINER));
                    break;
                case SynapseFsUserName:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_USERNAME));
                    break;
                case SynapseFsPassword:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_PASSWORD), JavaTypesManager.PASSWORD);
                    break;
                case SynapseDeployBlob:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_DEPLOY_BLOB));
                    break;
                case SynapseDriverMemory:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DRIVER_MEMORY));
                    break;
                case SynapseDriverCores:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DRIVER_CORES));
                    break;
                case SynapseExecutorMemory:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_EXECUTOR_MEMORY));
                    break;
                case UseTuningProperties:
                    ConnectionContextHelper.createParameters(varList, paramName, conn.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_TUNING_PROPERTIES));
                    break;
                default:
                }
            }
        }
        createHadoopPropertiesContextVariable(prefixName, varList, conn.getHadoopProperties());
        createHadoopPropertiesContextVariable(prefixName, varList, conn.getSparkProperties());
    }
    return varList;
}
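For context, each generated variable name is simply the prefix joined to the EHadoopParamName constant by ConnectionContextHelper.LINE. The sketch below isolates a single export; the helper name exportNameNodeUri is hypothetical and the snippet assumes the same imports as the handler above.
// Minimal sketch (hypothetical helper): export one field of an existing connection as a context parameter.
private static List<IContextParameter> exportNameNodeUri(HadoopClusterConnection conn, String prefixName) {
    List<IContextParameter> varList = new ArrayList<IContextParameter>();
    // The variable name is the prefix plus the EHadoopParamName constant, joined by ConnectionContextHelper.LINE.
    String paramName = prefixName + ConnectionContextHelper.LINE + EHadoopParamName.NameNodeUri;
    ConnectionContextHelper.createParameters(varList, paramName, conn.getNameNodeURI());
    return varList;
}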
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
Class HadoopClusterImportHandler, method findRelatedImportItems: when the imported item is a Hadoop cluster, also pulls in the sub-connections and database connections that reference the same cluster id.
@Override
public List<ImportItem> findRelatedImportItems(IProgressMonitor monitor, ResourcesManager resManager, ImportItem importItem, ImportItem[] allImportItemRecords) throws Exception {
    List<ImportItem> relatedItemRecords = new ArrayList<ImportItem>();
    relatedItemRecords.addAll(super.findRelatedImportItems(monitor, resManager, importItem, allImportItemRecords));
    if (GlobalServiceRegister.getDefault().isServiceRegistered(IHadoopClusterService.class)) {
        IHadoopClusterService hadoopClusterService = (IHadoopClusterService) GlobalServiceRegister.getDefault().getService(IHadoopClusterService.class);
        final Item item = importItem.getItem();
        if (hadoopClusterService != null && hadoopClusterService.isHadoopClusterItem(item)) {
            resolveItem(resManager, importItem);
            HadoopClusterConnection hcConnection = (HadoopClusterConnection) ((HadoopClusterConnectionItem) item).getConnection();
            String clusterId = item.getProperty().getId();
            for (ImportItem ir : allImportItemRecords) {
                resolveItem(resManager, ir);
                Item subItem = ir.getItem();
                String hcId = null;
                // A sub-item references its cluster either directly (HadoopSubConnection)
                // or through a parameter on the database connection.
                if (subItem instanceof HadoopSubConnectionItem) {
                    hcId = ((HadoopSubConnection) ((HadoopSubConnectionItem) subItem).getConnection()).getRelativeHadoopClusterId();
                } else if (subItem instanceof DatabaseConnectionItem) {
                    hcId = ((DatabaseConnection) ((DatabaseConnectionItem) subItem).getConnection()).getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CLUSTER_ID);
                }
                if (clusterId.equals(hcId)) {
                    if (subItem instanceof HadoopSubConnectionItem) {
                        // Re-register the sub-connection on the cluster if it is not listed yet.
                        EList<String> connectionList = hcConnection.getConnectionList();
                        String subItemId = subItem.getProperty().getId();
                        if (!connectionList.contains(subItemId)) {
                            connectionList.add(subItemId);
                        }
                    }
                    relatedItemRecords.add(ir);
                }
            }
        }
    }
    return relatedItemRecords;
}
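The membership test in the loop above can be read as a small predicate: an item belongs to the cluster when it is a Hadoop sub-connection whose relative cluster id matches, or a database connection carrying the cluster id as a parameter. The helper below (belongsToCluster is a hypothetical name) isolates that check using only the calls shown above.
// Minimal sketch (hypothetical helper): does this repository item reference the given cluster id?
private static boolean belongsToCluster(Item subItem, String clusterId) {
    String hcId = null;
    if (subItem instanceof HadoopSubConnectionItem) {
        hcId = ((HadoopSubConnection) ((HadoopSubConnectionItem) subItem).getConnection()).getRelativeHadoopClusterId();
    } else if (subItem instanceof DatabaseConnectionItem) {
        hcId = ((DatabaseConnection) ((DatabaseConnectionItem) subItem).getConnection()).getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CLUSTER_ID);
    }
    return clusterId != null && clusterId.equals(hcId);
}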
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
Class CreateMaprdbAction, method hideAction: hides the action unless the cluster's distribution and version are recognized and support MapR tickets.
@Override
protected boolean hideAction(RepositoryNode node) {
    HadoopClusterConnectionItem hcConnectionItem = HCRepositoryUtil.getHCConnectionItemFromRepositoryNode(node);
    if (hcConnectionItem != null) {
        HadoopClusterConnection hcConnection = (HadoopClusterConnection) hcConnectionItem.getConnection();
        DistributionBean maprdbDistribution = HadoopDistributionsHelper.MAPRDB.getDistribution(hcConnection.getDistribution(), false);
        if (maprdbDistribution != null) {
            IHDistributionVersion hdVersion = maprdbDistribution.getHDVersion(hcConnection.getDfVersion(), false);
            if (hdVersion != null) {
                // found, don't hide
                if (GlobalServiceRegister.getDefault().isServiceRegistered(IHadoopDistributionService.class)) {
                    IHadoopDistributionService hadoopService = (IHadoopDistributionService) GlobalServiceRegister.getDefault().getService(IHadoopDistributionService.class);
                    if (hadoopService != null) {
                        return !hadoopService.doSupportMapRTicket(hdVersion);
                    }
                }
            }
        }
    }
    return true;
}
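Seen the other way around, the lookup chain above amounts to a single support check. The sketch below (supportsMapRTicket is a hypothetical name, and the service is passed in rather than fetched from GlobalServiceRegister) isolates it with the same calls used by hideAction.
// Minimal sketch (hypothetical helper): does this cluster connection support MapR tickets?
private static boolean supportsMapRTicket(HadoopClusterConnection hcConnection, IHadoopDistributionService hadoopService) {
    DistributionBean maprdbDistribution = HadoopDistributionsHelper.MAPRDB.getDistribution(hcConnection.getDistribution(), false);
    if (maprdbDistribution == null) {
        return false;
    }
    IHDistributionVersion hdVersion = maprdbDistribution.getHDVersion(hcConnection.getDfVersion(), false);
    return hdVersion != null && hadoopService.doSupportMapRTicket(hdVersion);
}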
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
Class CreateMaprdbAction, method initConnectionParameters: seeds the new MapR-DB connection parameters (custom jars, database type, product, default port) from the cluster connection.
@Override
protected void initConnectionParameters(Map<String, String> initMap, HadoopClusterConnectionItem hcConnectionItem) {
    super.initConnectionParameters(initMap, hcConnectionItem);
    HadoopClusterConnection hcConnection = (HadoopClusterConnection) hcConnectionItem.getConnection();
    initMap.put(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CUSTOM_JARS, hcConnection.getParameters().get(ECustomVersionGroup.MAPRDB.getName()));
    initMap.put(ConnParameterKeys.CONN_PARA_KEY_DB_TYPE, EDatabaseConnTemplate.MAPRDB.getDBTypeName());
    initMap.put(ConnParameterKeys.CONN_PARA_KEY_DB_PRODUCT, EDatabaseTypeName.MAPRDB.getProduct());
    initMap.put(ConnParameterKeys.CONN_PARA_KEY_DB_PORT, EDatabaseConnTemplate.MAPRDB.getDefaultPort());
}