Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class HDFSWizard, method initConnectionFromHadoopCluster.
@Override
protected void initConnectionFromHadoopCluster(HDFSConnection hadoopConnection, RepositoryNode node) {
    HadoopClusterConnectionItem hcConnectionItem = HCRepositoryUtil.getHCConnectionItemFromRepositoryNode(node);
    if (hcConnectionItem != null) {
        HadoopClusterConnection hcConnection = (HadoopClusterConnection) hcConnectionItem.getConnection();
        hadoopConnection.setRelativeHadoopClusterId(hcConnectionItem.getProperty().getId());
        hadoopConnection.setUserName(ConnectionContextHelper.getParamValueOffContext(hcConnection, hcConnection.getUserName()));
    }
}
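The same two calls, HCRepositoryUtil.getHCConnectionItemFromRepositoryNode and ConnectionContextHelper.getParamValueOffContext, are enough to resolve cluster credentials for any repository node. A minimal sketch under the same imports as the snippet above; the helper name resolveClusterUserName is hypothetical, not code from the project:

// Hypothetical helper: returns the context-resolved user name of the Hadoop
// cluster a repository node belongs to, or null when the node is not attached
// to a cluster connection item.
private static String resolveClusterUserName(RepositoryNode node) {
    HadoopClusterConnectionItem hcConnectionItem = HCRepositoryUtil.getHCConnectionItemFromRepositoryNode(node);
    if (hcConnectionItem == null) {
        return null;
    }
    HadoopClusterConnection hcConnection = (HadoopClusterConnection) hcConnectionItem.getConnection();
    return ConnectionContextHelper.getParamValueOffContext(hcConnection, hcConnection.getUserName());
}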
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class CreateHDFSAction, method hideAction.
@Override
protected boolean hideAction(RepositoryNode node) {
    HadoopClusterConnectionItem hcConnectionItem = HCRepositoryUtil.getHCConnectionItemFromRepositoryNode(node);
    if (hcConnectionItem != null) {
        HadoopClusterConnection hcConnection = (HadoopClusterConnection) hcConnectionItem.getConnection();
        DistributionBean hdfsDistribution = HadoopDistributionsHelper.HDFS.getDistribution(hcConnection.getDistribution(), false);
        if (hdfsDistribution != null) {
            if (EHadoopDistributions.GOOGLE_CLOUD_DATAPROC.getDisplayName().equals(hdfsDistribution.getDisplayName())) {
                return true;
            }
            IHDistributionVersion hdVersion = hdfsDistribution.getHDVersion(hcConnection.getDfVersion(), false);
            if (hdVersion != null) {
                // found, don't hide
                return false;
            }
        }
    }
    return true;
}
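The visibility decision boils down to two lookups on the cluster connection: the distribution and its version. A sketch of the same check factored into a standalone predicate, using only the calls shown above; the method name supportsHdfsCreation is an assumption, not code from the project:

// Hypothetical predicate: true when the cluster's distribution/version pair is
// known to the HDFS distribution helper and is not Google Cloud Dataproc.
private static boolean supportsHdfsCreation(HadoopClusterConnection hcConnection) {
    DistributionBean hdfsDistribution = HadoopDistributionsHelper.HDFS.getDistribution(hcConnection.getDistribution(), false);
    if (hdfsDistribution == null) {
        return false;
    }
    if (EHadoopDistributions.GOOGLE_CLOUD_DATAPROC.getDisplayName().equals(hdfsDistribution.getDisplayName())) {
        return false; // Dataproc clusters do not get the HDFS creation action
    }
    return hdfsDistribution.getHDVersion(hcConnection.getDfVersion(), false) != null;
}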
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class HCRepositoryUtil, method getSubitemsOfHadoopCluster.
/**
 * DOC ycbai Comment method "getSubitemsOfHadoopCluster".
 *
 * Get the sub-items of a Hadoop cluster, such as HDFS, HCatalog, Hive, etc.
 *
 * @param item the Hadoop cluster connection item
 * @return the set of sub-items, empty if the item is not a Hadoop cluster connection item
 * @throws PersistenceException
 */
public static Set<Item> getSubitemsOfHadoopCluster(Item item) throws PersistenceException {
    Set<Item> subItems = new HashSet<Item>();
    if (item.eClass() != HadoopClusterPackage.Literals.HADOOP_CLUSTER_CONNECTION_ITEM) {
        return subItems;
    }
    Project project = new Project(ProjectManager.getInstance().getProject(item.getProperty()));
    HadoopClusterConnectionItem clusterConnectionItem = (HadoopClusterConnectionItem) item;
    HadoopClusterConnection clusterConnection = (HadoopClusterConnection) clusterConnectionItem.getConnection();
    EList<String> connectionList = clusterConnection.getConnectionList();
    for (String connId : connectionList) {
        if (connId != null) {
            IRepositoryViewObject repObj = ProxyRepositoryFactory.getInstance().getLastVersion(project, connId);
            if (repObj != null && repObj.getProperty() != null) {
                Item subItem = repObj.getProperty().getItem();
                if (subItem != null) {
                    subItems.add(subItem);
                }
            }
        }
    }
    String clusterId = clusterConnectionItem.getProperty().getId();
    List<IRepositoryViewObject> repObjs = ProxyRepositoryFactory.getInstance().getAll(project, ERepositoryObjectType.METADATA_CONNECTIONS);
    for (IRepositoryViewObject repObj : repObjs) {
        if (repObj != null && repObj.getProperty() != null) {
            DatabaseConnectionItem dbItem = (DatabaseConnectionItem) repObj.getProperty().getItem();
            DatabaseConnection dbConnection = (DatabaseConnection) dbItem.getConnection();
            String hcId = dbConnection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CLUSTER_ID);
            if (clusterId.equals(hcId)) {
                subItems.add(dbItem);
            }
        }
    }
    return subItems;
}
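A minimal usage sketch for this utility, assuming the caller already holds the cluster's repository Item (the variable clusterItem is hypothetical, and ExceptionHandler is assumed as the usual Talend error logger; adapt error handling to your context):

try {
    // List the labels of every HDFS/HCatalog/Hive/... item attached to the cluster.
    Set<Item> subItems = HCRepositoryUtil.getSubitemsOfHadoopCluster(clusterItem);
    for (Item subItem : subItems) {
        System.out.println(subItem.getProperty().getLabel());
    }
} catch (PersistenceException e) {
    ExceptionHandler.process(e);
}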
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class HCRepositoryUtil, method getHadoopDbParameters.
public static Map<String, String> getHadoopDbParameters(HadoopClusterConnectionItem clusterItem) {
    Map<String, String> parameters = new HashMap<>();
    if (clusterItem == null) {
        return parameters;
    }
    HadoopClusterConnection hcConnection = (HadoopClusterConnection) clusterItem.getConnection();
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CLUSTER_ID, clusterItem.getProperty().getId());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_NAME_NODE_URL, hcConnection.getNameNodeURI());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_JOB_TRACKER_URL, hcConnection.getJobTrackerURI());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_USE_YARN, String.valueOf(hcConnection.isUseYarn()));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_USE_CUSTOM_CONFS, String.valueOf(hcConnection.isUseCustomConfs()));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_USE_KRB, String.valueOf(hcConnection.isEnableKerberos()));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_NAME_NODE_PRINCIPAL, hcConnection.getPrincipal());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_JOB_TRACKER_PRINCIPAL, hcConnection.getJtOrRmPrincipal());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_JOB_HISTORY_PRINCIPAL, hcConnection.getJobHistoryPrincipal());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_RESOURCEMANAGER_SCHEDULER_ADDRESS, hcConnection.getRmScheduler());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_JOBHISTORY_ADDRESS, hcConnection.getJobHistory());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_STAGING_DIRECTORY, hcConnection.getStagingDirectory());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_USE_DATANODE_HOSTNAME, String.valueOf(hcConnection.isUseDNHost()));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_DB_SERVER, HadoopParameterUtil.getHostNameFromNameNodeURI(ConnectionContextHelper.getParamValueOffContext(hcConnection, hcConnection.getNameNodeURI())));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_USERNAME, hcConnection.getUserName());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_DISTRIBUTION, hcConnection.getDistribution());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_VERSION, hcConnection.getDfVersion());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HBASE_DISTRIBUTION, hcConnection.getDistribution());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HBASE_VERSION, hcConnection.getDfVersion());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRDB_DISTRIBUTION, hcConnection.getDistribution());
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRDB_VERSION, hcConnection.getDfVersion());
    // Google Cloud Dataproc parameters
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_GOOGLE_PROJECT_ID, hcConnection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_PROJECT_ID));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_GOOGLE_CLUSTER_ID, hcConnection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_CLUSTER_ID));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_GOOGLE_REGION, hcConnection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_REGION));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_GOOGLE_JARS_BUCKET, hcConnection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_JARS_BUCKET));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_AUTHENTICATION_DEFINE_PATH_TO_GOOGLE_CREDENTIALS, hcConnection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_DEFINE_PATH_TO_GOOGLE_CREDENTIALS));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_AUTHENTICATION_PATH_TO_GOOGLE_CREDENTIALS, hcConnection.getParameters().get(ConnParameterKeys.CONN_PARA_KEY_PATH_TO_GOOGLE_CREDENTIALS));
    if (hcConnection.isEnableKerberos()) {
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_USEKEYTAB, String.valueOf(hcConnection.isUseKeytab()));
        if (hcConnection.isUseKeytab()) {
            parameters.put(ConnParameterKeys.CONN_PARA_KEY_KEYTAB_PRINCIPAL, hcConnection.getKeytabPrincipal());
            parameters.put(ConnParameterKeys.CONN_PARA_KEY_KEYTAB, hcConnection.getKeytab());
        }
    }
    if (hcConnection.isUseWebHDFSSSL()) {
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_USE_WEBHDFS_SSL, String.valueOf(hcConnection.isUseWebHDFSSSL()));
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_WEBHDFS_SSL_TRUST_STORE_PATH, hcConnection.getWebHDFSSSLTrustStorePath());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_WEBHDFS_SSL_TRUST_STORE_PASSWORD, hcConnection.getWebHDFSSSLTrustStorePassword());
    }
    // hbase/hive/maprdb
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HBASE_AUTHENTICATION_USE_MAPRTICKET, String.valueOf(hcConnection.isEnableMaprT()));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRDB_AUTHENTICATION_USE_MAPRTICKET, String.valueOf(hcConnection.isEnableMaprT()));
    parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_AUTHENTICATION_USE_MAPRTICKET, String.valueOf(hcConnection.isEnableMaprT()));
    if (hcConnection.isEnableMaprT()) {
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_HBASE_AUTHENTICATION_USERNAME, hcConnection.getUserName());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_HBASE_AUTHENTICATION_MAPRTICKET_PASSWORD, hcConnection.getMaprTPassword());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_HBASE_AUTHENTICATION_MAPRTICKET_CLUSTER, hcConnection.getMaprTCluster());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_HBASE_AUTHENTICATION_MAPRTICKET_DURATION, hcConnection.getMaprTDuration());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRDB_AUTHENTICATION_USERNAME, hcConnection.getUserName());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRDB_AUTHENTICATION_MAPRTICKET_PASSWORD, hcConnection.getMaprTPassword());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRDB_AUTHENTICATION_MAPRTICKET_CLUSTER, hcConnection.getMaprTCluster());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRDB_AUTHENTICATION_MAPRTICKET_DURATION, hcConnection.getMaprTDuration());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_AUTHENTICATION_USERNAME, hcConnection.getUserName());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_AUTHENTICATION_MAPRTICKET_PASSWORD, hcConnection.getMaprTPassword());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_AUTHENTICATION_MAPRTICKET_CLUSTER, hcConnection.getMaprTCluster());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_HIVE_AUTHENTICATION_MAPRTICKET_DURATION, hcConnection.getMaprTDuration());
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRTICKET_SETMAPRHOMEDIR, String.valueOf(hcConnection.isSetMaprTHomeDir()));
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRTICKET_MAPRHOMEDIR, ConnectionContextHelper.getParamValueOffContext(hcConnection, hcConnection.getMaprTHomeDir()));
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRTICKET_SETMAPRHADOOPLOGIN, String.valueOf(hcConnection.isSetHadoopLogin()));
        parameters.put(ConnParameterKeys.CONN_PARA_KEY_MAPRTICKET_MAPRHADOOPLOGIN, ConnectionContextHelper.getParamValueOffContext(hcConnection, hcConnection.getMaprTHadoopLogin()));
    }
    return parameters;
}
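The returned map keeps the ConnParameterKeys constants as keys, so a caller can copy it straight into the parameters of a dependent connection. A sketch under that assumption; clusterItem and dbConnection are hypothetical variables the caller would already hold:

// Propagate the cluster's settings onto a Hive/HBase DatabaseConnection created under it.
Map<String, String> clusterParams = HCRepositoryUtil.getHadoopDbParameters(clusterItem);
if (!clusterParams.isEmpty()) {
    dbConnection.getParameters().putAll(clusterParams);
}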
Use of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in project tbd-studio-se by Talend.
The class HadoopClusterContextHandler, method setPropertiesForContextMode.
@Override
public void setPropertiesForContextMode(String prefixName, Connection connection, Set<IConnParamName> paramSet) {
    if (connection == null) {
        return;
    }
    if (connection instanceof HadoopClusterConnection) {
        HadoopClusterConnection hadoopConn = (HadoopClusterConnection) connection;
        String originalVariableName = prefixName + ConnectionContextHelper.LINE;
        String hadoopVariableName = null;
        for (IConnParamName param : paramSet) {
            if (param instanceof EHadoopParamName) {
                EHadoopParamName hadoopConnectionParam = (EHadoopParamName) param;
                originalVariableName = prefixName + ConnectionContextHelper.LINE;
                hadoopVariableName = originalVariableName + hadoopConnectionParam;
                matchContextForAttribues(hadoopConn, hadoopConnectionParam, hadoopVariableName);
            }
        }
        String hadoopProperties = hadoopConn.getHadoopProperties();
        List<Map<String, Object>> propertiesAfterContext = transformHadoopPropertiesForContextMode(HadoopRepositoryUtil.getHadoopPropertiesList(hadoopProperties), prefixName);
        hadoopConn.setHadoopProperties(HadoopRepositoryUtil.getHadoopPropertiesJsonStr(propertiesAfterContext));
        // Spark properties go through the same context transformation
        String sparkProperties = hadoopConn.getSparkProperties();
        List<Map<String, Object>> sparkPropertiesAfterContext = transformHadoopPropertiesForContextMode(HadoopRepositoryUtil.getHadoopPropertiesList(sparkProperties), prefixName);
        hadoopConn.setSparkProperties(HadoopRepositoryUtil.getHadoopPropertiesJsonStr(sparkPropertiesAfterContext));
    }
}
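A minimal invocation sketch, assuming EHadoopParamName is an enum, that "MyCluster" is the context prefix chosen in the export wizard, that every parameter should be exported, and that hadoopClusterConnection is a HadoopClusterConnection the caller already holds; after the call the connection's attributes and its Hadoop/Spark property values reference context variables (built as prefix + separator + parameter name by the loop above) instead of literal values:

HadoopClusterContextHandler handler = new HadoopClusterContextHandler();
Set<IConnParamName> paramSet = new HashSet<>(Arrays.asList(EHadoopParamName.values()));
handler.setPropertiesForContextMode("MyCluster", hadoopClusterConnection, paramSet);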