Example usage of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in the Talend project tbd-studio-se:
class HadoopClusterContextHandler, method getConAdditionPropertiesForContextMode.
@Override
public Set<String> getConAdditionPropertiesForContextMode(Connection conn) {
    // Collect the names of all additional properties (Hadoop and Spark alike)
    // that should be exposed as context variables for this connection.
    Set<String> contextVariables = new HashSet<String>();
    if (conn instanceof HadoopClusterConnection) {
        HadoopClusterConnection clusterConnection = (HadoopClusterConnection) conn;
        contextVariables.addAll(getConAdditionProperties(
                HadoopRepositoryUtil.getHadoopPropertiesList(clusterConnection.getHadoopProperties())));
        contextVariables.addAll(getConAdditionProperties(
                HadoopRepositoryUtil.getHadoopPropertiesList(clusterConnection.getSparkProperties())));
    }
    return contextVariables;
}
Example usage of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in the Talend project tbd-studio-se:
class HadoopClusterContextHandler, method matchAdditionProperties.
@Override
protected void matchAdditionProperties(Connection conn, Map<ContextItem, List<ConectionAdaptContextVariableModel>> adaptMap) {
    // Rewrites the connection's additional Hadoop/Spark properties so that values
    // matched to existing context variables are replaced by context references.
    if (conn instanceof HadoopClusterConnection) {
        HadoopClusterConnection hadoopConn = (HadoopClusterConnection) conn;
        if (adaptMap != null && !adaptMap.isEmpty()) {
            List<Map<String, Object>> hadoopListProperties = HadoopRepositoryUtil.getHadoopPropertiesList(hadoopConn.getHadoopProperties());
            List<Map<String, Object>> sparkListProperties = HadoopRepositoryUtil.getHadoopPropertiesList(hadoopConn.getSparkProperties());
            // BUGFIX: derive each key set from its own property list. Previously both
            // key sets were produced by the same call to
            // getConAdditionPropertiesForContextMode(conn), which returns the UNION of
            // Hadoop and Spark keys — so a Spark-only key also triggered a rewrite of
            // the Hadoop property list, and vice versa.
            Set<String> keys = getConAdditionProperties(hadoopListProperties);
            Set<String> sparkKeys = getConAdditionProperties(sparkListProperties);
            for (Map.Entry<ContextItem, List<ConectionAdaptContextVariableModel>> entry : adaptMap.entrySet()) {
                List<ConectionAdaptContextVariableModel> modelList = entry.getValue();
                for (ConectionAdaptContextVariableModel model : modelList) {
                    String propertyKey = model.getValue();
                    if (keys.contains(propertyKey)) {
                        // Reassign the working list so that transformations from earlier
                        // models are not lost when the transform returns a fresh list.
                        hadoopListProperties = transformHadoopPropertiesForExistContextMode(hadoopListProperties, propertyKey, model.getName());
                        hadoopConn.setHadoopProperties(HadoopRepositoryUtil.getHadoopPropertiesJsonStr(hadoopListProperties));
                    }
                    if (sparkKeys.contains(propertyKey)) {
                        sparkListProperties = transformHadoopPropertiesForExistContextMode(sparkListProperties, propertyKey, model.getName());
                        hadoopConn.setSparkProperties(HadoopRepositoryUtil.getHadoopPropertiesJsonStr(sparkListProperties));
                    }
                }
            }
        }
    }
}
Example usage of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in the Talend project tbd-studio-se:
class ClouderaNavigatorOptionPage, method init.
private void init() {
    // Populate the Cloudera Navigator option widgets from the persisted connection.
    HadoopClusterConnection connection = (HadoopClusterConnection) connectionItem.getConnection();
    // SWT Text#setText throws IllegalArgumentException on a null argument, so map
    // unset connection properties to the empty string defensively.
    // NOTE(review): the EMF getters may already default to "" — confirm; the guard
    // is harmless either way.
    navigator_usernameText.setText(emptyIfNull(connection.getClouderaNaviUserName()));
    navigator_passwordText.setText(emptyIfNull(connection.getClouderaNaviPassword()));
    navigator_urlText.setText(emptyIfNull(connection.getClouderaNaviUrl()));
    navigator_metatata_urlText.setText(emptyIfNull(connection.getClouderaNaviMetadataUrl()));
    navigator_client_urlText.setText(emptyIfNull(connection.getClouderaNaviClientUrl()));
    navigator_autocommitBtn.setSelection(connection.isClouderaAutoCommit());
    navigator_disable_sslBtn.setSelection(connection.isClouderaDisableSSL());
    if (!creation) {
        // "Die on error" is only meaningful when editing an existing connection.
        navigator_die_on_errorBtn.setSelection(connection.isClouderaDieNoError());
    }
}

/**
 * Maps {@code null} to the empty string; SWT text widgets reject {@code null}.
 */
private static String emptyIfNull(String value) {
    return value == null ? "" : value;
}
Example usage of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in the Talend project tbd-studio-se:
class HadoopImportConfsWizard, method performFinish.
@Override
public boolean performFinish() {
    // Finishes the import wizard: exports the selected Hadoop configuration
    // files, packages them into a jar deployed to the connection, and (on
    // creation) applies distribution/version parameters to the connection item.
    IImportConfsWizardPage currentPage = getCurrentConfPage();
    if (currentPage != null) {
        final IRetrieveConfsService confsService = currentPage.getConfsService();
        final boolean isCreateConnectionFromConfs = currentPage.isSupportCreateServiceConnection();
        try {
            if (confsService != null) {
                currentPage.applyFilter();
                List<String> selectedServices = currentPage.getSelectedServices();
                // Export the confs of the chosen services to a local directory;
                // null means there is nothing to package.
                final String confsDir = confsService.exportConfs(selectedServices);
                if (confsDir != null) {
                    // Build and deploy the confs jar in a cancellable background job
                    // with a modal progress dialog (fork=true, cancelable=true).
                    this.getContainer().run(true, true, new IRunnableWithProgress() {
                        @Override
                        public void run(final IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
                            // $NON-NLS-1$
                            monitor.beginTask(// $NON-NLS-1$
                            Messages.getString("HadoopImportConfsWizard.doingImport"), IProgressMonitor.UNKNOWN);
                            try {
                                // The jar name differs depending on whether the import
                                // targets a specific context group.
                                if (contextGroup == null) {
                                    confJarName = HadoopConfsUtils.getConfsJarDefaultName(connectionItem, false);
                                } else {
                                    confJarName = HadoopConfsUtils.getConfsJarDefaultName(connectionItem, false, contextGroup);
                                }
                                HadoopConfsUtils.buildAndDeployConfsJar(connectionItem, contextGroup, confsDir, confJarName);
                            } catch (Exception e) {
                                // Reset so the SPARK-specific branch below is skipped
                                // when the jar could not be built.
                                confJarName = null;
                                throw new InvocationTargetException(e);
                            } finally {
                                monitor.done();
                            }
                        }
                    });
                }
                // Hand the retrieved confs over to the singleton manager so other
                // wizards/forms can pick them up for this cluster id.
                HadoopConfsManager confsManager = HadoopConfsManager.getInstance();
                confsManager.setHadoopClusterId(connectionItem.getProperty().getId());
                confsManager.setConfsMap(getSelectedConfsMap(selectedServices, confsService.getConfsMap()));
                confsManager.setCreateConnectionFromConfs(isCreateConnectionFromConfs);
            }
            if (creation) {
                // First-time creation: stamp distribution/version onto the connection.
                HadoopConfsUtils.setConnectionParameters(connectionItem, optionPage.getDistribution(), optionPage.getVersion(), confsService);
                // For SPARK distributions, record the deployed confs jar as a Maven
                // artifact path on the connection.
                // NOTE(review): groupId/version are hard-coded
                // ("org.talend.libraries"/"6.0.0-SNAPSHOT") — presumably the studio's
                // convention for locally-built libraries; confirm before changing.
                if ("SPARK".equals(((HadoopClusterConnectionImpl) this.connectionItem.getConnection()).getDistribution()) && confJarName != null) {
                    MavenArtifact artifact = new MavenArtifact();
                    artifact.setGroupId("org.talend.libraries");
                    artifact.setArtifactId(confJarName.split(".jar")[0]);
                    artifact.setVersion("6.0.0-SNAPSHOT");
                    artifact.setType(MavenConstants.TYPE_JAR);
                    HadoopClusterConnection connection = (HadoopClusterConnection) connectionItem.getConnection();
                    connection.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CONF_SPECIFIC_JAR, PomUtil.getArtifactFullPath(artifact));
                }
                if (parentForm != null) {
                    parentForm.reload();
                }
            }
        } catch (Exception e) {
            ExceptionHandler.process(e);
        }
    }
    // Always allow the wizard to close; failures were already reported above.
    return true;
}
Example usage of org.talend.repository.model.hadoopcluster.HadoopClusterConnection in the Talend project tbd-studio-se:
class AbstractHadoopSubForm, method addHadoopPropertiesFields.
protected void addHadoopPropertiesFields() {
    // Create the Hadoop-properties dialog, seeded with the properties inherited
    // from the parent Hadoop cluster connection, and add its fields to this form.
    T connection = getConnection();
    HadoopClusterConnection clusterConnection = HCRepositoryUtil.getRelativeHadoopClusterConnection(connection);
    String inheritedProperties = StringUtils.trimToEmpty(clusterConnection.getHadoopProperties());
    List<Map<String, Object>> inheritedPropertyList = HadoopRepositoryUtil.getHadoopPropertiesList(inheritedProperties);
    propertiesDialog = new HadoopPropertiesDialog(getShell(), inheritedPropertyList, getHadoopProperties()) {
        @Override
        protected boolean isReadOnly() {
            // The dialog is editable exactly when the enclosing form is.
            return !isEditable();
        }

        @Override
        protected List<Map<String, Object>> getLatestInitProperties() {
            return getHadoopProperties();
        }

        @Override
        public void applyProperties(List<Map<String, Object>> properties) {
            // Persist the edited properties back onto the connection as JSON.
            getConnection().setHadoopProperties(HadoopRepositoryUtil.getHadoopPropertiesJsonStr(properties));
        }
    };
    propertiesDialog.createPropertiesFields(this);
}
Aggregations