use of org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem in project tbd-studio-se by Talend.
the class HadoopClusterConnectionRelationshipHandler method collect.
@Override
protected Set<Relation> collect(Item baseItem) {
    if (baseItem instanceof HadoopClusterConnectionItem) {
        HadoopClusterConnectionItem hcItem = (HadoopClusterConnectionItem) baseItem;
        HadoopClusterConnection connection = (HadoopClusterConnection) hcItem.getConnection();
        if (connection != null) {
            // A Hadoop cluster item whose DF version is a user-defined dynamic distribution
            // depends on that distribution.
            String version = connection.getDfVersion();
            if (StringUtils.isNotEmpty(version)) {
                if (DynamicDistributionManager.getInstance().isUsersDynamicDistribution(version)) {
                    Set<Relation> relationSet = new HashSet<Relation>();
                    Relation addedRelation = new Relation();
                    addedRelation.setId(version);
                    addedRelation.setType(RelationshipItemBuilder.DYNAMIC_DISTRIBUTION_RELATION);
                    addedRelation.setVersion(RelationshipItemBuilder.LATEST_VERSION);
                    relationSet.add(addedRelation);
                    return relationSet;
                }
            }
        }
    }
    if (baseItem instanceof DatabaseConnectionItem) {
        Connection connection = ((DatabaseConnectionItem) baseItem).getConnection();
        if (connection != null) {
            // Database connections whose DB_VERSION parameter resolves to a user-defined
            // dynamic distribution get the same relation.
            Object value = RepositoryToComponentProperty.getValue(connection, EParameterName.DB_VERSION.getName(), null);
            if (value instanceof String) {
                String version = (String) value;
                if (DynamicDistributionManager.getInstance().isUsersDynamicDistribution(version)) {
                    Set<Relation> relationSet = new HashSet<Relation>();
                    Relation addedRelation = new Relation();
                    addedRelation.setId(version);
                    addedRelation.setType(RelationshipItemBuilder.DYNAMIC_DISTRIBUTION_RELATION);
                    addedRelation.setVersion(RelationshipItemBuilder.LATEST_VERSION);
                    relationSet.add(addedRelation);
                    return relationSet;
                }
            }
        }
    }
    return Collections.emptySet();
}
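Both branches build the same single-element relation set. As a hedged refactoring sketch only, the duplication could be factored into a small private helper; the dynamicDistributionRelation name is hypothetical and does not exist in the original class, and the body uses only the calls already shown above:

// Hypothetical refactoring sketch (not in tbd-studio-se): builds the single relation that
// ties an item to the user-defined dynamic distribution identified by the given version.
private Set<Relation> dynamicDistributionRelation(String version) {
    Set<Relation> relationSet = new HashSet<Relation>();
    Relation addedRelation = new Relation();
    addedRelation.setId(version);
    addedRelation.setType(RelationshipItemBuilder.DYNAMIC_DISTRIBUTION_RELATION);
    addedRelation.setVersion(RelationshipItemBuilder.LATEST_VERSION);
    relationSet.add(addedRelation);
    return relationSet;
}

Each branch would then reduce to return dynamicDistributionRelation(version); inside its innermost check.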
use of org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem in project tbd-studio-se by Talend.
the class ChangeHadoopConfJarNameMigrationTask method execute.
@Override
public ExecutionResult execute(Item item) {
    // The library manager service is needed to look up and retrieve the deployed confs jar.
    ILibraryManagerService libService = null;
    if (GlobalServiceRegister.getDefault().isServiceRegistered(ILibraryManagerService.class)) {
        libService = (ILibraryManagerService) GlobalServiceRegister.getDefault().getService(ILibraryManagerService.class);
    }
    if (libService == null) {
        return ExecutionResult.NOTHING_TO_DO;
    }
    if (item instanceof HadoopClusterConnectionItem) {
        HadoopClusterConnectionItem hcItem = (HadoopClusterConnectionItem) item;
        HadoopClusterConnection connection = (HadoopClusterConnection) hcItem.getConnection();
        // Only migrate items that do not yet embed their configuration jar content.
        if (connection.getConfFile() == null) {
            // The legacy confs jar is named after the item's technical id.
            String confJarName = HadoopParameterUtil.getConfsJarDefaultName(hcItem.getProperty().getId());
            if (!libService.contains(confJarName)) {
                return ExecutionResult.NOTHING_TO_DO;
            }
            // Retrieve the legacy jar into a temp folder and store its bytes on the connection.
            File confsTempFolder = new File(HadoopConfsUtils.getConfsJarTempFolder());
            boolean retrieved = libService.retrieve(confJarName, confsTempFolder.getAbsolutePath(), false);
            File confJarFile = new File(confsTempFolder, confJarName);
            if (retrieved && confJarFile.exists()) {
                try {
                    connection.setConfFile(FileUtils.readFileToByteArray(confJarFile));
                    ProxyRepositoryFactory.getInstance().save(hcItem, true);
                    // Clear the deployed cache entry for the new confs jar name (derived from the label).
                    String newConfJarName = HadoopParameterUtil.getConfsJarDefaultName(hcItem.getProperty().getLabel());
                    HadoopConfsUtils.removeFromDeployedCache(hcItem, newConfJarName);
                    return ExecutionResult.SUCCESS_WITH_ALERT;
                } catch (Exception e) {
                    ExceptionHandler.process(e);
                    return ExecutionResult.FAILURE;
                }
            }
        }
    }
    return ExecutionResult.NOTHING_TO_DO;
}
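As a rough sketch of how such a task might be driven outside the migration framework (which normally calls execute(Item) itself), assuming only the execute method and the ExecutionResult values shown above; the runOn helper and the items list are illustrative assumptions, not Talend API:

// Illustrative driver only (not Talend's migration framework API): runs the task on a
// list of items supplied by the caller and reports the most significant result.
private static ExecutionResult runOn(ChangeHadoopConfJarNameMigrationTask task, List<Item> items) {
    ExecutionResult overall = ExecutionResult.NOTHING_TO_DO;
    for (Item item : items) {
        ExecutionResult result = task.execute(item);
        if (result == ExecutionResult.FAILURE) {
            return result; // stop at the first item that fails to migrate
        }
        if (result == ExecutionResult.SUCCESS_WITH_ALERT) {
            overall = result;
        }
    }
    return overall;
}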
use of org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem in project tbd-studio-se by Talend.
the class UpdateHadoopConfsForContextualHadoopClusterMigrationTask method execute.
@Override
public ExecutionResult execute(Item item) {
    if (item instanceof HadoopClusterConnectionItem) {
        boolean isModified = false;
        HadoopClusterConnectionItem hcItem = (HadoopClusterConnectionItem) item;
        try {
            // Update the contextual Hadoop configurations and persist the item only if something changed.
            isModified = HadoopConfsUtils.updateContextualHadoopConfs(hcItem);
            if (isModified) {
                ProxyRepositoryFactory.getInstance().save(hcItem, true);
                return ExecutionResult.SUCCESS_WITH_ALERT;
            }
        } catch (PersistenceException e) {
            ExceptionHandler.process(e);
            return ExecutionResult.FAILURE;
        }
    }
    return ExecutionResult.NOTHING_TO_DO;
}
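A minimal invocation sketch, assuming the task exposes the usual public no-arg constructor used by the migration framework and that hcItem is a HadoopClusterConnectionItem obtained elsewhere; both assumptions are for illustration only:

// Illustrative call only: the migration framework normally instantiates and runs the task.
UpdateHadoopConfsForContextualHadoopClusterMigrationTask task =
        new UpdateHadoopConfsForContextualHadoopClusterMigrationTask(); // no-arg constructor assumed
ExecutionResult result = task.execute(hcItem); // hcItem: a HadoopClusterConnectionItem obtained elsewhere
if (result == ExecutionResult.FAILURE) {
    // the contextual Hadoop configurations could not be updated or saved
}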
use of org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem in project tbd-studio-se by Talend.
the class CreateHadoopDBNodeAction method getWizard.
/*
 * (non-Javadoc)
 *
 * @see
 * org.talend.repository.hadoopcluster.action.common.CreateHadoopNodeAction#getWizard(org.eclipse.ui.IWorkbench,
 * boolean, org.talend.repository.model.RepositoryNode, java.lang.String[])
 */
@Override
protected IWizard getWizard(IWorkbench workbench, boolean isCreate, RepositoryNode node, String[] existingNames) {
    if (isCreate) {
        // When creating from a Hadoop cluster node, root the database wizard at the metadata
        // connections node and pre-fill it with parameters taken from the cluster connection.
        RepositoryNode dbRootNode = (RepositoryNode) node.getRoot().getRootRepositoryNode(ERepositoryObjectType.METADATA_CONNECTIONS);
        HadoopClusterConnectionItem hcConnectionItem = HCRepositoryUtil.getHCConnectionItemFromRepositoryNode(node);
        Map<String, String> initMap = new HashMap<String, String>();
        initConnectionParameters(initMap, hcConnectionItem);
        return new DatabaseWizard(workbench, isCreate, dbRootNode, existingNames, initMap);
    } else {
        return new DatabaseWizard(workbench, isCreate, node, existingNames);
    }
}
use of org.talend.repository.model.hadoopcluster.HadoopClusterConnectionItem in project tbd-studio-se by Talend.
the class HadoopClusterTokenCollector method getTokenDetailsForCurrentProject.
@Override
protected JSONObject getTokenDetailsForCurrentProject() throws Exception {
    JSONObject typesHadoop = new JSONObject();
    for (IRepositoryViewObject rvo : ProxyRepositoryFactory.getInstance().getAll(ERepositoryObjectType.getType(HADOOPCLUSTER))) {
        HadoopClusterConnectionItem item = (HadoopClusterConnectionItem) rvo.getProperty().getItem();
        HadoopClusterConnection connection = (HadoopClusterConnection) item.getConnection();
        // Count clusters per "distribution/version" pair.
        String distrib = connection.getDistribution() + "/" + connection.getDfVersion(); //$NON-NLS-1$
        int nbDbTypes = 1;
        if (typesHadoop.has(distrib)) {
            nbDbTypes = typesHadoop.getInt(distrib);
            nbDbTypes++;
        }
        typesHadoop.put(distrib, nbDbTypes);
    }
    JSONObject hadoopCluster = new JSONObject();
    JSONObject types = new JSONObject();
    types.put("types", typesHadoop);
    hadoopCluster.put(HADOOPCLUSTER, types);
    return hadoopCluster;
}
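The returned object groups Hadoop cluster connections by a "distribution/dfVersion" key under a "types" object, nested under the HADOOPCLUSTER key, roughly { <HADOOPCLUSTER>: { "types": { "<distribution>/<dfVersion>": count, ... } } }. The counting idiom can be isolated as below; incrementCount is a hypothetical helper name, not part of the collector, and it relies only on the JSONObject has/getInt/put calls already used above:

// Hypothetical helper for illustration: increments a per-key counter stored in a JSONObject,
// which is the same idiom used for the "distribution/version" keys above.
private static void incrementCount(JSONObject counters, String key) throws Exception {
    int count = 1;
    if (counters.has(key)) {
        count = counters.getInt(key) + 1;
    }
    counters.put(key, count);
}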