Use of org.talend.dataprofiler.core.service.AbstractSvnRepositoryService in project tdq-studio-se by Talend:
the fillContextMenu method of the CreateHiveTableActionProvider class.
/**
 * Contributes a "create Hive table" action to the context menu when the selected
 * repository node is an HDFS-of-Hadoop-cluster connection node.
 *
 * The contribution is suppressed when the menu should not be shown at all
 * ({@link #isShowMenu()}) or when the repository is in read-only SVN mode.
 *
 * @param menu the context menu manager to contribute to
 */
@Override
public void fillContextMenu(IMenuManager menu) {
    if (!isShowMenu()) {
        return;
    }
    // Read-only role on svn repository mode: no create actions are offered.
    AbstractSvnRepositoryService svnReposService =
            GlobalServiceRegister.getDefault().getSvnRepositoryService(AbstractSvnRepositoryService.class);
    if (svnReposService != null && svnReposService.isReadonly()) {
        return;
    }
    // instanceof is false for null, so no separate null check is needed here.
    RepositoryNode node = (RepositoryNode) getFirstRepositoryNode();
    if (node instanceof HDFSOfHCConnectionNode) {
        menu.add(HadoopClusterUtils.getDefault().createActionOfHiveTable(node));
    }
}
Use of org.talend.dataprofiler.core.service.AbstractSvnRepositoryService in project tdq-studio-se by Talend:
the fillContextMenu method of the CreateAnalysisOnHDFSActionProvider class.
/**
 * Adds a "create analysis on HDFS" action to the context menu for an
 * HDFS-of-Hadoop-cluster connection node. Contributes nothing when the menu
 * is hidden or the repository is read-only (SVN mode).
 *
 * @param menu the context menu manager to contribute to
 */
@Override
public void fillContextMenu(IMenuManager menu) {
    if (!isShowMenu()) {
        return;
    }
    // Read-only svn repository: do not offer create actions.
    AbstractSvnRepositoryService svnService =
            GlobalServiceRegister.getDefault().getSvnRepositoryService(AbstractSvnRepositoryService.class);
    if (svnService != null && svnService.isReadonly()) {
        return;
    }
    RepositoryNode selected = (RepositoryNode) getFirstRepositoryNode();
    if (selected == null) {
        return;
    }
    if (selected instanceof HDFSOfHCConnectionNode) {
        menu.add(new CreateAnalysisOnHDFSAction(selected));
    }
}
Use of org.talend.dataprofiler.core.service.AbstractSvnRepositoryService in project tdq-studio-se by Talend:
the fillContextMenu method of the CreateHiveOfHCActionProvider class.
/**
 * Adds a "create Hive connection of Hadoop cluster" action to the context menu
 * for a Hadoop-cluster connection node or a Hive-of-HC folder node. Nothing is
 * contributed when the menu is hidden, the repository is read-only (SVN mode),
 * or the node asks to hide the action (bug 20051).
 *
 * @param menu the context menu manager to contribute to
 */
@Override
public void fillContextMenu(IMenuManager menu) {
    // MOD mzhao: honor the user readonly role on svn repository mode.
    if (!isShowMenu()) {
        return;
    }
    AbstractSvnRepositoryService svnService =
            GlobalServiceRegister.getDefault().getSvnRepositoryService(AbstractSvnRepositoryService.class);
    if (svnService != null && svnService.isReadonly()) {
        return;
    }
    // MOD gdbu 2011-4-1 bug 20051: some hadoop cluster nodes must not show this action.
    RepositoryNode selected = (RepositoryNode) getFirstRepositoryNode();
    if (selected == null || HadoopClusterUtils.getDefault().hideAction(selected)) {
        return;
    }
    // ~20051
    if (selected instanceof HadoopClusterConnectionRepNode || selected instanceof HiveOfHCFolderRepNode) {
        menu.add(new CreateHiveOfHCAction(selected));
    }
}
Use of org.talend.dataprofiler.core.service.AbstractSvnRepositoryService in project tdq-studio-se by Talend:
the write method of the FileSystemImportWriter class.
/*
 * After check the conflicts of the imported object, calling this method (from ImportWizard) replace the conflicts
 * object in the records if the record is valid; then call the finish to do migrate. OR: merge the conflict system
 * indicators if valid.(overwrite)
 *
 * @see
 * org.talend.dataprofiler.core.ui.imex.model.IImexWriter#write(org.talend.dataprofiler.core.ui.imex.model.ItemRecord
 * [], org.eclipse.core.runtime.IProgressMonitor)
 */
// Imports the given records inside a RepositoryWorkUnit, then runs a second
// work unit (postFinish) to merge/update UDIs and Patterns after the imported
// items are available in the repository.
public void write(ItemRecord[] records, IProgressMonitor monitor) {
// Tolerate a null monitor so callers without UI progress still work.
if (monitor == null) {
monitor = new NullProgressMonitor();
}
// Effectively-final copies for use inside the anonymous work unit.
final ItemRecord[] fRecords = records;
final IProgressMonitor fMonitor = monitor;
// Reset per-run accumulators before starting a new import.
need2MergeModelElementMap.clear();
allImportItems.clear();
RepositoryWorkUnit<Object> workUnit = new // $NON-NLS-1$
RepositoryWorkUnit<Object>(// $NON-NLS-1$
"Import TDQ Element") {
@Override
protected void run() {
try {
for (ItemRecord record : fRecords) {
// Honor user cancellation between records.
if (fMonitor.isCanceled()) {
break;
}
// Source path -> destination path pairs for this record's resources.
Map<IPath, IPath> toImportMap = mapping(record);
// $NON-NLS-1$
fMonitor.subTask("Importing " + record.getName());
if (record.isValid()) {
// isDeleted: set true only when the conflicting object is physically
// deleted below; later used to register an SVN import-override (TDQ-12180).
boolean isDeleted = false;
// $NON-NLS-1$
log.info("Importing " + record.getFile().getAbsolutePath());
// Delete the conflict node before import.
IRepositoryViewObject object = record.getConflictObject();
// isDelete: whether the imported files should be copied over (true),
// or the conflict is resolved by merging instead (false).
boolean isDelete = true;
ModelElement modEle = record.getElement();
if (object != null) {
// is a system indicator definition, (using its UUid to find this SI not label)
if (isIndicatorDefinition(modEle)) {
if (isDQRule(modEle)) {
if (isParserRule(modEle)) {
// Parser rules are merged into the existing item, not overwritten.
mergeParserRule(record, (TDQBusinessRuleItem) object.getProperty().getItem());
isDelete = false;
} else if (isWhereRule(modEle)) {
// do nothing here now
}
} else if (isMatchRuleDefinition(modEle)) {
// do nothing here now
} else {
// System Indicator and UDI need merge
TDQIndicatorDefinitionItem indItem = (TDQIndicatorDefinitionItem) object.getProperty().getItem();
mergeSystemIndicator(record, indItem);
// only add it when it is UDIndicatorDefinition
if (record.getElement() instanceof UDIndicatorDefinition) {
need2MergeModelElementMap.put(indItem, record.getElement());
}
isDelete = false;
}
} else if (isPattern(modEle)) {
// Patterns are merged and queued for the post-finish update pass.
TDQPatternItem patternItem = (TDQPatternItem) object.getProperty().getItem();
mergePattern(record, patternItem);
need2MergeModelElementMap.put(patternItem, record.getElement());
isDelete = false;
} else {
// remove the dependency of the object
EObjectHelper.removeDependencys(PropertyHelper.getModelElement(object.getProperty()));
isDeleted = true;
// delete the object
ProxyRepositoryFactory.getInstance().deleteObjectPhysical(object);
}
}
if (isDelete) {
// Copy every resource of this record into the workspace.
updateFiles.clear();
updateFilesCoverd.clear();
for (IPath resPath : toImportMap.keySet()) {
IPath desPath = toImportMap.get(resPath);
// Lock the shared resource set while writing the file to avoid
// concurrent (un)load of resources during the copy.
ResourceSet resourceSet = ProxyRepositoryFactory.getInstance().getRepositoryFactoryFromProvider().getResourceManager().resourceSet;
synchronized (resourceSet) {
write(resPath, desPath);
allCopiedFiles.add(desPath.toFile());
}
allImportItems.add(desPath);
// TDQ-12180
// When the import overwrote a physically-deleted object, tell the SVN
// service so the overriding path is handled correctly.
if (isDeleted) {
AbstractSvnRepositoryService svnReposService = GlobalServiceRegister.getDefault().getSvnRepositoryService(AbstractSvnRepositoryService.class);
if (svnReposService != null) {
svnReposService.addIfImportOverride(desPath);
}
}
}
// Post-copy fix-ups collected by write(resPath, desPath) above.
for (File file : updateFiles) {
update(file, false);
}
for (File file : updateFilesCoverd) {
update(file, true);
}
}
} else {
// Invalid record: report all collected errors and skip it.
for (String error : record.getErrors()) {
log.error(error);
}
}
fMonitor.worked(1);
}
// Run migration tasks on everything imported in this pass.
finish(fRecords, fMonitor);
} catch (Exception e) {
log.error(e, e);
}
}
};
workUnit.setAvoidUnloadResources(Boolean.TRUE);
ProxyRepositoryFactory.getInstance().executeRepositoryWorkUnit(workUnit);
// after above workUnit executed, the imported items will worked, than can do merge/update about UDI and Pattern
RepositoryWorkUnit<Object> workUnitFinish = new // $NON-NLS-1$
RepositoryWorkUnit<Object>(// $NON-NLS-1$
"Finish Import TDQ Element") {
@Override
protected void run() throws LoginException, PersistenceException {
try {
postFinish();
} catch (IOException e) {
log.error(e, e);
}
}
};
workUnitFinish.setAvoidUnloadResources(Boolean.TRUE);
ProxyRepositoryFactory.getInstance().executeRepositoryWorkUnit(workUnitFinish);
}
Use of org.talend.dataprofiler.core.service.AbstractSvnRepositoryService in project tdq-studio-se by Talend:
the fillContextMenu method of the CreateHDFSActionProvider class.
/**
 * Adds a "create HDFS connection" action to the context menu for a
 * Hadoop-cluster connection node or an HDFS-of-HC folder node. Contributes
 * nothing when the menu is hidden or the repository is read-only (SVN mode).
 *
 * @param menu the context menu manager to contribute to
 */
@Override
public void fillContextMenu(IMenuManager menu) {
    // MOD mzhao: honor the user readonly role on svn repository mode.
    if (!isShowMenu()) {
        return;
    }
    AbstractSvnRepositoryService svnService =
            GlobalServiceRegister.getDefault().getSvnRepositoryService(AbstractSvnRepositoryService.class);
    if (svnService != null && svnService.isReadonly()) {
        return;
    }
    // MOD gdbu 2011-4-1 bug 20051
    RepositoryNode selected = (RepositoryNode) getFirstRepositoryNode();
    if (selected == null) {
        return;
    }
    // ~20051
    if (selected instanceof HadoopClusterConnectionRepNode || selected instanceof HDFSOfHCFolderRepNode) {
        menu.add(new CreateHDFSAction(selected));
    }
}
Aggregations