Usage of org.talend.core.model.components.filters.IComponentFilter in project tdi-studio-se by Talend:
the `execute` method of class `ChangeTypeMappingForRedshift`.
/*
 * (non-Javadoc)
 *
 * @see org.talend.core.model.migration.AbstractItemMigrationTask#execute(org.talend.core.model.properties.Item)
 */
@Override
public ExecutionResult execute(Item item) {
    final ProcessType processType = getProcessType(item);
    // Redshift components whose MAPPING property may still carry the obsolete id.
    String[] componentNames = { "tRedshiftInput", "tRedshiftOutput", "tRedshiftLookupInput" }; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    // Rewrites MAPPING from "redshift_new_id" back to "redshift_id".
    IComponentConversion mappingConversion = new IComponentConversion() {

        @Override
        public void transform(NodeType node) {
            if (node == null) {
                return;
            }
            ElementParameterType typeMapping = ComponentUtilities.getNodeProperty(node, "MAPPING"); //$NON-NLS-1$
            if (typeMapping != null && "redshift_new_id".equals(typeMapping.getValue())) { //$NON-NLS-1$
                ComponentUtilities.setNodeValue(node, "MAPPING", "redshift_id"); //$NON-NLS-1$ //$NON-NLS-2$
            }
        }
    };
    for (String componentName : componentNames) {
        try {
            ModifyComponentsAction.searchAndModify(item, processType, new NameComponentFilter(componentName),
                    Arrays.<IComponentConversion> asList(mappingConversion));
        } catch (PersistenceException e) {
            ExceptionHandler.process(e);
            return ExecutionResult.FAILURE;
        }
    }
    return ExecutionResult.SUCCESS_NO_ALERT;
}
Usage of org.talend.core.model.components.filters.IComponentFilter in project tdi-studio-se by Talend:
the `execute` method of class `ChangeUseBatchSizeToFalseForDBOuput`.
/*
 * (non-Javadoc)
 *
 * @see org.talend.core.model.migration.AbstractJobMigrationTask#executeOnProcess(org.talend.core.model.properties.
 * ProcessItem)
 */
@Override
public ExecutionResult execute(Item item) {
    ProcessType processType = getProcessType(item);
    // Only Java jobs are concerned; other languages or items without a process are skipped.
    if (getProject().getLanguage() != ECodeLanguage.JAVA || processType == null) {
        return ExecutionResult.NOTHING_TO_DO;
    }
    // DB output components whose batch option must be forced off on migrated jobs.
    String[] componentsName = new String[] { "tPostgresqlOutput", "tDB2Output", "tMysqlOutput", "tNetezzaOutput", "tSybaseOutput" }; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$
    try {
        for (int i = 0; i < componentsName.length; i++) {
            IComponentFilter filter = new NameComponentFilter(componentsName[i]);
            ModifyComponentsAction.searchAndModify(item, processType, filter, Arrays.<IComponentConversion> asList(new IComponentConversion() {

                @Override
                public void transform(NodeType node) {
                    // Guard against null nodes, consistent with the other conversions in this file.
                    if (node == null) {
                        return;
                    }
                    // Add USE_BATCH_SIZE=false only when the property does not exist yet,
                    // so components already carrying the property are left untouched.
                    if (ComponentUtilities.getNodeProperty(node, "USE_BATCH_SIZE") == null) { //$NON-NLS-1$
                        ComponentUtilities.addNodeProperty(node, "USE_BATCH_SIZE", "CHECK"); //$NON-NLS-1$ //$NON-NLS-2$
                        ComponentUtilities.getNodeProperty(node, "USE_BATCH_SIZE").setValue("false"); //$NON-NLS-1$ //$NON-NLS-2$
                    }
                }
            }));
        }
        return ExecutionResult.SUCCESS_NO_ALERT;
    } catch (Exception e) {
        ExceptionHandler.process(e);
        return ExecutionResult.FAILURE;
    }
}
Usage of org.talend.core.model.components.filters.IComponentFilter in project tdi-studio-se by Talend:
the `execute` method of class `ChangeExtendedInsert2FALSE4tRedshiftOutput`.
@Override
public ExecutionResult execute(Item item) {
    ProcessType processType = getProcessType(item);
    String[] componentsName = new String[] { "tRedshiftOutput" }; //$NON-NLS-1$
    // Adds EXTENDINSERT=false to tRedshiftOutput components that do not define it yet.
    IComponentConversion conversion = new IComponentConversion() {

        @Override
        public void transform(NodeType node) {
            if (node == null) {
                return;
            }
            ElementParameterType parameter = ComponentUtilities.getNodeProperty(node, "EXTENDINSERT"); //$NON-NLS-1$
            if (parameter != null) {
                return;
            }
            ComponentUtilities.addNodeProperty(node, "EXTENDINSERT", "CHECK"); //$NON-NLS-1$ //$NON-NLS-2$
            ComponentUtilities.setNodeValue(node, "EXTENDINSERT", "false"); //$NON-NLS-1$ //$NON-NLS-2$
        }
    };
    for (String name : componentsName) {
        try {
            ModifyComponentsAction.searchAndModify(item, processType, new NameComponentFilter(name),
                    Arrays.<IComponentConversion> asList(conversion));
        } catch (PersistenceException e) {
            ExceptionHandler.process(e);
            return ExecutionResult.FAILURE;
        }
    }
    return ExecutionResult.SUCCESS_WITH_ALERT;
}
Usage of org.talend.core.model.components.filters.IComponentFilter in project tdi-studio-se by Talend:
the `execute` method of class `ChangeFileInputJSONUrlUnCheckWhenXpathCheck`.
@Override
public ExecutionResult execute(Item item) {
    ProcessType processType = getProcessType(item);
    String[] componentNames = { "tFileInputJSON" }; //$NON-NLS-1$
    // When "read by XPath" is enabled, the URL option must be unchecked.
    IComponentConversion uncheckUrl = new IComponentConversion() {

        @Override
        public void transform(NodeType node) {
            ElementParameterType readByXpath = ComponentUtilities.getNodeProperty(node, "READBYXPATH"); //$NON-NLS-1$
            ElementParameterType useUrl = ComponentUtilities.getNodeProperty(node, "USEURL"); //$NON-NLS-1$
            boolean xpathChecked = readByXpath != null && "true".equals(readByXpath.getValue()); //$NON-NLS-1$
            boolean urlChecked = useUrl != null && "true".equals(useUrl.getValue()); //$NON-NLS-1$
            if (xpathChecked && urlChecked) {
                ComponentUtilities.setNodeValue(node, "USEURL", "false"); //$NON-NLS-1$ //$NON-NLS-2$
            }
        }
    };
    for (String componentName : componentNames) {
        try {
            ModifyComponentsAction.searchAndModify(item, processType, new NameComponentFilter(componentName),
                    Arrays.<IComponentConversion> asList(uncheckUrl));
        } catch (PersistenceException e) {
            ExceptionHandler.process(e);
            return ExecutionResult.FAILURE;
        }
    }
    return ExecutionResult.SUCCESS_NO_ALERT;
}
Usage of org.talend.core.model.components.filters.IComponentFilter in project tdi-studio-se by Talend:
the `execute` method of class `ChangeHadoopVersionValue4TDQComponents`.
@Override
public ExecutionResult execute(Item item) {
    ProcessType processType = getProcessType(item);
    // TDQ Hadoop components whose DB_VERSION jar-list value must be mapped to a symbolic version id.
    String[] tdqHadoopCompNames = { "tGenKeyHadoop", "tMatchGroupHadoop" }; //$NON-NLS-1$ //$NON-NLS-2$
    IComponentConversion changeHDFSVersionValue = new IComponentConversion() {

        @Override
        public void transform(NodeType node) {
            ElementParameterType version = ComponentUtilities.getNodeProperty(node, "DB_VERSION"); //$NON-NLS-1$
            if (version == null) {
                return;
            }
            String value = version.getValue();
            if (value == null) {
                return;
            }
            if (value.equals("hadoop-core-0.20.204.0.jar;commons-logging-1.1.1.jar")) { //$NON-NLS-1$
                version.setValue("APACHE_0_20_204"); //$NON-NLS-1$
            } else if (value.startsWith("hadoop-0.20.2-cdh3u1-core.jar;commons-logging-1.0.4.jar")) { //$NON-NLS-1$
                version.setValue("Cloudera_0_20_CDH3U1"); //$NON-NLS-1$
            } else if (value.startsWith("hadoop-core-1.0.0.jar;commons-logging-1.1.1.jar;hdp-dummy.jar")) { //$NON-NLS-1$
                // Bug fix: this longer prefix must be tested BEFORE the plain
                // "hadoop-core-1.0.0.jar;commons-logging-1.1.1.jar" prefix below; in the
                // previous order the shorter startsWith matched first and this HDP_1_0
                // branch was unreachable.
                version.setValue("HDP_1_0"); //$NON-NLS-1$
            } else if (value.startsWith("hadoop-core-1.0.0.jar;commons-logging-1.1.1.jar")) { //$NON-NLS-1$
                version.setValue("HADOOP_1_0_0"); //$NON-NLS-1$
            } else if (value.startsWith("hadoop-auth-2.0.0-cdh4.0.1.jar;hadoop-common-2.0.0-cdh4.0.1.jar;hadoop-hdfs-2.0.0-cdh4.0.1.jar;hadoop-core-2.0.0-mr1-cdh4.0.1.jar;protobuf-java-2.4.0a.jar;slf4j-api-1.6.1.jar;slf4j-log4j12-1.6.1.jar;guava-11.0.2.jar;commons-logging-1.1.1.jar")) { //$NON-NLS-1$
                version.setValue("Cloudera_CDH4"); //$NON-NLS-1$
            }
        }
    };
    for (String name : tdqHadoopCompNames) {
        IComponentFilter filter = new NameComponentFilter(name);
        try {
            ModifyComponentsAction.searchAndModify(item, processType, filter, Arrays.<IComponentConversion> asList(changeHDFSVersionValue));
        } catch (PersistenceException e) {
            ExceptionHandler.process(e);
            return ExecutionResult.FAILURE;
        }
    }
    return ExecutionResult.SUCCESS_NO_ALERT;
}
Aggregations