use of org.talend.components.salesforce.tsalesforceinput.TSalesforceInputProperties in project components by Talend.
the class SalesforceWriterTestIT method testUpsertAdditionalInfo.
/**
 * Test that tSalesforceOutput adds additional information to upserted records.
 */
@Test
public void testUpsertAdditionalInfo() throws Throwable {
    // 1. Prepare the output component configuration
    ComponentDefinition sfDef = new TSalesforceOutputDefinition();
    TSalesforceOutputProperties sfProps = (TSalesforceOutputProperties) sfDef.createRuntimeProperties();
    SalesforceTestBase.setupProps(sfProps.connection, false);
    sfProps.module.setValue("moduleName", "Contact");
    sfProps.module.main.schema.setValue(SCHEMA_CONTACT);
    sfProps.outputAction.setValue(OutputAction.UPSERT);
    sfProps.ceaseForError.setValue(false);
    sfProps.extendInsert.setValue(false);
    sfProps.retrieveInsertId.setValue(true);
    sfProps.upsertKeyColumn.setValue("Email");
    sfProps.module.schemaListener.afterSchema();
    // 2. Prepare the data
    List<IndexedRecord> records = new ArrayList<>();
    String random = String.valueOf(createNewRandom());
    IndexedRecord r1 = new GenericData.Record(SCHEMA_CONTACT);
    r1.put(0, "aaa" + random + "@talend.com");
    r1.put(1, "F_" + random);
    r1.put(2, "L_" + random);
    IndexedRecord r2 = new GenericData.Record(SCHEMA_CONTACT);
    r2.put(0, "bbb" + random + "@talend.com");
    IndexedRecord r3 = new GenericData.Record(SCHEMA_CONTACT);
    r3.put(0, "ccc" + random + "@talend.com");
    r3.put(1, "F_" + random);
    r3.put(2, "L_" + random);
    IndexedRecord r4 = new GenericData.Record(SCHEMA_CONTACT);
    r4.put(0, "aaa" + random + "@talend.com");
    r4.put(1, "F_update_" + random);
    r4.put(2, "L_update_" + random);
    // 3. Write the data
    SalesforceSink salesforceSink = new SalesforceSink();
    salesforceSink.initialize(adaptor, sfProps);
    salesforceSink.validate(adaptor);
    SalesforceWriter writer = salesforceSink.createWriteOperation().createWriter(adaptor);
    List<IndexedRecord> successRecords = new ArrayList<>();
    List<IndexedRecord> rejectRecords = new ArrayList<>();
    writer.open("foo");
    try {
        // Write and collect the results.
        // insert
        writer.write(r1);
        successRecords.addAll(writer.getSuccessfulWrites());
        rejectRecords.addAll(writer.getRejectedWrites());
        // reject
        writer.write(r2);
        successRecords.addAll(writer.getSuccessfulWrites());
        rejectRecords.addAll(writer.getRejectedWrites());
        // insert
        writer.write(r3);
        successRecords.addAll(writer.getSuccessfulWrites());
        rejectRecords.addAll(writer.getRejectedWrites());
        // update
        writer.write(r4);
        successRecords.addAll(writer.getSuccessfulWrites());
        rejectRecords.addAll(writer.getRejectedWrites());
    } finally {
        writer.close();
    }
    // 4. Check that the returned IndexedRecords include the expected information
    assertEquals(3, successRecords.size());
    assertEquals(1, rejectRecords.size());
    IndexedRecord record_1 = successRecords.get(0);
    IndexedRecord record_2 = successRecords.get(1);
    IndexedRecord record_3 = successRecords.get(2);
    Schema recordSchema = record_1.getSchema();
    assertEquals(6, recordSchema.getFields().size());
    assertEquals(4, recordSchema.getField(TSalesforceOutputProperties.FIELD_SALESFORCE_ID).pos());
    assertEquals(5, recordSchema.getField(TSalesforceOutputProperties.FIELD_STATUS).pos());
    assertEquals("aaa" + random + "@talend.com", record_1.get(0));
    assertNotNull(record_1.get(4));
    assertEquals("created", record_1.get(5));
    assertEquals("ccc" + random + "@talend.com", record_2.get(0));
    assertNotNull(record_2.get(4));
    assertEquals("created", record_2.get(5));
    assertEquals("aaa" + random + "@talend.com", record_3.get(0));
    assertEquals(record_3.get(4), record_1.get(4));
    assertEquals("updated", record_3.get(5));
    // 5. Check the result in Salesforce
    TSalesforceInputProperties sfInputProps = getSalesforceInputProperties();
    sfInputProps.copyValuesFrom(sfProps);
    sfInputProps.condition.setValue("FirstName like '%" + random + "'");
    List<IndexedRecord> inpuRecords = readRows(sfInputProps);
    assertEquals(2, inpuRecords.size());
    IndexedRecord inputRecords_1 = inpuRecords.get(0);
    IndexedRecord inputRecords_2 = inpuRecords.get(1);
    assertThat(Arrays.asList("aaa" + random + "@talend.com", "ccc" + random + "@talend.com"),
            containsInAnyOrder(inputRecords_1.get(0), inputRecords_2.get(0)));
    assertThat(Arrays.asList("F_" + random, "F_update_" + random),
            containsInAnyOrder(inputRecords_1.get(1), inputRecords_2.get(1)));
    assertThat(Arrays.asList("L_" + random, "L_update_" + random),
            containsInAnyOrder(inputRecords_1.get(2), inputRecords_2.get(2)));
    // 6. Delete the test data
    deleteRows(inpuRecords, sfInputProps);
}
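The read-back in step 5 relies on two helpers defined elsewhere in SalesforceWriterTestIT: getSalesforceInputProperties() builds a TSalesforceInputProperties instance and readRows(...) runs the query and collects the results. Their bodies are not shown on this page; purely as a sketch, and assuming the Source/Reader wiring that cleanupAllRecords uses further down, a readRows-style helper could look roughly like this:

// Hypothetical sketch only - the real readRows helper lives in the test utilities
// and may differ. It reuses the Source/Reader pattern shown in cleanupAllRecords below.
private static List<IndexedRecord> readRowsSketch(TSalesforceInputProperties inputProps) throws IOException {
    DefaultComponentRuntimeContainerImpl container = new DefaultComponentRuntimeContainerImpl();
    SalesforceSource source = new SalesforceSource();
    source.initialize(container, inputProps);
    source.validate(container);
    @SuppressWarnings("unchecked")
    Reader<IndexedRecord> reader = source.createReader(container);
    List<IndexedRecord> rows = new ArrayList<>();
    try {
        // Iterate the reader the same way cleanupAllRecords does: start(), then advance().
        for (boolean more = reader.start(); more; more = reader.advance()) {
            rows.add(reader.getCurrent());
        }
    } finally {
        reader.close();
    }
    return rows;
}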
use of org.talend.components.salesforce.tsalesforceinput.TSalesforceInputProperties in project components by Talend.
the class SalesforceWriterTestIT method getFirstCreatedAccountRecordId.
public String getFirstCreatedAccountRecordId() throws Exception {
    TSalesforceInputProperties sfInputProps = getSalesforceInputProperties();
    SalesforceTestBase.setupProps(sfInputProps.connection, false);
    sfInputProps.module.setValue("moduleName", "Account");
    sfInputProps.module.main.schema.setValue(SCHEMA_UPDATE_ACCOUNT);
    sfInputProps.condition.setValue("Id != null ORDER BY CreatedDate");
    List<IndexedRecord> inpuRecords = readRows(sfInputProps);
    String firstId = null;
    if (inpuRecords != null && inpuRecords.size() > 0) {
        LOGGER.debug("Retrieved records size from Account is: " + inpuRecords.size());
        assertNotNull(inpuRecords.get(0).get(0));
        firstId = String.valueOf(inpuRecords.get(0).get(0));
        LOGGER.debug("The first record Id: " + firstId);
    } else {
        LOGGER.error("Module Account has no records!");
    }
    return firstId;
}
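Note that getFirstCreatedAccountRecordId() returns null when the Account module has no records, and testUploadAttachment below would then create attachments with a null ParentId. A small guard (hypothetical, not in the original test) could skip the dependent test instead, using JUnit's Assume:

// Hypothetical guard, not part of the original test: skip the attachment upload
// when there is no Account record to use as ParentId.
String parentId = getFirstCreatedAccountRecordId();
org.junit.Assume.assumeNotNull(parentId);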
use of org.talend.components.salesforce.tsalesforceinput.TSalesforceInputProperties in project components by Talend.
the class SalesforceWriterTestIT method testUploadAttachment.
@Test
public void testUploadAttachment() throws Throwable {
    ComponentDefinition sfDef = new TSalesforceOutputDefinition();
    TSalesforceOutputProperties sfProps = (TSalesforceOutputProperties) sfDef.createRuntimeProperties();
    SalesforceTestBase.setupProps(sfProps.connection, false);
    sfProps.module.setValue("moduleName", "Attachment");
    sfProps.module.main.schema.setValue(SCHEMA_ATTACHMENT);
    sfProps.ceaseForError.setValue(true);
    sfProps.module.schemaListener.afterSchema();
    List<IndexedRecord> records = new ArrayList<>();
    String random = String.valueOf(createNewRandom());
    LOGGER.debug("Getting the ParentId for attachment records...");
    String parentId = getFirstCreatedAccountRecordId();
    LOGGER.debug("ParentId for attachments is: " + parentId);
    IndexedRecord r1 = new GenericData.Record(SCHEMA_ATTACHMENT);
    r1.put(0, "attachment_1_" + random + ".txt");
    r1.put(1, "VGhpcyBpcyBhIHRlc3QgZmlsZSAxICE=");
    r1.put(2, "text/plain");
    r1.put(3, parentId);
    IndexedRecord r2 = new GenericData.Record(SCHEMA_ATTACHMENT);
    r2.put(0, "attachment_2_" + random + ".txt");
    r2.put(1, "QmFzZSA2NC1lbmNvZGVkIGJpbmFyeSBkYXRhLiBGaWVsZHMgb2YgdGhpcyB0eXBlIGFyZSB1c2VkIGZvciBzdG9yaW5"
            + "nIGJpbmFyeSBmaWxlcyBpbiBBdHRhY2htZW50IHJlY29yZHMsIERvY3VtZW50IHJlY29yZHMsIGFuZCBTY2"
            + "9udHJvbCByZWNvcmRzLiBJbiB0aGVzZSBvYmplY3RzLCB0aGUgQm9keSBvciBCaW5hcnkgZmllbGQgY29udGFpbn"
            + "MgdGhlIChiYXNlNjQgZW5jb2RlZCkgZGF0YSwgd2hpbGUgdGhlIEJvZHlMZW5ndGggZmllbGQgZGVmaW5lcyB0aGU"
            + "gbGVuZ3RoIG9mIHRoZSBkYXRhIGluIHRoZSBCb2R5IG9yIEJpbmFyeSBmaWVsZC4gSW4gdGhlIERvY3VtZW50IG9"
            + "iamVjdCwgeW91IGNhbiBzcGVjaWZ5IGEgVVJMIHRvIHRoZSBkb2N1bWVudCBpbnN0ZWFkIG9mIHN0b3JpbmcgdGh"
            + "lIGRvY3VtZW50IGRpcmVjdGx5IGluIHRoZSByZWNvcmQu");
    r2.put(2, "text/plain");
    r2.put(3, parentId);
    records.add(r1);
    records.add(r2);
    SalesforceSink salesforceSink = new SalesforceSink();
    salesforceSink.initialize(adaptor, sfProps);
    salesforceSink.validate(adaptor);
    Writer<Result> batchWriter = salesforceSink.createWriteOperation().createWriter(adaptor);
    LOGGER.debug("Uploading 2 attachments ...");
    writeRows(batchWriter, records);
    assertEquals(2, ((SalesforceWriter) batchWriter).getSuccessfulWrites().size());
    LOGGER.debug("2 attachments uploaded successfully!");
    TSalesforceInputProperties sfInputProps = getSalesforceInputProperties();
    sfInputProps.copyValuesFrom(sfProps);
    sfInputProps.condition.setValue("Name = 'attachment_1_" + random + ".txt' or Name = 'attachment_2_" + random + ".txt'");
    sfInputProps.module.main.schema.setValue(SCHEMA_ATTACHMENT);
    List<IndexedRecord> inpuRecords = readRows(sfInputProps);
    try {
        assertEquals(2, inpuRecords.size());
        IndexedRecord inputRecords_1 = null;
        IndexedRecord inputRecords_2 = null;
        if (("attachment_1_" + random + ".txt").equals(String.valueOf(inpuRecords.get(0).get(0)))) {
            inputRecords_1 = inpuRecords.get(0);
            inputRecords_2 = inpuRecords.get(1);
        } else {
            inputRecords_1 = inpuRecords.get(1);
            inputRecords_2 = inpuRecords.get(0);
        }
        assertEquals("attachment_1_" + random + ".txt", inputRecords_1.get(0));
        assertEquals("attachment_2_" + random + ".txt", inputRecords_2.get(0));
        assertEquals("VGhpcyBpcyBhIHRlc3QgZmlsZSAxICE=", inputRecords_1.get(1));
        assertEquals("Base 64-encoded binary data. Fields of this type are used for storing binary files in Attachment "
                + "records, Document records, and Scontrol records. In these objects, the Body or Binary "
                + "field contains the (base64 encoded) data, while the BodyLength field defines the length"
                + " of the data in the Body or Binary field. In the Document object, you can specify a "
                + "URL to the document instead of storing the document directly in the record.",
                new String(Base64.decode(((String) inputRecords_2.get(1)).getBytes())));
        assertEquals("text/plain", inputRecords_1.get(2));
        assertEquals("text/plain", inputRecords_2.get(2));
        assertEquals(parentId, inputRecords_1.get(3));
        assertEquals(parentId, inputRecords_2.get(3));
        assertNotNull(inputRecords_1.get(4));
        assertNotNull(inputRecords_2.get(4));
    } finally {
        deleteRows(inpuRecords, sfInputProps);
    }
}
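In this test the Body values are pre-encoded Base64 string literals, and the SCHEMA_ATTACHMENT field order used above is 0 = Name, 1 = Body, 2 = ContentType, 3 = ParentId. If the attachment content came from a local file instead, it would have to be Base64-encoded before being written; a minimal sketch with java.util.Base64 (the helper itself is hypothetical and not part of the test):

// Hypothetical helper: build an Attachment record from a local file by
// Base64-encoding its bytes. Field positions follow the usage in the test above.
static IndexedRecord attachmentFromFile(java.nio.file.Path file, String contentType, String parentId)
        throws java.io.IOException {
    byte[] content = java.nio.file.Files.readAllBytes(file);
    IndexedRecord record = new GenericData.Record(SCHEMA_ATTACHMENT);
    record.put(0, file.getFileName().toString());                         // Name
    record.put(1, java.util.Base64.getEncoder().encodeToString(content)); // Body (base64)
    record.put(2, contentType);                                           // ContentType
    record.put(3, parentId);                                              // ParentId
    return record;
}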
use of org.talend.components.salesforce.tsalesforceinput.TSalesforceInputProperties in project components by Talend.
the class SalesforceWriterTestIT method cleanupAllRecords.
@AfterClass
public static void cleanupAllRecords() throws NoSuchElementException, IOException {
    List<IndexedRecord> recordsToClean = new ArrayList<>();
    String prefixToDelete = UNIQUE_NAME + "_" + UNIQUE_ID;
    // Get the list of records that match the prefix to delete.
    {
        TSalesforceInputProperties sfProps = getSalesforceInputProperties();
        SalesforceTestBase.setupProps(sfProps.connection, false);
        sfProps.module.setValue("moduleName", "Account");
        sfProps.module.main.schema.setValue(SCHEMA_UPDATE_ACCOUNT);
        DefaultComponentRuntimeContainerImpl container = new DefaultComponentRuntimeContainerImpl();
        // Initialize the Source and Reader
        SalesforceSource sfSource = new SalesforceSource();
        sfSource.initialize(container, sfProps);
        sfSource.validate(container);
        int nameIndex = -1;
        @SuppressWarnings("unchecked")
        Reader<IndexedRecord> sfReader = sfSource.createReader(container);
        if (sfReader.start()) {
            do {
                IndexedRecord r = sfReader.getCurrent();
                if (nameIndex == -1) {
                    nameIndex = r.getSchema().getField("Name").pos();
                }
                if (String.valueOf(r.get(nameIndex)).startsWith(prefixToDelete)) {
                    recordsToClean.add(r);
                }
            } while (sfReader.advance());
        }
    }
    // Delete those records.
    {
        ComponentDefinition sfDef = new TSalesforceOutputDefinition();
        TSalesforceOutputProperties sfProps = (TSalesforceOutputProperties) sfDef.createRuntimeProperties();
        SalesforceTestBase.setupProps(sfProps.connection, false);
        sfProps.outputAction.setValue(OutputAction.DELETE);
        sfProps.module.setValue("moduleName", "Account");
        sfProps.module.main.schema.setValue(SCHEMA_UPDATE_ACCOUNT);
        DefaultComponentRuntimeContainerImpl container = new DefaultComponentRuntimeContainerImpl();
        // Initialize the Sink, WriteOperation and Writer
        SalesforceSink sfSink = new SalesforceSink();
        sfSink.initialize(container, sfProps);
        sfSink.validate(container);
        SalesforceWriteOperation sfWriteOp = sfSink.createWriteOperation();
        sfWriteOp.initialize(container);
        Writer<Result> sfWriter = sfSink.createWriteOperation().createWriter(container);
        sfWriter.open("uid1");
        // Write the records to delete.
        for (IndexedRecord r : recordsToClean) {
            sfWriter.write(r);
        }
        // Finish the Writer, WriteOperation and Sink.
        Result wr1 = sfWriter.close();
        sfWriteOp.finalize(Arrays.asList(wr1), container);
    }
}
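The delete block above wires the SalesforceSink, SalesforceWriteOperation and Writer by hand. The same wiring could be pulled out into a reusable helper; the sketch below simply mirrors that block (the helper name and signature are hypothetical):

// Hypothetical helper mirroring the delete block above: wire sink -> write operation
// -> writer, write every record, then close the writer and finalize the operation.
// deleteProps is expected to already be configured with OutputAction.DELETE, as above.
private static Result deleteRecords(List<IndexedRecord> toDelete, TSalesforceOutputProperties deleteProps)
        throws IOException {
    DefaultComponentRuntimeContainerImpl container = new DefaultComponentRuntimeContainerImpl();
    SalesforceSink sink = new SalesforceSink();
    sink.initialize(container, deleteProps);
    sink.validate(container);
    SalesforceWriteOperation writeOp = sink.createWriteOperation();
    writeOp.initialize(container);
    Writer<Result> writer = writeOp.createWriter(container);
    writer.open("delete-uid");
    for (IndexedRecord record : toDelete) {
        writer.write(record);
    }
    Result result = writer.close();
    writeOp.finalize(Arrays.asList(result), container);
    return result;
}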
use of org.talend.components.salesforce.tsalesforceinput.TSalesforceInputProperties in project components by Talend.
the class SalesforceWriterTestIT method testSinkAllWithStringValue.
/*
 * With the current API, plain string values cannot be written to the server side for fields typed as
 * date/datetime/int/..., so the component has to convert the field values to the proper types.
 */
@Test
public void testSinkAllWithStringValue() throws Exception {
    // Component framework objects.
    ComponentDefinition sfDef = new TSalesforceOutputDefinition();
    TSalesforceOutputProperties sfProps = (TSalesforceOutputProperties) sfDef.createRuntimeProperties();
    SalesforceTestBase.setupProps(sfProps.connection, false);
    sfProps.module.setValue("moduleName", "Event");
    sfProps.module.main.schema.setValue(SCHEMA_INSERT_EVENT);
    sfProps.ceaseForError.setValue(true);
    // Automatically generate the output schemas.
    sfProps.module.schemaListener.afterSchema();
    DefaultComponentRuntimeContainerImpl container = new DefaultComponentRuntimeContainerImpl();
    List<IndexedRecord> records = new ArrayList<>();
    String random = createNewRandom();
    IndexedRecord r1 = new GenericData.Record(SCHEMA_INSERT_EVENT);
    r1.put(0, "2011-02-02T02:02:02");
    r1.put(1, "2011-02-02T22:02:02.000Z");
    r1.put(2, "2011-02-02");
    r1.put(3, "1200");
    r1.put(4, "true");
    r1.put(5, random);
    // Rejected and successful writes are reset on the next record.
    IndexedRecord r2 = new GenericData.Record(SCHEMA_INSERT_EVENT);
    r2.put(0, "2016-02-02T02:02:02.000Z");
    r2.put(1, "2016-02-02T12:02:02");
    r2.put(2, "2016-02-02");
    r2.put(3, "600");
    r2.put(4, "0");
    r2.put(5, random);
    records.add(r1);
    records.add(r2);
    SalesforceSink salesforceSink = new SalesforceSink();
    salesforceSink.initialize(adaptor, sfProps);
    salesforceSink.validate(adaptor);
    Writer<Result> batchWriter = salesforceSink.createWriteOperation().createWriter(adaptor);
    writeRows(batchWriter, records);
    assertEquals(2, ((SalesforceWriter) batchWriter).getSuccessfulWrites().size());
    TSalesforceInputProperties sfInputProps = getSalesforceInputProperties();
    sfInputProps.copyValuesFrom(sfProps);
    sfInputProps.condition.setValue("Subject = '" + random + "' ORDER BY DurationInMinutes ASC");
    sfInputProps.module.main.schema.setValue(SCHEMA_INPUT_AND_DELETE_EVENT);
    List<IndexedRecord> inpuRecords = readRows(sfInputProps);
    try {
        assertEquals(2, inpuRecords.size());
        IndexedRecord inputRecords_1 = inpuRecords.get(0);
        IndexedRecord inputRecords_2 = inpuRecords.get(1);
        assertEquals(random, inputRecords_1.get(6));
        assertEquals(random, inputRecords_2.get(6));
        // We use containsInAnyOrder because the order is not guaranteed to be the same on every run.
        assertThat(Arrays.asList("2011-02-02T02:02:02.000Z", "2016-02-02T02:02:02.000Z"),
                containsInAnyOrder(inputRecords_1.get(1), inputRecords_2.get(1)));
        assertThat(Arrays.asList("2011-02-02T22:02:02.000Z", "2016-02-02T12:02:02.000Z"),
                containsInAnyOrder(inputRecords_1.get(2), inputRecords_2.get(2)));
        assertThat(Arrays.asList("2011-02-02", "2016-02-02"),
                containsInAnyOrder(inputRecords_1.get(3), inputRecords_2.get(3)));
        assertThat(Arrays.asList("1200", "600"),
                containsInAnyOrder(inputRecords_1.get(4), inputRecords_2.get(4)));
        assertThat(Arrays.asList("true", "false"),
                containsInAnyOrder(inputRecords_1.get(5), inputRecords_2.get(5)));
    } finally {
        deleteRows(inpuRecords, sfInputProps);
    }
}
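The writer accepts these plain strings and converts them to the Event field types (datetime, date, int, boolean) before calling Salesforce. If test data starts out as typed Java values, it first has to be rendered into the same string shapes; below is a hedged sketch with java.time (the field meanings are inferred from the assertions above, since the schema definition itself is not shown on this page):

// Hypothetical helpers, not part of the test: render typed Java values into the
// string forms that testSinkAllWithStringValue writes.
class EventStringValues {

    // "2016-02-02T02:02:02.000Z" style: an Instant formatted in UTC with milliseconds.
    static String utcDateTime(java.time.Instant instant) {
        return java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
                .withZone(java.time.ZoneOffset.UTC).format(instant);
    }

    // "2011-02-02T02:02:02" style: a zone-less datetime in ISO form.
    static String localDateTime(java.time.LocalDateTime dateTime) {
        return dateTime.format(java.time.format.DateTimeFormatter.ISO_LOCAL_DATE_TIME);
    }

    // "2011-02-02" style date; duration and boolean values are written as plain
    // strings such as "1200", "true" or "0".
    static String isoDate(java.time.LocalDate date) {
        return date.toString();
    }
}

For example, utcDateTime(java.time.Instant.parse("2016-02-02T02:02:02Z")) yields the "2016-02-02T02:02:02.000Z" value written for r2.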