Use of org.talend.components.salesforce.tsalesforceoutput.TSalesforceOutputDefinition in project components by Talend.
From the class SalesforceWriterTestIT, method testUploadAttachment.
@Test
public void testUploadAttachment() throws Throwable {
ComponentDefinition sfDef = new TSalesforceOutputDefinition();
TSalesforceOutputProperties sfProps = (TSalesforceOutputProperties) sfDef.createRuntimeProperties();
SalesforceTestBase.setupProps(sfProps.connection, false);
sfProps.module.setValue("moduleName", "Attachment");
sfProps.module.main.schema.setValue(SCHEMA_ATTACHMENT);
sfProps.ceaseForError.setValue(true);
sfProps.module.schemaListener.afterSchema();
List<IndexedRecord> records = new ArrayList<>();
String random = String.valueOf(createNewRandom());
LOGGER.debug("Getting the ParentId for attachment reocrds...");
String parentId = getFirstCreatedAccountRecordId();
LOGGER.debug("ParentId for attachments is:" + parentId);
IndexedRecord r1 = new GenericData.Record(SCHEMA_ATTACHMENT);
r1.put(0, "attachment_1_" + random + ".txt");
r1.put(1, "VGhpcyBpcyBhIHRlc3QgZmlsZSAxICE=");
r1.put(2, "text/plain");
r1.put(3, parentId);
IndexedRecord r2 = new GenericData.Record(SCHEMA_ATTACHMENT);
r2.put(0, "attachment_2_" + random + ".txt");
r2.put(1, "QmFzZSA2NC1lbmNvZGVkIGJpbmFyeSBkYXRhLiBGaWVsZHMgb2YgdGhpcyB0eXBlIGFyZSB1c2VkIGZvciBzdG9yaW5" + "nIGJpbmFyeSBmaWxlcyBpbiBBdHRhY2htZW50IHJlY29yZHMsIERvY3VtZW50IHJlY29yZHMsIGFuZCBTY2" + "9udHJvbCByZWNvcmRzLiBJbiB0aGVzZSBvYmplY3RzLCB0aGUgQm9keSBvciBCaW5hcnkgZmllbGQgY29udGFpbn" + "MgdGhlIChiYXNlNjQgZW5jb2RlZCkgZGF0YSwgd2hpbGUgdGhlIEJvZHlMZW5ndGggZmllbGQgZGVmaW5lcyB0aGU" + "gbGVuZ3RoIG9mIHRoZSBkYXRhIGluIHRoZSBCb2R5IG9yIEJpbmFyeSBmaWVsZC4gSW4gdGhlIERvY3VtZW50IG9" + "iamVjdCwgeW91IGNhbiBzcGVjaWZ5IGEgVVJMIHRvIHRoZSBkb2N1bWVudCBpbnN0ZWFkIG9mIHN0b3JpbmcgdGh" + "lIGRvY3VtZW50IGRpcmVjdGx5IGluIHRoZSByZWNvcmQu");
r2.put(2, "text/plain");
r2.put(3, parentId);
records.add(r1);
records.add(r2);
SalesforceSink salesforceSink = new SalesforceSink();
salesforceSink.initialize(adaptor, sfProps);
salesforceSink.validate(adaptor);
Writer<Result> batchWriter = salesforceSink.createWriteOperation().createWriter(adaptor);
LOGGER.debug("Uploading 2 attachments ...");
writeRows(batchWriter, records);
assertEquals(2, ((SalesforceWriter) batchWriter).getSuccessfulWrites().size());
LOGGER.debug("2 attachments uploaded successfully!");
TSalesforceInputProperties sfInputProps = getSalesforceInputProperties();
sfInputProps.copyValuesFrom(sfProps);
sfInputProps.condition.setValue("Name = 'attachment_1_" + random + ".txt' or Name = 'attachment_2_" + random + ".txt'");
sfInputProps.module.main.schema.setValue(SCHEMA_ATTACHMENT);
List<IndexedRecord> inpuRecords = readRows(sfInputProps);
try {
assertEquals(2, inpuRecords.size());
IndexedRecord inputRecords_1 = null;
IndexedRecord inputRecords_2 = null;
if (("attachment_1_" + random + ".txt").equals(String.valueOf(inpuRecords.get(0).get(0)))) {
inputRecords_1 = inpuRecords.get(0);
inputRecords_2 = inpuRecords.get(1);
} else {
inputRecords_1 = inpuRecords.get(1);
inputRecords_2 = inpuRecords.get(0);
}
assertEquals("attachment_1_" + random + ".txt", inputRecords_1.get(0));
assertEquals("attachment_2_" + random + ".txt", inputRecords_2.get(0));
assertEquals("VGhpcyBpcyBhIHRlc3QgZmlsZSAxICE=", inputRecords_1.get(1));
assertEquals("Base 64-encoded binary data. Fields of this type are used for storing binary files in Attachment " + "records, Document records, and Scontrol records. In these objects, the Body or Binary " + "field contains the (base64 encoded) data, while the BodyLength field defines the length" + " of the data in the Body or Binary field. In the Document object, you can specify a " + "URL to the document instead of storing the document directly in the record.", new String(Base64.decode(((String) inputRecords_2.get(1)).getBytes())));
assertEquals("text/plain", inputRecords_1.get(2));
assertEquals("text/plain", inputRecords_2.get(2));
assertEquals(parentId, inputRecords_1.get(3));
assertEquals(parentId, inputRecords_2.get(3));
assertNotNull(inputRecords_1.get(4));
assertNotNull(inputRecords_2.get(4));
} finally {
deleteRows(inputRecords, sfInputProps);
}
}
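The Body values written above are base64 strings that were encoded by hand. As a minimal sketch of producing such a value from plain text, assuming only the standard java.util.Base64 API (JDK 8+) rather than the com.sforce Base64 helper used in the assertion above:

import java.nio.charset.StandardCharsets;
import java.util.Base64;

class AttachmentBodySketch {

    // Encodes plain text into the base64 form expected by the Attachment Body field.
    static String encodeBody(String plainText) {
        return Base64.getEncoder().encodeToString(plainText.getBytes(StandardCharsets.UTF_8));
    }

    public static void main(String[] args) {
        // Prints "VGhpcyBpcyBhIHRlc3QgZmlsZSAxICE=", the Body used for the first attachment above.
        System.out.println(encodeBody("This is a test file 1 !"));
    }
}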
Use of org.talend.components.salesforce.tsalesforceoutput.TSalesforceOutputDefinition in project components by Talend.
From the class SalesforceWriterTestIT, method cleanupAllRecords.
@AfterClass
public static void cleanupAllRecords() throws NoSuchElementException, IOException {
List<IndexedRecord> recordsToClean = new ArrayList<>();
String prefixToDelete = UNIQUE_NAME + "_" + UNIQUE_ID;
// Get the list of records that match the prefix to delete.
{
TSalesforceInputProperties sfProps = getSalesforceInputProperties();
SalesforceTestBase.setupProps(sfProps.connection, false);
sfProps.module.setValue("moduleName", "Account");
sfProps.module.main.schema.setValue(SCHEMA_UPDATE_ACCOUNT);
DefaultComponentRuntimeContainerImpl container = new DefaultComponentRuntimeContainerImpl();
// Initialize the Source and Reader
SalesforceSource sfSource = new SalesforceSource();
sfSource.initialize(container, sfProps);
sfSource.validate(container);
int nameIndex = -1;
@SuppressWarnings("unchecked") Reader<IndexedRecord> sfReader = sfSource.createReader(container);
if (sfReader.start()) {
do {
IndexedRecord r = sfReader.getCurrent();
if (nameIndex == -1) {
nameIndex = r.getSchema().getField("Name").pos();
}
if (String.valueOf(r.get(nameIndex)).startsWith(prefixToDelete)) {
recordsToClean.add(r);
}
} while (sfReader.advance());
}
}
// Delete those records.
{
ComponentDefinition sfDef = new TSalesforceOutputDefinition();
TSalesforceOutputProperties sfProps = (TSalesforceOutputProperties) sfDef.createRuntimeProperties();
SalesforceTestBase.setupProps(sfProps.connection, false);
sfProps.outputAction.setValue(OutputAction.DELETE);
sfProps.module.setValue("moduleName", "Account");
sfProps.module.main.schema.setValue(SCHEMA_UPDATE_ACCOUNT);
DefaultComponentRuntimeContainerImpl container = new DefaultComponentRuntimeContainerImpl();
// Initialize the Sink, WriteOperation and Writer
SalesforceSink sfSink = new SalesforceSink();
sfSink.initialize(container, sfProps);
sfSink.validate(container);
SalesforceWriteOperation sfWriteOp = sfSink.createWriteOperation();
sfWriteOp.initialize(container);
Writer<Result> sfWriter = sfSink.createWriteOperation().createWriter(container);
sfWriter.open("uid1");
// Write the records to delete.
for (IndexedRecord r : recordsToClean) {
sfWriter.write(r);
}
// Finish the Writer, WriteOperation and Sink.
Result wr1 = sfWriter.close();
sfWriteOp.finalize(Arrays.asList(wr1), container);
}
}
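The cleanup above drives the Reader by hand through its start(), getCurrent() and advance() protocol. As an illustration of that same protocol in isolation, here is a minimal generic helper; this is only a sketch against the Reader contract shown above (the import path is assumed to be the one used by the test class), and closing the reader is left to the caller, as in the test:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.talend.components.api.component.runtime.Reader;

class ReaderDrainSketch {

    // Collects every record a Reader produces: start() positions on the first record,
    // getCurrent() returns it, and advance() moves to the next until it returns false.
    static <T> List<T> drain(Reader<T> reader) throws IOException {
        List<T> all = new ArrayList<>();
        if (reader.start()) {
            do {
                all.add(reader.getCurrent());
            } while (reader.advance());
        }
        return all;
    }
}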
Use of org.talend.components.salesforce.tsalesforceoutput.TSalesforceOutputDefinition in project components by Talend.
From the class SalesforceWriterTestIT, method testSinkAllWithStringValue.
/*
 * With the current API, String values for date/datetime/int/... fields can't be written to the
 * server directly, so the field values need to be converted to the expected types.
 */
@Test
public void testSinkAllWithStringValue() throws Exception {
// Component framework objects.
ComponentDefinition sfDef = new TSalesforceOutputDefinition();
TSalesforceOutputProperties sfProps = (TSalesforceOutputProperties) sfDef.createRuntimeProperties();
SalesforceTestBase.setupProps(sfProps.connection, false);
sfProps.module.setValue("moduleName", "Event");
sfProps.module.main.schema.setValue(SCHEMA_INSERT_EVENT);
sfProps.ceaseForError.setValue(true);
// Automatically generate the output schemas.
sfProps.module.schemaListener.afterSchema();
DefaultComponentRuntimeContainerImpl container = new DefaultComponentRuntimeContainerImpl();
List<IndexedRecord> records = new ArrayList<>();
String random = createNewRandom();
IndexedRecord r1 = new GenericData.Record(SCHEMA_INSERT_EVENT);
r1.put(0, "2011-02-02T02:02:02");
r1.put(1, "2011-02-02T22:02:02.000Z");
r1.put(2, "2011-02-02");
r1.put(3, "1200");
r1.put(4, "true");
r1.put(5, random);
// Rejected and successful writes are reset on the next record.
IndexedRecord r2 = new GenericData.Record(SCHEMA_INSERT_EVENT);
r2.put(0, "2016-02-02T02:02:02.000Z");
r2.put(1, "2016-02-02T12:02:02");
r2.put(2, "2016-02-02");
r2.put(3, "600");
r2.put(4, "0");
r2.put(5, random);
records.add(r1);
records.add(r2);
SalesforceSink salesforceSink = new SalesforceSink();
salesforceSink.initialize(adaptor, sfProps);
salesforceSink.validate(adaptor);
Writer<Result> batchWriter = salesforceSink.createWriteOperation().createWriter(adaptor);
writeRows(batchWriter, records);
assertEquals(2, ((SalesforceWriter) batchWriter).getSuccessfulWrites().size());
TSalesforceInputProperties sfInputProps = getSalesforceInputProperties();
sfInputProps.copyValuesFrom(sfProps);
sfInputProps.condition.setValue("Subject = '" + random + "' ORDER BY DurationInMinutes ASC");
sfInputProps.module.main.schema.setValue(SCHEMA_INPUT_AND_DELETE_EVENT);
List<IndexedRecord> inpuRecords = readRows(sfInputProps);
try {
assertEquals(2, inpuRecords.size());
IndexedRecord inputRecords_1 = inpuRecords.get(0);
IndexedRecord inputRecords_2 = inpuRecords.get(1);
assertEquals(random, inputRecords_1.get(6));
assertEquals(random, inputRecords_2.get(6));
// we use containsInAnyOrder because we are not garanteed to have the same order every run.
assertThat(Arrays.asList("2011-02-02T02:02:02.000Z", "2016-02-02T02:02:02.000Z"), containsInAnyOrder(inputRecords_1.get(1), inputRecords_2.get(1)));
assertThat(Arrays.asList("2011-02-02T22:02:02.000Z", "2016-02-02T12:02:02.000Z"), containsInAnyOrder(inputRecords_1.get(2), inputRecords_2.get(2)));
assertThat(Arrays.asList("2011-02-02", "2016-02-02"), containsInAnyOrder(inputRecords_1.get(3), inputRecords_2.get(3)));
assertThat(Arrays.asList("1200", "600"), containsInAnyOrder(inputRecords_1.get(4), inputRecords_2.get(4)));
assertThat(Arrays.asList("true", "false"), containsInAnyOrder(inputRecords_1.get(5), inputRecords_2.get(5)));
} finally {
deleteRows(inputRecords, sfInputProps);
}
}
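The comment at the top of this method notes that the String values have to be converted before they can be written to the server. A minimal sketch of such conversions for the values used above, assuming only standard JDK classes; the actual conversion performed by the Salesforce runtime may differ:

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

class StringValueConversionSketch {

    // Parses datetime values such as "2011-02-02T02:02:02" (no zone suffix) written by the test above.
    static Date toDateTime(String value) throws ParseException {
        return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").parse(value);
    }

    // Converts duration values such as "1200" into an int.
    static int toMinutes(String value) {
        return Integer.parseInt(value);
    }

    // Converts flag values: "true" (case-insensitive) -> true, anything else (including "0") -> false.
    static boolean toFlag(String value) {
        return Boolean.parseBoolean(value);
    }
}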