Use of org.talend.components.simplefileio.s3.output.S3OutputProperties in the Talend components project: class S3SourceOrSinkTestIT, method validate_fail. Validation is expected to report an ERROR when the S3 secret key is wrong.
@Test
public void validate_fail() {
    // Prepare output properties, then overwrite the secret key with a deliberately wrong value.
    S3OutputProperties properties = PropertiesPreparer.createS3OtuputProperties();
    S3DatastoreProperties datastore = properties.getDatasetProperties().getDatastoreProperties();
    datastore.secretKey.setValue("wrongone");

    // Validation against S3 must fail with the bad credentials.
    runtime.initialize(null, properties);
    ValidationResult result = runtime.validate(null);
    org.junit.Assert.assertEquals(result.getMessage(), ValidationResult.Result.ERROR, result.getStatus());
}
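
The PropertiesPreparer.createS3OtuputProperties() helper is test utility code that is not reproduced on this page. For orientation, here is a minimal sketch of how an equivalent properties tree could be assembled by hand, mirroring the S3OutputPropertiesTest.setup() example further down (the names passed to the constructors are arbitrary placeholders):

// Sketch only: build the same kind of properties tree without the helper.
S3DatastoreProperties datastore = new S3DatastoreProperties("datastore");
datastore.init();
datastore.secretKey.setValue("wrongone"); // deliberately invalid, as in the test

S3DatasetProperties dataset = new S3DatasetProperties("dataset");
dataset.init();
dataset.setDatastoreProperties(datastore);

S3OutputProperties properties = new S3OutputProperties("out");
properties.setDatasetProperties(dataset);
properties.init();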
Use of org.talend.components.simplefileio.s3.output.S3OutputProperties in the Talend components project: class S3OutputRuntimeTestIT, method testCsv_merge. Two records are written as CSV with mergeOutput enabled, and the test asserts that a single semicolon-delimited file is produced.
@Test
public void testCsv_merge() throws IOException {
    S3DatasetProperties datasetProps = s3.createS3DatasetProperties();
    datasetProps.format.setValue(SimpleFileIOFormat.CSV);
    datasetProps.recordDelimiter.setValue(SimpleFileIODatasetProperties.RecordDelimiterType.LF);
    datasetProps.fieldDelimiter.setValue(SimpleFileIODatasetProperties.FieldDelimiterType.SEMICOLON);

    S3OutputProperties outputProperties = new S3OutputProperties("out");
    outputProperties.init();
    outputProperties.setDatasetProperties(datasetProps);
    outputProperties.mergeOutput.setValue(true);

    // Create the runtime.
    S3OutputRuntime runtime = new S3OutputRuntime();
    runtime.initialize(null, outputProperties);

    // Use the runtime in a Spark pipeline to test.
    final Pipeline p = spark.createPipeline();
    PCollection<IndexedRecord> input = p.apply(Create.of(
            ConvertToIndexedRecord.convertToAvro(new String[] { "1", "one" }),
            ConvertToIndexedRecord.convertToAvro(new String[] { "2", "two" })));
    input.apply(runtime);

    // And run the test.
    p.run().waitUntilFinish();

    FileSystem s3FileSystem = S3Connection.createFileSystem(datasetProps);
    MiniDfsResource.assertReadFile(s3FileSystem, s3.getS3APath(datasetProps), "1;one", "2;two");
    MiniDfsResource.assertFileNumber(s3FileSystem, s3.getS3APath(datasetProps), 1);
}
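
The assertFileNumber(..., 1) check is what ties the test to mergeOutput: without merging, the Spark runner would typically leave one part file per output bundle. The same dataset properties can also drive the matching input component; a minimal sketch, patterned after the S3SparkRuntimeTestIT example further down (only calls already shown on this page are used):

// Reuse the CSV dataset configuration on the input side.
S3InputProperties inputProps = new S3InputProperties("in");
inputProps.setDatasetProperties(datasetProps);
// inputProps can then be handed to the matching input runtime,
// as in the round-trip pattern of test_noEncryption below.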
Use of org.talend.components.simplefileio.s3.output.S3OutputProperties in the Talend components project: class S3OutputRuntimeTestIT, method testAvro_merge. The same merge scenario as above, but writing Avro instead of CSV.
@Test
public void testAvro_merge() throws IOException {
    S3DatasetProperties datasetProps = s3.createS3DatasetProperties();
    datasetProps.format.setValue(SimpleFileIOFormat.AVRO);

    S3OutputProperties outputProperties = new S3OutputProperties("out");
    outputProperties.init();
    outputProperties.setDatasetProperties(datasetProps);
    outputProperties.mergeOutput.setValue(true);

    // Create the runtime.
    S3OutputRuntime runtime = new S3OutputRuntime();
    runtime.initialize(null, outputProperties);

    // Use the runtime in a Spark pipeline to test.
    final Pipeline p = spark.createPipeline();
    PCollection<IndexedRecord> input = p.apply(Create.of(
            ConvertToIndexedRecord.convertToAvro(new String[] { "1", "one" }),
            ConvertToIndexedRecord.convertToAvro(new String[] { "2", "two" })));
    input.apply(runtime);

    // And run the test.
    p.run().waitUntilFinish();

    FileSystem s3FileSystem = S3Connection.createFileSystem(datasetProps);
    MiniDfsResource.assertReadAvroFile(s3FileSystem, s3.getS3APath(datasetProps),
            new HashSet<IndexedRecord>(Arrays.asList(
                    ConvertToIndexedRecord.convertToAvro(new String[] { "1", "one" }),
                    ConvertToIndexedRecord.convertToAvro(new String[] { "2", "two" }))),
            false);
    MiniDfsResource.assertFileNumber(s3FileSystem, s3.getS3APath(datasetProps), 1);
}
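
Apart from setting the format to SimpleFileIOFormat.AVRO and omitting the delimiter options (which only apply to CSV), the configuration is identical to the CSV test above. assertReadAvroFile compares the merged file against a HashSet of the expected records, so record order is not significant, and assertFileNumber again confirms that merging produced a single file.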
Use of org.talend.components.simplefileio.s3.output.S3OutputProperties in the Talend components project: class S3SparkRuntimeTestIT, method test_noEncryption. Records are round-tripped through the S3 output and input components, then sampling and schema discovery are checked against the same data.
public void test_noEncryption(S3DatasetProperties datasetProps) throws IOException {
    // The data that we will be writing.
    RecordSet rs = getSimpleTestData(0);

    // Configure the components.
    S3OutputProperties outputProps = new S3OutputProperties("out");
    outputProps.setDatasetProperties(datasetProps);
    S3InputProperties inputProps = new S3InputProperties("in");
    inputProps.setDatasetProperties(datasetProps);

    // Write the records to S3 and read them back.
    List<IndexedRecord> actual = runRoundTripPipelines(rs.getAllData(), outputProps, inputProps);
    List<IndexedRecord> expected = rs.getAllData();
    assertThat(actual, containsInAnyOrder(expected.toArray()));

    // Sampling and schema discovery must agree with the written data.
    List<IndexedRecord> samples = getSample(datasetProps);
    assertThat(samples, containsInAnyOrder(expected.toArray()));
    Schema schema = getSchema(datasetProps);
    assertEquals(expected.get(0).getSchema(), schema);
}
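
This check is written as a parameterized helper rather than a @Test so the same write, read-back, sample, and schema assertions can be reused against differently configured S3 datasets; the name suggests this variant is invoked with no server-side encryption options set. A hypothetical invocation, assuming an s3 test resource like the one used in S3OutputRuntimeTestIT above:

// Hypothetical caller: run the round-trip checks against a plain (unencrypted) dataset.
test_noEncryption(s3.createS3DatasetProperties());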
Use of org.talend.components.simplefileio.s3.output.S3OutputProperties in the Talend components project: class S3OutputPropertiesTest, method setup. The fixture builds an S3OutputProperties instance wired to a dataset backed by a datastore.
@Before
public void setup() {
    properties = new S3OutputProperties("test");

    // Wire a dataset (backed by a datastore) into the output properties.
    S3DatastoreProperties datastoreProperties = new S3DatastoreProperties("test");
    datastoreProperties.init();
    S3DatasetProperties datasetProperties = new S3DatasetProperties("test");
    datasetProperties.init();
    datasetProperties.setDatastoreProperties(datastoreProperties);
    properties.setDatasetProperties(datasetProperties);
    properties.init();
}
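
A test built on this fixture might simply verify the wiring; a minimal sketch (the test method below is hypothetical, only the getter chain already appears in the validate_fail example above):

@Test
public void testDatasetWiring() {
    // setup() wires the dataset and datastore into the output properties,
    // so both should be reachable through the getters.
    org.junit.Assert.assertNotNull(properties.getDatasetProperties());
    org.junit.Assert.assertNotNull(properties.getDatasetProperties().getDatastoreProperties());
}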