Use of org.talend.components.elasticsearch.output.ElasticsearchOutputProperties in project components by Talend.
From the class ElasticsearchBeamRuntimeTestIT, method filterTest:
@Test
public void filterTest() throws MalformedURLException {
    final String TYPE_NAME = "filtertest";
    // Index five documents, but expect only r1..r3 back from the filtered read.
    List<String> records = Arrays.asList("r1", "r2", "r3", "q1", "q2");
    List<String> expectedRecords = Arrays.asList("r1", "r2", "r3");
    List<IndexedRecord> expectedRecord = new ArrayList<>();
    for (String record : expectedRecords) {
        expectedRecord.add(ConvertToIndexedRecord.convertToAvro(record));
    }
    List<IndexedRecord> avroRecords = new ArrayList<>();
    for (String record : records) {
        avroRecords.add(ConvertToIndexedRecord.convertToAvro(record));
    }
    // Dataset shared by the write and the read: same index and type.
    ElasticsearchDatasetProperties datasetProperties = new ElasticsearchDatasetProperties("datasetProperties");
    datasetProperties.init();
    datasetProperties.setDatastoreProperties(datastoreProperties);
    datasetProperties.index.setValue(INDEX_NAME);
    datasetProperties.type.setValue(TYPE_NAME);
    // Write the Avro records to Elasticsearch through the Beam output runtime.
    ElasticsearchOutputProperties outputProperties = new ElasticsearchOutputProperties("outputProperties");
    outputProperties.init();
    outputProperties.setDatasetProperties(datasetProperties);
    ElasticsearchOutputRuntime outputRuntime = new ElasticsearchOutputRuntime();
    outputRuntime.initialize(null, outputProperties);
    PCollection<IndexedRecord> inputRecords = (PCollection<IndexedRecord>) pipeline
            .apply(Create.of(avroRecords).withCoder(LazyAvroCoder.of()));
    inputRecords.apply(outputRuntime);
    pipeline.run();
    // Ensure the written documents are visible before the read pipeline runs.
    ElasticsearchTestUtils.upgradeIndexAndGetCurrentNumDocs(INDEX_NAME, TYPE_NAME, client);
    // Input pipeline: read back only the documents matching the regexp query.
    ElasticsearchInputProperties inputProperties = new ElasticsearchInputProperties("inputProperties");
    inputProperties.init();
    inputProperties.setDatasetProperties(datasetProperties);
    inputProperties.query.setValue("{\"query\":{\"regexp\":{\"field\":\"r[1-3]\"}}}");
    ElasticsearchInputRuntime inputRuntime = new ElasticsearchInputRuntime();
    inputRuntime.initialize(null, inputProperties);
    PCollection<IndexedRecord> outputRecords = pipeline.apply(inputRuntime);
    PAssert.that(outputRecords).containsInAnyOrder(expectedRecord);
    pipeline.run();
}
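This test, like the others below, relies on class-level fixtures (datastoreProperties, INDEX_NAME, pipeline, client) that are initialized elsewhere in ElasticsearchBeamRuntimeTestIT and do not appear in the snippet. A minimal sketch of what the datastore setup might look like is shown here; the nodes property name, the constant value, and the host address are assumptions, not taken from the snippet.

// Hypothetical class-level setup; names and values are assumptions.
public static final String INDEX_NAME = "beam"; // index shared by all tests (value assumed)

static ElasticsearchDatastoreProperties datastoreProperties;

@BeforeClass
public static void setupDatastore() {
    datastoreProperties = new ElasticsearchDatastoreProperties("datastoreProperties");
    datastoreProperties.init();
    // Point the datastore at the test cluster; "localhost:9200" is a placeholder address.
    datastoreProperties.nodes.setValue("localhost:9200");
}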
Use of org.talend.components.elasticsearch.output.ElasticsearchOutputProperties in project components by Talend.
From the class ElasticsearchBeamRuntimeTestIT, method getSampleTest:
@Test
public void getSampleTest() {
    final String TYPE_NAME = "getsampletest";
    List<String> records = Arrays.asList("r1", "r2", "r3");
    List<IndexedRecord> avroRecords = new ArrayList<>();
    for (String record : records) {
        avroRecords.add(ConvertToIndexedRecord.convertToAvro(record));
    }
    ElasticsearchDatasetProperties datasetProperties = new ElasticsearchDatasetProperties("datasetProperties");
    datasetProperties.init();
    datasetProperties.setDatastoreProperties(datastoreProperties);
    datasetProperties.index.setValue(INDEX_NAME);
    datasetProperties.type.setValue(TYPE_NAME);
    // Write the three Avro records to Elasticsearch through the Beam output runtime.
    ElasticsearchOutputProperties outputProperties = new ElasticsearchOutputProperties("outputProperties");
    outputProperties.init();
    outputProperties.setDatasetProperties(datasetProperties);
    ElasticsearchOutputRuntime outputRuntime = new ElasticsearchOutputRuntime();
    outputRuntime.initialize(null, outputProperties);
    PCollection<IndexedRecord> inputRecords = (PCollection<IndexedRecord>) pipeline
            .apply(Create.of(avroRecords).withCoder(LazyAvroCoder.of()));
    inputRecords.apply(outputRuntime);
    pipeline.run();
    // Ensure the written documents are visible before sampling.
    ElasticsearchTestUtils.upgradeIndexAndGetCurrentNumDocs(INDEX_NAME, TYPE_NAME, client);
    // Sample the dataset through the dataset runtime and collect the results.
    ElasticsearchDatasetRuntime datasetRuntime = new ElasticsearchDatasetRuntime();
    datasetRuntime.initialize(null, datasetProperties);
    final List<IndexedRecord> samples = new ArrayList<>();
    datasetRuntime.getSample(3, new Consumer<IndexedRecord>() {

        @Override
        public void accept(IndexedRecord indexedRecord) {
            samples.add(indexedRecord);
        }
    });
    compareListIndexedRecord(samples, avroRecords);
    assertThat(samples.size(), is(3));
}
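compareListIndexedRecord is a helper defined elsewhere in the test class and not shown in the snippet. A plausible sketch, assuming it only checks that both lists contain the same records regardless of order (the real implementation may differ):

// Hypothetical helper; relies on Hamcrest's assertThat/containsInAnyOrder.
private static void compareListIndexedRecord(List<IndexedRecord> actual, List<IndexedRecord> expected) {
    // Compare the string form of each record so ordering and object identity do not matter.
    List<String> actualAsString = new ArrayList<>();
    for (IndexedRecord record : actual) {
        actualAsString.add(record.toString());
    }
    List<String> expectedAsString = new ArrayList<>();
    for (IndexedRecord record : expected) {
        expectedAsString.add(record.toString());
    }
    assertThat(actualAsString, containsInAnyOrder(expectedAsString.toArray(new String[0])));
}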
Use of org.talend.components.elasticsearch.output.ElasticsearchOutputProperties in project components by Talend.
From the class ElasticsearchBeamRuntimeTestIT, method getSampleNumericalTest:
@Test
public void getSampleNumericalTest() {
    final String TYPE_NAME = "getsamplenumericaltest";
    List<Integer> records = Arrays.asList(1, 5, 50, 555);
    List<IndexedRecord> avroRecords = new ArrayList<>();
    for (Integer record : records) {
        avroRecords.add(ConvertToIndexedRecord.convertToAvro(record));
    }
    ElasticsearchDatasetProperties datasetProperties = new ElasticsearchDatasetProperties("datasetProperties");
    datasetProperties.init();
    datasetProperties.setDatastoreProperties(datastoreProperties);
    datasetProperties.index.setValue(INDEX_NAME);
    datasetProperties.type.setValue(TYPE_NAME);
    // Write the four numerical Avro records to Elasticsearch through the Beam output runtime.
    ElasticsearchOutputProperties outputProperties = new ElasticsearchOutputProperties("outputProperties");
    outputProperties.init();
    outputProperties.setDatasetProperties(datasetProperties);
    ElasticsearchOutputRuntime outputRuntime = new ElasticsearchOutputRuntime();
    outputRuntime.initialize(null, outputProperties);
    PCollection<IndexedRecord> inputRecords = (PCollection<IndexedRecord>) pipeline
            .apply(Create.of(avroRecords).withCoder(LazyAvroCoder.of()));
    inputRecords.apply(outputRuntime);
    pipeline.run();
    // Ensure the written documents are visible before sampling.
    ElasticsearchTestUtils.upgradeIndexAndGetCurrentNumDocs(INDEX_NAME, TYPE_NAME, client);
    // Sample the dataset through the dataset runtime and collect the results.
    ElasticsearchDatasetRuntime datasetRuntime = new ElasticsearchDatasetRuntime();
    datasetRuntime.initialize(null, datasetProperties);
    final List<IndexedRecord> samples = new ArrayList<>();
    datasetRuntime.getSample(4, new Consumer<IndexedRecord>() {

        @Override
        public void accept(IndexedRecord indexedRecord) {
            samples.add(indexedRecord);
        }
    });
    compareListIndexedRecord(samples, avroRecords);
    assertThat(samples.size(), is(4));
}
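Both sample tests feed plain Java values (strings or integers) through ConvertToIndexedRecord.convertToAvro, which wraps each value in an Avro IndexedRecord. Judging by the regexp query in filterTest, the wrapped value appears to land in a single field named "field", but that is an inference from the query string, not something the snippet states. A minimal standalone illustration, with the expected output left as an assumption:

// Illustrative only: inspect what convertToAvro produces for a primitive value.
IndexedRecord wrapped = ConvertToIndexedRecord.convertToAvro(42);
System.out.println(wrapped.getSchema()); // presumably a record schema with one field (name assumed)
System.out.println(wrapped.get(0));      // the original value, 42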
Use of org.talend.components.elasticsearch.output.ElasticsearchOutputProperties in project components by Talend.
From the class ElasticsearchBeamRuntimeTestIT, method basicTest:
@Test
public void basicTest() throws MalformedURLException {
    final String TYPE_NAME = "basictest";
    List<String> records = Arrays.asList("r1", "r2", "r3");
    List<IndexedRecord> avroRecords = new ArrayList<>();
    for (String record : records) {
        avroRecords.add(ConvertToIndexedRecord.convertToAvro(record));
    }
    // Dataset shared by the write and the read: same index and type.
    ElasticsearchDatasetProperties datasetProperties = new ElasticsearchDatasetProperties("datasetProperties");
    datasetProperties.init();
    datasetProperties.setDatastoreProperties(datastoreProperties);
    datasetProperties.index.setValue(INDEX_NAME);
    datasetProperties.type.setValue(TYPE_NAME);
    // Write the Avro records to Elasticsearch through the Beam output runtime.
    ElasticsearchOutputProperties outputProperties = new ElasticsearchOutputProperties("outputProperties");
    outputProperties.init();
    outputProperties.setDatasetProperties(datasetProperties);
    ElasticsearchOutputRuntime outputRuntime = new ElasticsearchOutputRuntime();
    outputRuntime.initialize(null, outputProperties);
    PCollection<IndexedRecord> inputRecords = (PCollection<IndexedRecord>) pipeline
            .apply(Create.of(avroRecords).withCoder(LazyAvroCoder.of()));
    inputRecords.apply(outputRuntime);
    pipeline.run();
    // Ensure the written documents are visible before the read pipeline runs.
    ElasticsearchTestUtils.upgradeIndexAndGetCurrentNumDocs(INDEX_NAME, TYPE_NAME, client);
    // Input pipeline: read everything back and expect exactly the records that were written.
    ElasticsearchInputProperties inputProperties = new ElasticsearchInputProperties("inputProperties");
    inputProperties.init();
    inputProperties.setDatasetProperties(datasetProperties);
    ElasticsearchInputRuntime inputRuntime = new ElasticsearchInputRuntime();
    inputRuntime.initialize(null, inputProperties);
    PCollection<IndexedRecord> outputRecords = pipeline.apply(inputRuntime);
    PAssert.that(outputRecords).containsInAnyOrder(avroRecords);
    pipeline.run();
}
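The pipeline and client fields used throughout these tests are also class-level fixtures not shown in the snippets. One common arrangement, offered as an assumption rather than the project's actual setup, is a Beam TestPipeline rule plus an Elasticsearch Client created once for the class:

// Hypothetical fixtures; the real test class wires these up in its own setup code.
@Rule
public final transient TestPipeline pipeline = TestPipeline.create();

// An Elasticsearch Client pointed at the test cluster, created in a @BeforeClass method.
static Client client;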