Example usage of org.opensearch.ingest.IngestDocument from the OpenSearch project (opensearch-project).
Class AppendProcessorTests, method testAppendingToListWithDuplicatesDisallowed.
/**
 * Appends a mix of already-present and brand-new values with duplicates
 * disallowed, and verifies that only the new values end up in the field
 * (the existing list instance is mutated in place, not replaced).
 */
public void testAppendingToListWithDuplicatesDisallowed() throws Exception {
    IngestDocument document = RandomDocumentPicks.randomIngestDocument(random());
    // Seed the document with a random-length list of random strings.
    int initialSize = randomIntBetween(0, 10);
    List<String> initialValues = Stream.generate(() -> randomAlphaOfLengthBetween(1, 10))
        .limit(initialSize)
        .collect(Collectors.toList());
    String targetField = RandomDocumentPicks.addRandomField(random(), document, initialValues);
    List<String> expected = new ArrayList<>(initialValues);
    List<String> alreadyPresent = randomSubsetOf(initialValues);
    // generate new values
    int freshCount = randomIntBetween(0, 10);
    Set<String> candidates = Stream.generate(() -> randomAlphaOfLengthBetween(1, 10))
        .limit(freshCount)
        .collect(Collectors.toSet());
    // drop any candidate that happens to collide with a value already in the list
    Set<String> freshValues = Sets.difference(candidates, new HashSet<>(initialValues));
    List<String> toAppend = new ArrayList<>(alreadyPresent);
    toAppend.addAll(freshValues);
    expected.addAll(freshValues);
    Collections.sort(toAppend);
    // attempt to append both new and existing values
    Processor processor = createAppendProcessor(targetField, toAppend, false);
    processor.execute(document);
    List<?> result = document.getFieldValue(targetField, List.class);
    assertThat(result, sameInstance(initialValues));
    assertThat(result, containsInAnyOrder(expected.toArray()));
}
Example usage of org.opensearch.ingest.IngestDocument from the OpenSearch project (opensearch-project).
Class AppendProcessorTests, method testConvertScalarToList.
/**
 * Verifies that appending to a field holding a scalar converts it into a
 * list whose head is the original scalar, followed by the appended values
 * in order. Randomly exercises both the single-value and list-value append
 * forms of the processor.
 */
public void testConvertScalarToList() throws Exception {
    IngestDocument document = RandomDocumentPicks.randomIngestDocument(random());
    Scalar scalar = randomFrom(Scalar.values());
    Object originalValue = scalar.randomValue();
    String targetField = RandomDocumentPicks.addRandomField(random(), document, originalValue);
    List<Object> appended = new ArrayList<>();
    Processor processor;
    if (randomBoolean()) {
        // single-value append
        Object single = scalar.randomValue();
        appended.add(single);
        processor = createAppendProcessor(targetField, single, true);
    } else {
        // list append of 0..10 values
        int count = randomIntBetween(0, 10);
        for (int i = 0; i < count; i++) {
            appended.add(scalar.randomValue());
        }
        processor = createAppendProcessor(targetField, appended, true);
    }
    processor.execute(document);
    List<?> result = document.getFieldValue(targetField, List.class);
    // original scalar is the head; appended values follow in order
    assertThat(result.size(), equalTo(appended.size() + 1));
    assertThat(result.get(0), equalTo(originalValue));
    for (int i = 1; i <= appended.size(); i++) {
        assertThat(result.get(i), equalTo(appended.get(i - 1)));
    }
}
Example usage of org.opensearch.ingest.IngestDocument from the OpenSearch project (opensearch-project).
Class AppendProcessorTests, method testAppendMetadataExceptVersion.
/**
 * Appends values to a non-version metadata field (_index/_type/_id/_routing)
 * and verifies the resulting list: when the metadata field had no prior value
 * the list equals the appended values; otherwise the prior value becomes the
 * head and the appended values follow in order.
 */
public void testAppendMetadataExceptVersion() throws Exception {
    // here any metadata field value becomes a list, which won't make sense in most of the cases,
    // but support for append is streamlined like for set so we test it
    Metadata metadata = randomFrom(Metadata.INDEX, Metadata.TYPE, Metadata.ID, Metadata.ROUTING);
    List<String> appended = new ArrayList<>();
    Processor processor;
    if (randomBoolean()) {
        // single-value append
        String single = randomAlphaOfLengthBetween(1, 10);
        appended.add(single);
        processor = createAppendProcessor(metadata.getFieldName(), single, true);
    } else {
        // list append of 0..10 values
        int count = randomIntBetween(0, 10);
        for (int i = 0; i < count; i++) {
            appended.add(randomAlphaOfLengthBetween(1, 10));
        }
        processor = createAppendProcessor(metadata.getFieldName(), appended, true);
    }
    IngestDocument document = RandomDocumentPicks.randomIngestDocument(random());
    Object previousValue = document.getSourceAndMetadata().get(metadata.getFieldName());
    processor.execute(document);
    List<?> result = document.getFieldValue(metadata.getFieldName(), List.class);
    if (previousValue == null) {
        assertThat(result, equalTo(appended));
    } else {
        assertThat(result.size(), equalTo(appended.size() + 1));
        assertThat(result.get(0), equalTo(previousValue));
        for (int i = 1; i < result.size(); i++) {
            assertThat(result.get(i), equalTo(appended.get(i - 1)));
        }
    }
}
Example usage of org.opensearch.ingest.IngestDocument from the OpenSearch project (opensearch-project).
Class CsvProcessorTests, method testEmptyFields.
// Verifies that empty fields in a CSV row are skipped: headers whose
// corresponding value is empty must NOT be set on the ingest document,
// while headers with non-empty values receive their (unquoted) value.
public void testEmptyFields() {
// Generate 5..10 header -> expected-value pairs (insertion-ordered).
int numItems = randomIntBetween(5, 10);
Map<String, String> items = new LinkedHashMap<>();
for (int i = 0; i < numItems; i++) {
items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
}
String[] headers = items.keySet().toArray(new String[numItems]);
// Quote and join values at positions 4..(numItems-1): limit(numItems - 1)
// drops the last value, then skip(3) drops the first three.
String csv = items.values().stream().map(v -> quote + v + quote).limit(numItems - 1).skip(3).collect(Collectors.joining(separator + ""));
// Row layout: three leading empty fields, the quoted middle values, one
// empty field (the double separator), then a trailing "abc" — which
// appears to be one more field than there are headers; presumably the
// parser discards fields beyond the header count (TODO confirm).
IngestDocument ingestDocument = processDocument(headers, "" + separator + "" + separator + "" + separator + csv + separator + separator + "abc");
// First three headers line up with empty fields -> must be absent.
items.keySet().stream().limit(3).forEach(key -> assertFalse(ingestDocument.hasField(key)));
// Headers 4..(numItems-1) line up with the quoted values -> must match.
items.entrySet().stream().limit(numItems - 1).skip(3).forEach(e -> assertEquals(e.getValue(), ingestDocument.getFieldValue(e.getKey(), String.class)));
// The last header lines up with the empty field -> must be absent.
items.keySet().stream().skip(numItems - 1).forEach(key -> assertFalse(ingestDocument.hasField(key)));
}
Example usage of org.opensearch.ingest.IngestDocument from the OpenSearch project (opensearch-project).
Class CsvProcessorTests, method testLessFieldsThanHeaders.
/**
 * Feeds a CSV row shorter than the header list and verifies that only the
 * headers with a matching field are set on the document; the surplus
 * headers must be absent.
 */
public void testLessFieldsThanHeaders() {
    // Generate 4..10 header -> value pairs but only emit the first three values.
    int total = randomIntBetween(4, 10);
    Map<String, String> expected = new LinkedHashMap<>();
    for (int i = 0; i < total; i++) {
        expected.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
    }
    String[] headers = expected.keySet().toArray(new String[total]);
    String row = expected.values()
        .stream()
        .map(value -> quote + value + quote)
        .limit(3)
        .collect(Collectors.joining(separator + ""));
    IngestDocument document = processDocument(headers, row);
    // Headers beyond the third have no field in the row -> must be absent.
    expected.keySet().stream().skip(3).forEach(key -> assertFalse(document.hasField(key)));
    // The first three headers carry their unquoted values.
    expected.entrySet()
        .stream()
        .limit(3)
        .forEach(entry -> assertEquals(entry.getValue(), document.getFieldValue(entry.getKey(), String.class)));
}
Aggregations