Use of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project.
From the class PutElasticsearchHttpRecordIT, method testNoNullSuppresion.
@Test
public void testNoNullSuppresion() throws Exception {
    // Index a person record whose "sport" field is explicitly null; the processor
    // must preserve the null rather than suppressing the key.
    final Map<String, Object> personValues = new HashMap<>();
    personValues.put("name", "John Doe");
    personValues.put("age", 48);
    personValues.put("sport", null);
    recordReader.addRecord(1, new MapRecord(personSchema, personValues));

    final Map<String, String> docIdAttr = new HashMap<>();
    docIdAttr.put("doc_id", "1");
    final List<Map<String, String>> attributeSets = new ArrayList<>();
    attributeSets.add(docIdAttr);

    setupPut();
    testFetch(attributeSets);

    // Fetch the indexed document back and verify the null field survived the round trip.
    final byte[] fetched = FETCH_RUNNER.getContentAsByteArray(
            FETCH_RUNNER.getFlowFilesForRelationship(FetchElasticsearchHttp.REL_SUCCESS).get(0));
    final Map<String, Object> parsed = mapper.readValue(new String(fetched), Map.class);
    Assert.assertNotNull(parsed);
    final Map<String, Object> person = (Map) parsed.get("person");
    Assert.assertNotNull(person);
    Assert.assertTrue(person.containsKey("sport"));
    Assert.assertNull(person.get("sport"));
}
Use of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project.
From the class AvroTypeUtil, method normalizeValue.
/**
 * Converts an Avro object to normal Java objects for further processing.
 * The counter-part method which converts a raw value to an Avro object is {@link #convertToAvroObject(Object, Schema, String)}
 *
 * @param value      the raw Avro value; may be null
 * @param avroSchema the Avro schema describing {@code value}
 * @param fieldName  slash-separated path of the field, used to build child paths for nested values
 * @return the equivalent plain Java object, or null when {@code value} is null or the schema type is NULL
 */
private static Object normalizeValue(final Object value, final Schema avroSchema, final String fieldName) {
    if (value == null) {
        return null;
    }
    switch(avroSchema.getType()) {
        case INT:
        {
            final LogicalType logicalType = avroSchema.getLogicalType();
            if (logicalType == null) {
                // plain int: pass through unchanged
                return value;
            }
            final String logicalName = logicalType.getName();
            if (LOGICAL_TYPE_DATE.equals(logicalName)) {
                // date logical name means that the value is number of days since Jan 1, 1970
                return new java.sql.Date(TimeUnit.DAYS.toMillis((int) value));
            } else if (LOGICAL_TYPE_TIME_MILLIS.equals(logicalName)) {
                // time-millis logical name means that the value is number of milliseconds since midnight.
                return new java.sql.Time((int) value);
            }
            // unrecognized logical type: fall through to the trailing "return value"
            break;
        }
        case LONG:
        {
            final LogicalType logicalType = avroSchema.getLogicalType();
            if (logicalType == null) {
                // plain long: pass through unchanged
                return value;
            }
            final String logicalName = logicalType.getName();
            if (LOGICAL_TYPE_TIME_MICROS.equals(logicalName)) {
                // time-micros: microseconds since midnight, converted to millis for java.sql.Time
                return new java.sql.Time(TimeUnit.MICROSECONDS.toMillis((long) value));
            } else if (LOGICAL_TYPE_TIMESTAMP_MILLIS.equals(logicalName)) {
                // timestamp-millis: milliseconds since the epoch
                return new java.sql.Timestamp((long) value);
            } else if (LOGICAL_TYPE_TIMESTAMP_MICROS.equals(logicalName)) {
                // timestamp-micros: microseconds since the epoch, converted to millis
                return new java.sql.Timestamp(TimeUnit.MICROSECONDS.toMillis((long) value));
            }
            // unrecognized logical type: fall through to the trailing "return value"
            break;
        }
        case UNION:
            if (value instanceof GenericData.Record) {
                // the value already carries its concrete record schema, so recurse with that
                // instead of resolving the union member
                final GenericData.Record avroRecord = (GenericData.Record) value;
                return normalizeValue(value, avroRecord.getSchema(), fieldName);
            }
            // otherwise pick the matching union member schema and normalize against it
            return convertUnionFieldValue(value, avroSchema, schema -> normalizeValue(value, schema, fieldName), fieldName);
        case RECORD:
            // recursively normalize each field, building a Map keyed by field name,
            // and wrap it in a MapRecord with the equivalent NiFi RecordSchema
            final GenericData.Record record = (GenericData.Record) value;
            final Schema recordSchema = record.getSchema();
            final List<Field> recordFields = recordSchema.getFields();
            final Map<String, Object> values = new HashMap<>(recordFields.size());
            for (final Field field : recordFields) {
                final Object avroFieldValue = record.get(field.name());
                // child path is "parent/field" for error reporting in nested conversions
                final Object fieldValue = normalizeValue(avroFieldValue, field.schema(), fieldName + "/" + field.name());
                values.put(field.name(), fieldValue);
            }
            final RecordSchema childSchema = AvroTypeUtil.createSchema(recordSchema);
            return new MapRecord(childSchema, values);
        case BYTES:
            final ByteBuffer bb = (ByteBuffer) value;
            final LogicalType logicalType = avroSchema.getLogicalType();
            if (logicalType != null && LOGICAL_TYPE_DECIMAL.equals(logicalType.getName())) {
                // decimal logical type: decode via Avro's conversion into a BigDecimal
                return new Conversions.DecimalConversion().fromBytes(bb, avroSchema, logicalType);
            }
            // NOTE(review): bb.array() returns the entire backing array — this assumes the
            // buffer starts at offset 0 and spans the full array; confirm for sliced buffers
            return AvroTypeUtil.convertByteArray(bb.array());
        case FIXED:
            final GenericFixed fixed = (GenericFixed) value;
            return AvroTypeUtil.convertByteArray(fixed.bytes());
        case ENUM:
            // enums are represented by their symbol name
            return value.toString();
        case NULL:
            return null;
        case STRING:
            // covers both String and Avro Utf8 values
            return value.toString();
        case ARRAY:
            // element-wise normalization; child path is "field[i]"
            final GenericData.Array<?> array = (GenericData.Array<?>) value;
            final Object[] valueArray = new Object[array.size()];
            for (int i = 0; i < array.size(); i++) {
                final Schema elementSchema = avroSchema.getElementType();
                valueArray[i] = normalizeValue(array.get(i), elementSchema, fieldName + "[" + i + "]");
            }
            return valueArray;
        case MAP:
            // stringify keys (Avro map keys are Utf8/CharSequence) and normalize each
            // value against the map's value schema; child path is "field[key]"
            final Map<?, ?> avroMap = (Map<?, ?>) value;
            final Map<String, Object> map = new HashMap<>(avroMap.size());
            for (final Map.Entry<?, ?> entry : avroMap.entrySet()) {
                Object obj = entry.getValue();
                if (obj instanceof Utf8 || obj instanceof CharSequence) {
                    obj = obj.toString();
                }
                final String key = entry.getKey().toString();
                obj = normalizeValue(obj, avroSchema.getValueType(), fieldName + "[" + key + "]");
                map.put(key, obj);
            }
            return map;
    }
    // reached for INT/LONG with an unrecognized logical type, and for types
    // not handled above (e.g. BOOLEAN, FLOAT, DOUBLE): return the raw value
    return value;
}
Use of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project.
From the class TestStandardSchemaValidator, method testExtraFields.
@Test
public void testExtraFields() {
    // Schema declares only "id"; the record carries an undeclared extra field "name".
    final List<RecordField> fields = new ArrayList<>();
    fields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
    final RecordSchema schema = new SimpleRecordSchema(fields);

    final Map<String, Object> valueMap = new LinkedHashMap<>();
    valueMap.put("id", 1);
    valueMap.put("name", "John Doe");
    Record record = new MapRecord(schema, valueMap, false, false);

    final SchemaValidationContext allowExtraFieldsContext = new SchemaValidationContext(schema, true, true);
    final SchemaValidationContext forbidExtraFieldsContext = new SchemaValidationContext(schema, false, false);

    // With extra fields allowed, the record validates cleanly.
    StandardSchemaValidator validator = new StandardSchemaValidator(allowExtraFieldsContext);
    SchemaValidationResult result = validator.validate(record);
    assertTrue(result.isValid());
    assertNotNull(result.getValidationErrors());
    assertTrue(result.getValidationErrors().isEmpty());

    // With extra fields forbidden, expect exactly one error pointing at /name.
    validator = new StandardSchemaValidator(forbidExtraFieldsContext);
    result = validator.validate(record);
    assertFalse(result.isValid());
    assertNotNull(result.getValidationErrors());
    final Collection<ValidationError> validationErrors = result.getValidationErrors();
    assertEquals(1, validationErrors.size());
    final ValidationError validationError = validationErrors.iterator().next();
    assertEquals("/name", validationError.getFieldName().get());
    // Removed leftover debug output (System.out.println of the validation error);
    // the assertions above fully cover the expected behavior.
}
Use of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project.
From the class TestStandardSchemaValidator, method testInvalidEmbeddedField.
@Test
public void testInvalidEmbeddedField() {
    // Nested "account" record schema: a STRING name and a DOUBLE balance.
    final List<RecordField> accountFields = new ArrayList<>();
    accountFields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
    accountFields.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
    final RecordSchema accountSchema = new SimpleRecordSchema(accountFields);

    // Outer schema embeds the account record alongside an INT id.
    final List<RecordField> fields = new ArrayList<>();
    fields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
    fields.add(new RecordField("account", RecordFieldType.RECORD.getRecordDataType(accountSchema)));
    final RecordSchema schema = new SimpleRecordSchema(fields);

    // "balance" is deliberately a String ("123.45"), not a double, to trigger
    // a type mismatch on the embedded field under strict validation.
    final Map<String, Object> accountValues = new HashMap<>();
    accountValues.put("name", "account-1");
    accountValues.put("balance", "123.45");
    final Record accountRecord = new MapRecord(accountSchema, accountValues);

    final Map<String, Object> valueMap = new LinkedHashMap<>();
    valueMap.put("id", 1);
    valueMap.put("account", accountRecord);
    Record record = new MapRecord(schema, valueMap, false, false);

    final SchemaValidationContext strictValidationContext = new SchemaValidationContext(schema, false, true);
    final SchemaValidationContext lenientValidationContext = new SchemaValidationContext(schema, false, false);

    // Strict typing: exactly one error, located at the nested path /account/balance.
    StandardSchemaValidator validator = new StandardSchemaValidator(strictValidationContext);
    SchemaValidationResult result = validator.validate(record);
    assertFalse(result.isValid());
    assertNotNull(result.getValidationErrors());
    assertEquals(1, result.getValidationErrors().size());
    final ValidationError validationError = result.getValidationErrors().iterator().next();
    assertEquals("/account/balance", validationError.getFieldName().get());

    // Lenient typing: the String is coercible to a double, so validation passes.
    validator = new StandardSchemaValidator(lenientValidationContext);
    result = validator.validate(record);
    assertTrue(result.isValid());
    assertNotNull(result.getValidationErrors());
    assertTrue(result.getValidationErrors().isEmpty());
}
Use of org.apache.nifi.serialization.record.MapRecord in the Apache NiFi project.
From the class TestStandardSchemaValidator, method testValidateCorrectSimpleTypesStrictValidation.
@Test
public void testValidateCorrectSimpleTypesStrictValidation() throws ParseException {
    // Build a schema with one field per RecordFieldType; CHOICE and MAP need
    // parameterized data types, every other type uses its plain data type.
    final List<RecordField> fields = new ArrayList<>();
    for (final RecordFieldType fieldType : RecordFieldType.values()) {
        final String lowerName = fieldType.name().toLowerCase();
        switch (fieldType) {
            case CHOICE:
                final List<DataType> possibleTypes = new ArrayList<>();
                possibleTypes.add(RecordFieldType.INT.getDataType());
                possibleTypes.add(RecordFieldType.LONG.getDataType());
                fields.add(new RecordField(lowerName, fieldType.getChoiceDataType(possibleTypes)));
                break;
            case MAP:
                fields.add(new RecordField(lowerName, fieldType.getMapDataType(RecordFieldType.INT.getDataType())));
                break;
            default:
                fields.add(new RecordField(lowerName, fieldType.getDataType()));
                break;
        }
    }

    // Fixed instant (2017-01-01 17:00 GMT) shared by the date/time/timestamp values.
    final DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss.SSS");
    dateFormat.setTimeZone(TimeZone.getTimeZone("gmt"));
    final long epochMillis = dateFormat.parse("2017/01/01 17:00:00.000").getTime();

    final Map<String, Object> dimensions = new LinkedHashMap<>();
    dimensions.put("height", 48);
    dimensions.put("width", 96);

    final RecordSchema schema = new SimpleRecordSchema(fields);

    // One correctly-typed value per declared field; null is valid for record/array.
    final Map<String, Object> fieldValues = new LinkedHashMap<>();
    fieldValues.put("string", "string");
    fieldValues.put("boolean", true);
    fieldValues.put("byte", (byte) 1);
    fieldValues.put("char", 'c');
    fieldValues.put("short", (short) 8);
    fieldValues.put("int", 9);
    fieldValues.put("bigint", BigInteger.valueOf(8L));
    fieldValues.put("long", 8L);
    fieldValues.put("float", 8.0F);
    fieldValues.put("double", 8.0D);
    fieldValues.put("date", new Date(epochMillis));
    fieldValues.put("time", new Time(epochMillis));
    fieldValues.put("timestamp", new Timestamp(epochMillis));
    fieldValues.put("record", null);
    fieldValues.put("array", null);
    fieldValues.put("choice", 48L);
    fieldValues.put("map", dimensions);

    final Record record = new MapRecord(schema, fieldValues);

    // Strict typing must accept every value without a single validation error.
    final SchemaValidationContext validationContext = new SchemaValidationContext(schema, false, true);
    final StandardSchemaValidator validator = new StandardSchemaValidator(validationContext);
    final SchemaValidationResult result = validator.validate(record);
    assertTrue(result.isValid());
    assertNotNull(result.getValidationErrors());
    assertTrue(result.getValidationErrors().isEmpty());
}
Aggregations