Example usage of org.apache.nifi.serialization.record.MockRecordParser in the Apache NiFi project.
Source: class PutDruidRecordTest, method setUp.
// Wires up a PutDruidRecord test runner with a mock record reader, a mock record
// writer, and a mock Druid Tranquility controller service so tests can exercise
// the processor without a real Druid/ZooKeeper cluster.
// NOTE(review): the (2, 3) constructor args to MockDruidTranquilityController are
// presumably batch/flush sizing knobs of the mock — confirm against its definition.
@Before
public void setUp() throws Exception {
runner = TestRunners.newTestRunner(PutDruidRecord.class);
druidTranquilityController = new MockDruidTranquilityController(2, 3);
recordReader = new MockRecordParser();
// MockRecordWriter(header, quoteValues, failAfterN): null header, quoted values,
// and a writer that fails after 2 records — presumably to exercise failure paths.
recordWriter = new MockRecordWriter(null, true, 2);
// Controller services must be added before they are enabled.
runner.addControllerService("reader", recordReader);
runner.enableControllerService(recordReader);
runner.addControllerService("writer", recordWriter);
runner.enableControllerService(recordWriter);
runner.addControllerService("tranquility", druidTranquilityController);
// Tranquility properties are set before enabling so the service validates correctly.
runner.setProperty(druidTranquilityController, DruidTranquilityController.DATASOURCE, "test");
runner.setProperty(druidTranquilityController, DruidTranquilityController.ZOOKEEPER_CONNECTION_STRING, "localhost:2181");
runner.setProperty(druidTranquilityController, DruidTranquilityController.AGGREGATOR_JSON, "[{\"type\": \"count\", \"name\": \"count\"}]");
runner.setProperty(druidTranquilityController, DruidTranquilityController.DIMENSIONS_LIST, "dim1,dim2");
// Sanity-check the configuration before enabling the service.
runner.assertValid(druidTranquilityController);
runner.enableControllerService(druidTranquilityController);
// Point the processor at the three services registered above by their ids.
runner.setProperty(PutDruidRecord.RECORD_READER_FACTORY, "reader");
runner.setProperty(PutDruidRecord.RECORD_WRITER_FACTORY, "writer");
runner.setProperty(PutDruidRecord.DRUID_TRANQUILITY_SERVICE, "tranquility");
}
Example usage of org.apache.nifi.serialization.record.MockRecordParser in the Apache NiFi project.
Source: class TestPutElasticsearchHttpRecord, method generateTestData.
/**
 * Registers a MockRecordParser as the processor's record reader and preloads it
 * with four (id, name, code) records for tests to consume.
 *
 * @throws IOException if the controller service cannot be registered
 */
private void generateTestData() throws IOException {
    final MockRecordParser recordSource = new MockRecordParser();
    try {
        runner.addControllerService("parser", recordSource);
    } catch (InitializationException e) {
        // Surface service-wiring failures through this method's declared IOException.
        throw new IOException(e);
    }
    runner.enableControllerService(recordSource);
    runner.setProperty(PutElasticsearchHttpRecord.RECORD_READER, "parser");

    recordSource.addSchemaField("id", RecordFieldType.INT);
    recordSource.addSchemaField("name", RecordFieldType.STRING);
    recordSource.addSchemaField("code", RecordFieldType.INT);

    // Records (1, "rec1", 101) through (4, "rec4", 104).
    for (int i = 1; i <= 4; i++) {
        recordSource.addRecord(i, "rec" + i, 100 + i);
    }
}
Example usage of org.apache.nifi.serialization.record.MockRecordParser in the Apache NiFi project.
Source: class PutMongoRecordIT, method setup.
// Delegates common test-runner setup to the superclass for PutMongoRecord,
// then creates a fresh mock record reader for each test.
@Before
public void setup() throws Exception {
super.setup(PutMongoRecord.class);
recordReader = new MockRecordParser();
}
Example usage of org.apache.nifi.serialization.record.MockRecordParser in the Apache NiFi project.
Source: class PutParquetTest, method configure.
/**
 * Builds a test runner for the given PutParquet processor and registers a mock
 * record reader whose schema mirrors the Avro schema, preloaded with
 * {@code numUsers} synthetic records.
 *
 * @param putParquet the processor instance under test
 * @param numUsers   number of ("nameN", N, "blueN") records to enqueue in the reader
 * @throws InitializationException if the controller service cannot be registered
 */
private void configure(final PutParquet putParquet, final int numUsers) throws InitializationException {
    testRunner = TestRunners.newTestRunner(putParquet);
    testRunner.setProperty(PutParquet.HADOOP_CONFIGURATION_RESOURCES, TEST_CONF_PATH);
    testRunner.setProperty(PutParquet.DIRECTORY, DIRECTORY);

    readerFactory = new MockRecordParser();

    // Mirror every field of the Avro-derived schema into the mock reader.
    final RecordSchema derivedSchema = AvroTypeUtil.createSchema(schema);
    for (final RecordField field : derivedSchema.getFields()) {
        readerFactory.addSchemaField(field.getFieldName(), field.getDataType().getFieldType());
    }

    for (int userIndex = 0; userIndex < numUsers; userIndex++) {
        readerFactory.addRecord("name" + userIndex, userIndex, "blue" + userIndex);
    }

    testRunner.addControllerService("mock-reader-factory", readerFactory);
    testRunner.enableControllerService(readerFactory);
    testRunner.setProperty(PutParquet.RECORD_READER, "mock-reader-factory");
}
Example usage of org.apache.nifi.serialization.record.MockRecordParser in the Apache NiFi project.
Source: class TestConvertRecord, method testReadFailure.
/**
 * Verifies that when the record reader fails partway through the input, the
 * original FlowFile is routed unmodified to the 'failure' relationship.
 */
@Test
public void testReadFailure() throws InitializationException {
    // Reader is configured (ctor arg 2) to fail after reading 2 records,
    // forcing a read failure mid-stream.
    final MockRecordParser readerService = new MockRecordParser(2);
    final MockRecordWriter writerService = new MockRecordWriter("header", false);
    final TestRunner runner = TestRunners.newTestRunner(ConvertRecord.class);
    runner.addControllerService("reader", readerService);
    runner.enableControllerService(readerService);
    runner.addControllerService("writer", writerService);
    runner.enableControllerService(writerService);
    runner.setProperty(ConvertRecord.RECORD_READER, "reader");
    runner.setProperty(ConvertRecord.RECORD_WRITER, "writer");

    // Three records queued, but the reader fails after the second (see ctor above).
    readerService.addSchemaField("name", RecordFieldType.STRING);
    readerService.addSchemaField("age", RecordFieldType.INT);
    readerService.addRecord("John Doe", 48);
    readerService.addRecord("Jane Doe", 47);
    readerService.addRecord("Jimmy Doe", 14);

    final MockFlowFile original = runner.enqueue("hello");
    runner.run();

    // Original FlowFile should be routed to 'failure' relationship without modification.
    runner.assertAllFlowFilesTransferred(ConvertRecord.REL_FAILURE, 1);
    final MockFlowFile out = runner.getFlowFilesForRelationship(ConvertRecord.REL_FAILURE).get(0);
    // assertSame replaces assertTrue(original == out): same identity check, but it
    // reports both references on failure instead of a bare boolean.
    // NOTE(review): presumably the file static-imports org.junit.Assert.* (assertTrue
    // is used unqualified) — confirm assertSame is covered by that import.
    assertSame(original, out);
}
Aggregations