Search in sources :

Example 51 with DBCPService

Use of org.apache.nifi.dbcp.DBCPService in the Apache NiFi project.

The class TestExecuteSQL, method testWithDuplicateColumns.

/**
 * Verifies that a query whose result set contains duplicate column names
 * (both joined tables expose a "host" column, aliased as hostA/hostB)
 * is executed successfully and yields exactly one result row.
 */
@Test
public void testWithDuplicateColumns() throws SQLException {
    // remove previous test database, if any
    final File dbLocation = new File(DB_LOCATION);
    dbLocation.delete();
    // load test data to database; try-with-resources guarantees the JDBC
    // resources are released even if a statement or assertion fails
    try (final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
         final Statement stmt = con.createStatement()) {
        try {
            stmt.execute("drop table host1");
            stmt.execute("drop table host2");
        } catch (final SQLException ignored) {
            // Ignore: the tables may not exist on the first run
            // (Derby does not support DROP TABLE IF EXISTS, see DERBY-4842)
        }
        stmt.execute("create table host1 (id integer not null, host varchar(45))");
        stmt.execute("create table host2 (id integer not null, host varchar(45))");
        stmt.execute("insert into host1 values(1,'host1')");
        stmt.execute("insert into host2 values(1,'host2')");
        // NOTE(review): the original also executed the join SELECT directly on
        // stmt here and discarded its ResultSet; that was redundant (the
        // processor runs the query below) and leaked the ResultSet, so it
        // has been removed.
    }
    runner.setIncomingConnection(false);
    runner.setProperty(ExecuteSQL.SQL_SELECT_QUERY, "select a.host as hostA,b.host as hostB from host1 a join host2 b on b.id=a.id");
    runner.run();
    runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1);
    runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS).get(0).assertAttributeEquals(ExecuteSQL.RESULT_ROW_COUNT, "1");
}
Also used : SQLException(java.sql.SQLException) Statement(java.sql.Statement) Connection(java.sql.Connection) DBCPService(org.apache.nifi.dbcp.DBCPService) File(java.io.File) MockFlowFile(org.apache.nifi.util.MockFlowFile) Test(org.junit.Test)

Example 52 with DBCPService

Use of org.apache.nifi.dbcp.DBCPService in the Apache NiFi project.

The class TestExecuteSQL, method invokeOnTrigger.

/**
 * Shared test driver: loads bulk test data, runs the ExecuteSQL processor
 * with the given query/timeout configuration, and verifies that the Avro
 * output on the success relationship contains the expected number of records.
 *
 * @param queryTimeout     optional query timeout in seconds; {@code null} leaves the processor default
 * @param query            the SQL query to run (enqueued as content or set as a property, see below)
 * @param incomingFlowFile if {@code true}, a FlowFile carrying {@code attrs} is enqueued
 * @param attrs            attributes for the incoming FlowFile; may be {@code null}
 * @param setQueryProperty if {@code true}, the query is set via the SQL_SELECT_QUERY property
 *                         instead of (or in addition to) the FlowFile content
 */
public void invokeOnTrigger(final Integer queryTimeout, final String query, final boolean incomingFlowFile, final Map<String, String> attrs, final boolean setQueryProperty) throws InitializationException, ClassNotFoundException, SQLException, IOException {
    if (queryTimeout != null) {
        runner.setProperty(ExecuteSQL.QUERY_TIMEOUT, queryTimeout.toString() + " secs");
    }
    // remove previous test database, if any
    final File dbLocation = new File(DB_LOCATION);
    dbLocation.delete();
    // load test data to database; close the connection once loading is done
    // so it is returned to the pool even if loading throws
    try (final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection()) {
        TestJdbcHugeStream.loadTestData2Database(con, 100, 200, 100);
    }
    LOGGER.info("test data loaded");
    // ResultSet size will be 1x200x100 = 20 000 rows
    // because of where PER.ID = ${person.id}
    final int nrOfRows = 20000;
    if (incomingFlowFile) {
        // incoming FlowFile content is not used, but attributes are used
        final Map<String, String> attributes = (attrs == null) ? new HashMap<>() : attrs;
        attributes.put("person.id", "10");
        if (!setQueryProperty) {
            runner.enqueue(query.getBytes(), attributes);
        } else {
            runner.enqueue("Hello".getBytes(), attributes);
        }
    }
    if (setQueryProperty) {
        runner.setProperty(ExecuteSQL.SQL_SELECT_QUERY, query);
    }
    runner.run();
    runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 1);
    runner.assertAllFlowFilesContainAttribute(ExecuteSQL.REL_SUCCESS, ExecuteSQL.RESULT_QUERY_DURATION);
    runner.assertAllFlowFilesContainAttribute(ExecuteSQL.REL_SUCCESS, ExecuteSQL.RESULT_ROW_COUNT);
    final List<MockFlowFile> flowfiles = runner.getFlowFilesForRelationship(ExecuteSQL.REL_SUCCESS);
    final InputStream in = new ByteArrayInputStream(flowfiles.get(0).toByteArray());
    final DatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
    // Read the Avro container back and count the records it holds
    try (DataFileStream<GenericRecord> dataFileReader = new DataFileStream<>(in, datumReader)) {
        GenericRecord record = null;
        long recordsFromStream = 0;
        while (dataFileReader.hasNext()) {
            // Reuse record object by passing it to next(). This saves us from
            // allocating and garbage collecting many objects for files with
            // many items.
            record = dataFileReader.next(record);
            recordsFromStream += 1;
        }
        LOGGER.info("total nr of records from stream: " + recordsFromStream);
        assertEquals(nrOfRows, recordsFromStream);
    }
}
Also used : ByteArrayInputStream(org.fusesource.hawtbuf.ByteArrayInputStream) InputStream(java.io.InputStream) GenericDatumReader(org.apache.avro.generic.GenericDatumReader) Connection(java.sql.Connection) DataFileStream(org.apache.avro.file.DataFileStream) MockFlowFile(org.apache.nifi.util.MockFlowFile) ByteArrayInputStream(org.fusesource.hawtbuf.ByteArrayInputStream) DBCPService(org.apache.nifi.dbcp.DBCPService) GenericRecord(org.apache.avro.generic.GenericRecord) File(java.io.File) MockFlowFile(org.apache.nifi.util.MockFlowFile)

Example 53 with DBCPService

Use of org.apache.nifi.dbcp.DBCPService in the Apache NiFi project.

The class TestExecuteSQL, method testWithSqlException.

/**
 * Verifies that a syntactically valid query that fails at execution time
 * (selecting a non-existent column) produces no FlowFile on either
 * relationship when there is no incoming connection.
 */
@Test
public void testWithSqlException() throws SQLException {
    // remove previous test database, if any
    final File dbLocation = new File(DB_LOCATION);
    dbLocation.delete();
    // load test data to database; try-with-resources guarantees the JDBC
    // resources are released even if a statement fails
    try (final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
         final Statement stmt = con.createStatement()) {
        try {
            stmt.execute("drop table TEST_NO_ROWS");
        } catch (final SQLException ignored) {
            // Ignore: the table may not exist on the first run
            // (Derby does not support DROP TABLE IF EXISTS, see DERBY-4842)
        }
        stmt.execute("create table TEST_NO_ROWS (id integer)");
    }
    runner.setIncomingConnection(false);
    // Try a valid SQL statement that will generate an error (val1 does not exist, e.g.)
    runner.setProperty(ExecuteSQL.SQL_SELECT_QUERY, "SELECT val1 FROM TEST_NO_ROWS");
    runner.run();
    // No incoming flow file containing a query, and an exception causes no outbound flowfile.
    // There should be no flow files on either relationship
    runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_FAILURE, 0);
    runner.assertAllFlowFilesTransferred(ExecuteSQL.REL_SUCCESS, 0);
}
Also used : SQLException(java.sql.SQLException) Statement(java.sql.Statement) Connection(java.sql.Connection) DBCPService(org.apache.nifi.dbcp.DBCPService) File(java.io.File) MockFlowFile(org.apache.nifi.util.MockFlowFile) Test(org.junit.Test)

Example 54 with DBCPService

Use of org.apache.nifi.dbcp.DBCPService in the Apache NiFi project.

The class TestGenerateTableFetch, method testOnePartition.

/**
 * Verifies that a partition size of 0 produces a single fetch query
 * covering all rows (no OFFSET/FETCH clause), bounded by the max value
 * observed for the configured max-value column.
 */
@Test
public void testOnePartition() throws ClassNotFoundException, SQLException, InitializationException, IOException {
    // load test data to database; try-with-resources guarantees the JDBC
    // resources are released even if a statement or assertion fails
    try (final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
         final Statement stmt = con.createStatement()) {
        try {
            stmt.execute("drop table TEST_QUERY_DB_TABLE");
        } catch (final SQLException ignored) {
            // Ignore this error, probably a "table does not exist" since Derby doesn't yet support DROP IF EXISTS [DERBY-4842]
        }
        stmt.execute("create table TEST_QUERY_DB_TABLE (id integer not null, bucket integer not null)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (0, 0)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (1, 0)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (2, 0)");
        runner.setProperty(GenerateTableFetch.TABLE_NAME, "TEST_QUERY_DB_TABLE");
        runner.setIncomingConnection(false);
        runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "ID");
        // Set partition size to 0 so we can see that the flow file gets all rows
        runner.setProperty(GenerateTableFetch.PARTITION_SIZE, "0");
        runner.run();
        runner.assertAllFlowFilesTransferred(GenerateTableFetch.REL_SUCCESS, 1);
        runner.getFlowFilesForRelationship(GenerateTableFetch.REL_SUCCESS).get(0).assertContentEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID <= 2 ORDER BY ID");
        runner.clearTransferState();
    }
}
Also used : SQLException(java.sql.SQLException) Statement(java.sql.Statement) Connection(java.sql.Connection) DBCPService(org.apache.nifi.dbcp.DBCPService) Test(org.junit.Test)

Example 55 with DBCPService

Use of org.apache.nifi.dbcp.DBCPService in the Apache NiFi project.

The class TestGenerateTableFetch, method testBackwardsCompatibilityStateKeyDynamicTableStaticMaxValues.

/**
 * Verifies backwards compatibility of state keys when the table name is
 * dynamic (expression language) but the max-value column is static: a
 * pre-populated non-fully-qualified state key must not be picked up for the
 * first query, while subsequent runs track the new max value correctly.
 */
@Test
public void testBackwardsCompatibilityStateKeyDynamicTableStaticMaxValues() throws Exception {
    // load test data to database; try-with-resources keeps the connection and
    // statement open across both processor runs (stmt is reused below) and
    // guarantees they are released even if an assertion fails
    try (final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
         final Statement stmt = con.createStatement()) {
        try {
            stmt.execute("drop table TEST_QUERY_DB_TABLE");
        } catch (final SQLException ignored) {
            // Ignore this error, probably a "table does not exist" since Derby doesn't yet support DROP IF EXISTS [DERBY-4842]
        }
        stmt.execute("create table TEST_QUERY_DB_TABLE (id integer not null, bucket integer not null)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (0, 0)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (1, 0)");
        runner.setProperty(GenerateTableFetch.TABLE_NAME, "${tableName}");
        runner.setIncomingConnection(true);
        runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "id");
        // Plain HashMaps instead of double-brace initialization: the anonymous
        // subclasses created by double-brace init retain a reference to the
        // enclosing test instance and obscure intent.
        final Map<String, String> firstRunAttrs = new HashMap<>();
        firstRunAttrs.put("tableName", "TEST_QUERY_DB_TABLE");
        runner.enqueue("".getBytes(), firstRunAttrs);
        // Pre-populate the state with a key for column name (not fully-qualified)
        StateManager stateManager = runner.getStateManager();
        final Map<String, String> initialState = new HashMap<>();
        initialState.put("id", "0");
        stateManager.setState(initialState, Scope.CLUSTER);
        // Pre-populate the column type map with an entry for id (not fully-qualified)
        processor.columnTypeMap.put("id", 4);
        runner.run();
        runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        // Note there is no WHERE clause here. Because we are using dynamic tables, the old state key/value is not retrieved
        assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id <= 1 ORDER BY id FETCH NEXT 10000 ROWS ONLY", new String(flowFile.toByteArray()));
        runner.clearTransferState();
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (2, 0)");
        final Map<String, String> secondRunAttrs = new HashMap<>();
        secondRunAttrs.put("tableName", "TEST_QUERY_DB_TABLE");
        secondRunAttrs.put("maxValueCol", "id");
        runner.enqueue("".getBytes(), secondRunAttrs);
        runner.run();
        runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1);
        flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE id > 1 AND id <= 2 ORDER BY id FETCH NEXT 10000 ROWS ONLY", new String(flowFile.toByteArray()));
    }
}
Also used : MockFlowFile(org.apache.nifi.util.MockFlowFile) StateManager(org.apache.nifi.components.state.StateManager) SQLException(java.sql.SQLException) Statement(java.sql.Statement) Connection(java.sql.Connection) DBCPService(org.apache.nifi.dbcp.DBCPService) Matchers.anyString(org.mockito.Matchers.anyString) Test(org.junit.Test)

Aggregations

DBCPService (org.apache.nifi.dbcp.DBCPService)73 Connection (java.sql.Connection)61 Statement (java.sql.Statement)57 Test (org.junit.Test)57 SQLException (java.sql.SQLException)46 MockFlowFile (org.apache.nifi.util.MockFlowFile)28 HashMap (java.util.HashMap)25 ResultSet (java.sql.ResultSet)22 HiveDBCPService (org.apache.nifi.dbcp.hive.HiveDBCPService)21 File (java.io.File)18 TestRunner (org.apache.nifi.util.TestRunner)18 Matchers.anyString (org.mockito.Matchers.anyString)14 InputStream (java.io.InputStream)13 ProcessException (org.apache.nifi.processor.exception.ProcessException)10 ByteArrayInputStream (org.fusesource.hawtbuf.ByteArrayInputStream)9 StateManager (org.apache.nifi.components.state.StateManager)7 HashSet (java.util.HashSet)6 Map (java.util.Map)6 FlowFile (org.apache.nifi.flowfile.FlowFile)6 IOException (java.io.IOException)5