Search in sources:

Example 61 with DBCPService

use of org.apache.nifi.dbcp.DBCPService in project nifi by apache.

From class TestGenerateTableFetch, method testMultipleColumnTypeMissing:

/**
 * Verifies that GenerateTableFetch re-caches a table's max-value column type when its
 * entry is missing from {@code columnTypeMap} (e.g. after partial state loss), instead
 * of failing. Uses two tables so the processor stores per-table column-type state.
 */
@Test
public void testMultipleColumnTypeMissing() throws ClassNotFoundException, SQLException, InitializationException, IOException {
    // Load test data into the database. try-with-resources guarantees the connection and
    // statement are closed even when an assertion below fails (previously both leaked).
    try (final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
         final Statement stmt = con.createStatement()) {
        try {
            stmt.execute("drop table TEST_QUERY_DB_TABLE");
            stmt.execute("drop table TEST_QUERY_DB_TABLE_2");
        } catch (final SQLException sqle) {
            // Ignore this error, probably a "table does not exist" since Derby doesn't yet support DROP IF EXISTS [DERBY-4842]
        }
        // Create multiple tables so the processor stores column-type state for each
        stmt.execute("create table TEST_QUERY_DB_TABLE (id integer not null, bucket integer not null)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (1, 0)");
        stmt.execute("create table TEST_QUERY_DB_TABLE_2 (id integer not null, bucket integer not null)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE_2 (id, bucket) VALUES (1, 0)");
        // Table name and max-value column come from flow-file attributes via Expression Language
        runner.setProperty(GenerateTableFetch.TABLE_NAME, "${tableName}");
        runner.setIncomingConnection(true);
        runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "${maxValueCol}");
        runner.enqueue("".getBytes(), new HashMap<String, String>() {

            {
                put("tableName", "TEST_QUERY_DB_TABLE");
                put("maxValueCol", "id");
            }
        });
        runner.enqueue("".getBytes(), new HashMap<String, String>() {

            {
                put("tableName", "TEST_QUERY_DB_TABLE_2");
                put("maxValueCol", "id");
            }
        });
        runner.run(2);
        runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2);
        // One cached column type per table
        assertEquals(2, processor.columnTypeMap.size());
        runner.clearTransferState();
        // Remove one entry from columnTypeMap to simulate partially-lost cached state
        Map.Entry<String, Integer> entry = processor.columnTypeMap.entrySet().iterator().next();
        String key = entry.getKey();
        processor.columnTypeMap.remove(key);
        // Insert new records
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, bucket) VALUES (2, 0)");
        // Re-launch a FlowFile to see whether re-caching the column type works
        runner.enqueue("".getBytes(), new HashMap<String, String>() {

            {
                put("tableName", "TEST_QUERY_DB_TABLE");
                put("maxValueCol", "id");
            }
        });
        // It should re-cache the missing column type
        runner.run();
        runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1);
        assertEquals(2, processor.columnTypeMap.size());
        runner.clearTransferState();
    }
}
Also used : SQLException(java.sql.SQLException) Statement(java.sql.Statement) Connection(java.sql.Connection) DBCPService(org.apache.nifi.dbcp.DBCPService) Matchers.anyString(org.mockito.Matchers.anyString) HashMap(java.util.HashMap) Map(java.util.Map) Test(org.junit.Test)

Example 62 with DBCPService

use of org.apache.nifi.dbcp.DBCPService in project nifi by apache.

From class TestGenerateTableFetch, method testAddedRowsTwoTables:

/**
 * Verifies GenerateTableFetch max-value tracking across two tables: an initial fetch emits one
 * query per table, a repeat run emits nothing, and newly added rows with a partition size of 2
 * produce paged queries (FETCH NEXT / OFFSET). Each generated query is executed to confirm it
 * returns the expected number of rows.
 */
@Test
public void testAddedRowsTwoTables() throws ClassNotFoundException, SQLException, InitializationException, IOException {
    // Load test data into the database. try-with-resources guarantees the connection and
    // statement (and any open ResultSets) are closed even when an assertion fails.
    try (final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection();
         final Statement stmt = con.createStatement()) {
        try {
            stmt.execute("drop table TEST_QUERY_DB_TABLE");
        } catch (final SQLException sqle) {
            // Ignore this error, probably a "table does not exist" since Derby doesn't yet support DROP IF EXISTS [DERBY-4842]
        }
        try {
            stmt.execute("drop table TEST_QUERY_DB_TABLE2");
        } catch (final SQLException sqle) {
            // Ignore this error, probably a "table does not exist" since Derby doesn't yet support DROP IF EXISTS [DERBY-4842]
        }
        stmt.execute("create table TEST_QUERY_DB_TABLE (id integer not null, name varchar(100), scale float, created_on timestamp, bignum bigint default 0)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, name, scale, created_on) VALUES (0, 'Joe Smith', 1.0, '1962-09-23 03:23:34.234')");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, name, scale, created_on) VALUES (1, 'Carrie Jones', 5.0, '2000-01-01 03:23:34.234')");
        stmt.execute("insert into TEST_QUERY_DB_TABLE (id, name, scale, created_on) VALUES (2, NULL, 2.0, '2010-01-01 00:00:00')");
        runner.setProperty(GenerateTableFetch.TABLE_NAME, "TEST_QUERY_DB_TABLE");
        runner.setIncomingConnection(false);
        runner.setProperty(GenerateTableFetch.MAX_VALUE_COLUMN_NAMES, "ID");
        runner.run();
        runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1);
        MockFlowFile flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        String query = new String(flowFile.toByteArray());
        assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE WHERE ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query);
        ResultSet resultSet = stmt.executeQuery(query);
        // Should be three records
        assertTrue(resultSet.next());
        assertTrue(resultSet.next());
        assertTrue(resultSet.next());
        assertFalse(resultSet.next());
        runner.clearTransferState();
        // Run again, this time no flowfiles/rows should be transferred
        runner.run();
        runner.assertAllFlowFilesTransferred(REL_SUCCESS, 0);
        runner.clearTransferState();
        // Create and populate a new table and re-run
        stmt.execute("create table TEST_QUERY_DB_TABLE2 (id integer not null, name varchar(100), scale float, created_on timestamp, bignum bigint default 0)");
        stmt.execute("insert into TEST_QUERY_DB_TABLE2 (id, name, scale, created_on) VALUES (0, 'Joe Smith', 1.0, '1962-09-23 03:23:34.234')");
        stmt.execute("insert into TEST_QUERY_DB_TABLE2 (id, name, scale, created_on) VALUES (1, 'Carrie Jones', 5.0, '2000-01-01 03:23:34.234')");
        stmt.execute("insert into TEST_QUERY_DB_TABLE2 (id, name, scale, created_on) VALUES (2, NULL, 2.0, '2010-01-01 00:00:00')");
        runner.setProperty(GenerateTableFetch.TABLE_NAME, "TEST_QUERY_DB_TABLE2");
        runner.run();
        runner.assertAllFlowFilesTransferred(REL_SUCCESS, 1);
        flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        query = new String(flowFile.toByteArray());
        assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE2 WHERE ID <= 2 ORDER BY ID FETCH NEXT 10000 ROWS ONLY", query);
        resultSet = stmt.executeQuery(query);
        // Should be three records
        assertTrue(resultSet.next());
        assertTrue(resultSet.next());
        assertTrue(resultSet.next());
        assertFalse(resultSet.next());
        runner.clearTransferState();
        // Add 3 new rows with a higher ID and run with a partition size of 2. Two flow files should be transferred
        stmt.execute("insert into TEST_QUERY_DB_TABLE2 (id, name, scale, created_on) VALUES (3, 'Mary West', 15.0, '2000-01-01 03:23:34.234')");
        stmt.execute("insert into TEST_QUERY_DB_TABLE2 (id, name, scale, created_on) VALUES (4, 'Marty Johnson', 15.0, '2011-01-01 03:23:34.234')");
        stmt.execute("insert into TEST_QUERY_DB_TABLE2 (id, name, scale, created_on) VALUES (5, 'Marty Johnson', 15.0, '2011-01-01 03:23:34.234')");
        runner.setProperty(GenerateTableFetch.PARTITION_SIZE, "2");
        runner.run();
        runner.assertAllFlowFilesTransferred(REL_SUCCESS, 2);
        // Verify first flow file's contents
        flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
        query = new String(flowFile.toByteArray());
        assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE2 WHERE ID > 2 AND ID <= 5 ORDER BY ID FETCH NEXT 2 ROWS ONLY", query);
        resultSet = stmt.executeQuery(query);
        // Should be two records
        assertTrue(resultSet.next());
        assertTrue(resultSet.next());
        assertFalse(resultSet.next());
        // Verify second flow file's contents
        flowFile = runner.getFlowFilesForRelationship(REL_SUCCESS).get(1);
        query = new String(flowFile.toByteArray());
        assertEquals("SELECT * FROM TEST_QUERY_DB_TABLE2 WHERE ID > 2 AND ID <= 5 ORDER BY ID OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY", query);
        resultSet = stmt.executeQuery(query);
        // Should be one record
        assertTrue(resultSet.next());
        assertFalse(resultSet.next());
        runner.clearTransferState();
    }
}
Also used : MockFlowFile(org.apache.nifi.util.MockFlowFile) SQLException(java.sql.SQLException) Statement(java.sql.Statement) Connection(java.sql.Connection) DBCPService(org.apache.nifi.dbcp.DBCPService) ResultSet(java.sql.ResultSet) Matchers.anyString(org.mockito.Matchers.anyString) Test(org.junit.Test)

Example 63 with DBCPService

use of org.apache.nifi.dbcp.DBCPService in project nifi by apache.

From class TestPutSQL, method testRetryableFailure:

/**
 * When the connection pool throws a SQLException (a transient condition), the
 * flow file should be routed to the retry relationship rather than failure.
 */
@Test
public void testRetryableFailure() throws InitializationException, ProcessException, SQLException, IOException {
    // Wire up a PutSQL runner backed by a service whose connections always throw.
    final TestRunner testRunner = TestRunners.newTestRunner(PutSQL.class);
    final DBCPService failingService = new SQLExceptionService(null);
    testRunner.addControllerService("dbcp", failingService);
    testRunner.enableControllerService(failingService);
    testRunner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    // Two parameterized statements in one flow file, separated by a semicolon.
    final String sql = "INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?); " + "UPDATE PERSONS SET NAME='George' WHERE ID=?; ";

    // Bind the four positional parameters via sql.args.N.{type,value} attributes.
    final Map<String, String> sqlArgs = new HashMap<>();
    sqlArgs.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    sqlArgs.put("sql.args.1.value", "1");
    sqlArgs.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
    sqlArgs.put("sql.args.2.value", "Mark");
    sqlArgs.put("sql.args.3.type", String.valueOf(Types.INTEGER));
    sqlArgs.put("sql.args.3.value", "84");
    sqlArgs.put("sql.args.4.type", String.valueOf(Types.INTEGER));
    sqlArgs.put("sql.args.4.value", "1");

    testRunner.enqueue(sql.getBytes(), sqlArgs);
    testRunner.run();
    // should fail because of the semicolon
    testRunner.assertAllFlowFilesTransferred(PutSQL.REL_RETRY, 1);
}
Also used : HashMap(java.util.HashMap) TestRunner(org.apache.nifi.util.TestRunner) DBCPService(org.apache.nifi.dbcp.DBCPService) Test(org.junit.Test)

Example 64 with DBCPService

use of org.apache.nifi.dbcp.DBCPService in project nifi by apache.

From class TestPutSQL, method testRetryableFailureRollbackOnFailure:

/**
 * Same retryable-failure scenario as {@code testRetryableFailure}, but with
 * Rollback On Failure enabled: instead of routing to retry, the processor must
 * surface a ProcessException so the session is rolled back.
 */
@Test
public void testRetryableFailureRollbackOnFailure() throws InitializationException, ProcessException, SQLException, IOException {
    // Runner backed by a connection service that always throws SQLException.
    final TestRunner testHarness = TestRunners.newTestRunner(PutSQL.class);
    final DBCPService throwingService = new SQLExceptionService(null);
    testHarness.addControllerService("dbcp", throwingService);
    testHarness.enableControllerService(throwingService);
    testHarness.setProperty(PutSQL.CONNECTION_POOL, "dbcp");
    testHarness.setProperty(RollbackOnFailure.ROLLBACK_ON_FAILURE, "true");

    // Two parameterized statements joined with a semicolon.
    final String sql = "INSERT INTO PERSONS (ID, NAME, CODE) VALUES (?, ?, ?); " + "UPDATE PERSONS SET NAME='George' WHERE ID=?; ";

    // Positional parameter bindings as flow-file attributes.
    final Map<String, String> parameterAttrs = new HashMap<>();
    parameterAttrs.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    parameterAttrs.put("sql.args.1.value", "1");
    parameterAttrs.put("sql.args.2.type", String.valueOf(Types.VARCHAR));
    parameterAttrs.put("sql.args.2.value", "Mark");
    parameterAttrs.put("sql.args.3.type", String.valueOf(Types.INTEGER));
    parameterAttrs.put("sql.args.3.value", "84");
    parameterAttrs.put("sql.args.4.type", String.valueOf(Types.INTEGER));
    parameterAttrs.put("sql.args.4.value", "1");
    testHarness.enqueue(sql.getBytes(), parameterAttrs);

    try {
        testHarness.run();
        fail("ProcessException should be thrown");
    } catch (AssertionError e) {
        // The test framework wraps the processor's exception in an AssertionError.
        assertTrue(e.getCause() instanceof ProcessException);
        // Should not be routed to retry.
        testHarness.assertAllFlowFilesTransferred(PutSQL.REL_RETRY, 0);
    }
}
Also used : ProcessException(org.apache.nifi.processor.exception.ProcessException) HashMap(java.util.HashMap) TestRunner(org.apache.nifi.util.TestRunner) DBCPService(org.apache.nifi.dbcp.DBCPService) Test(org.junit.Test)

Example 65 with DBCPService

use of org.apache.nifi.dbcp.DBCPService in project nifi by apache.

From class QueryDatabaseTableTest, method setup:

/**
 * Per-test setup: snapshots the adapter registry, installs a generic database
 * adapter, and builds a fresh test runner with a simple DBCP service wired in,
 * starting from cleared cluster-scoped state.
 */
@Before
public void setup() throws InitializationException, IOException {
    // Remember the original adapter map so teardown can restore it, then register ours.
    origDbAdapters = new HashMap<>(QueryDatabaseTable.dbAdapters);
    dbAdapter = new GenericDatabaseAdapter();
    QueryDatabaseTable.dbAdapters.put(dbAdapter.getName(), dbAdapter);

    // Fresh processor and runner for each test.
    processor = new MockQueryDatabaseTable();
    runner = TestRunners.newTestRunner(processor);

    // Register and enable a simple in-memory connection pool service (no extra properties).
    final DBCPService dbcpService = new DBCPServiceSimpleImpl();
    final Map<String, String> serviceProperties = new HashMap<>();
    runner.addControllerService("dbcp", dbcpService, serviceProperties);
    runner.enableControllerService(dbcpService);
    runner.setProperty(QueryDatabaseTable.DBCP_SERVICE, "dbcp");
    runner.setProperty(QueryDatabaseTable.DB_TYPE, dbAdapter.getName());

    // Start every test with empty cluster-scoped processor state.
    runner.getStateManager().clear(Scope.CLUSTER);
}
Also used : HashMap(java.util.HashMap) GenericDatabaseAdapter(org.apache.nifi.processors.standard.db.impl.GenericDatabaseAdapter) DBCPService(org.apache.nifi.dbcp.DBCPService) Before(org.junit.Before)

Aggregations

DBCPService (org.apache.nifi.dbcp.DBCPService)73 Connection (java.sql.Connection)61 Statement (java.sql.Statement)57 Test (org.junit.Test)57 SQLException (java.sql.SQLException)46 MockFlowFile (org.apache.nifi.util.MockFlowFile)28 HashMap (java.util.HashMap)25 ResultSet (java.sql.ResultSet)22 HiveDBCPService (org.apache.nifi.dbcp.hive.HiveDBCPService)21 File (java.io.File)18 TestRunner (org.apache.nifi.util.TestRunner)18 Matchers.anyString (org.mockito.Matchers.anyString)14 InputStream (java.io.InputStream)13 ProcessException (org.apache.nifi.processor.exception.ProcessException)10 ByteArrayInputStream (org.fusesource.hawtbuf.ByteArrayInputStream)9 StateManager (org.apache.nifi.components.state.StateManager)7 HashSet (java.util.HashSet)6 Map (java.util.Map)6 FlowFile (org.apache.nifi.flowfile.FlowFile)6 IOException (java.io.IOException)5