Example 66 with CSVRecord

Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.

From the class CSVCommonsLoaderIT, method testCSVCommonsUpsert_MultiTenant.

@Test
public void testCSVCommonsUpsert_MultiTenant() throws Exception {
    CSVParser parser = null;
    PhoenixConnection globalConn = null;
    PhoenixConnection tenantConn = null;
    try {
        String stockTableMultiName = generateUniqueName();
        // Create table using the global connection
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableMultiName + "(TENANT_ID VARCHAR NOT NULL, SYMBOL VARCHAR NOT NULL, COMPANY VARCHAR," + " CONSTRAINT PK PRIMARY KEY(TENANT_ID,SYMBOL)) MULTI_TENANT = true;";
        globalConn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(globalConn, new StringReader(statements), null);
        globalConn.close();
        tenantConn = new PhoenixTestDriver().connect(getUrl() + ";TenantId=acme", new Properties()).unwrap(PhoenixConnection.class);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(tenantConn, stockTableMultiName, Collections.<String>emptyList(), true);
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES_WITH_HEADER));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = tenantConn.prepareStatement("SELECT SYMBOL, COMPANY FROM " + stockTableMultiName);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(STOCK_CSV_VALUES_WITH_HEADER), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (tenantConn != null)
            tenantConn.close();
    }
}
Also used : PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) PhoenixTestDriver(org.apache.phoenix.jdbc.PhoenixTestDriver) CSVParser(org.apache.commons.csv.CSVParser) StringReader(java.io.StringReader) CSVCommonsLoader(org.apache.phoenix.util.CSVCommonsLoader) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement) CSVRecord(org.apache.commons.csv.CSVRecord) Properties(java.util.Properties) Test(org.junit.Test)
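The loader above builds its CSVFormat internally and treats the first line of STOCK_CSV_VALUES_WITH_HEADER as a header row. Outside of Phoenix, plain commons-csv can express the same idea directly. A minimal sketch, assuming a hypothetical two-column payload with SYMBOL and COMPANY headers (not the actual test constant):

import java.io.StringReader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class HeaderExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical CSV payload with a header row, similar in shape to the test data
        String csv = "SYMBOL,COMPANY\nAAPL,APPLE INC\nCRM,SALESFORCE\n";
        // withFirstRecordAsHeader() consumes the first record as column names
        // instead of returning it as data
        CSVFormat format = CSVFormat.DEFAULT.withFirstRecordAsHeader();
        try (CSVParser parser = new CSVParser(new StringReader(csv), format)) {
            for (CSVRecord record : parser) {
                // Fields can be read by header name as well as by position
                System.out.println(record.get("SYMBOL") + " -> " + record.get("COMPANY"));
            }
        }
    }
}

Reading by header name keeps a comparison loop independent of column order, whereas the test above relies on positional access through getString(i + 1).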

Example 67 with CSVRecord

Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.

From the class CSVCommonsLoaderIT, method testAllDatatypes.

@Test
public void testAllDatatypes() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + DATATYPE_TABLE + " (CKEY VARCHAR NOT NULL PRIMARY KEY," + "  CVARCHAR VARCHAR, CCHAR CHAR(10), CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, DATATYPE_TABLE, Collections.<String>emptyList(), true);
        csvUtil.upsert(new StringReader(DATATYPES_CSV_VALUES));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement("SELECT CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM " + DATATYPE_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(DATATYPES_CSV_VALUES), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            int size = record.size();
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getObject(i + 1).toString().toUpperCase());
                if (i < size - 2)
                    break;
                i++;
            }
            // special case for matching date, time values
            String timeFieldValue = record.get(9);
            assertEquals(timeFieldValue.isEmpty() ? null : DateUtil.parseTime(record.get(9)), phoenixResultSet.getTime("CTIME"));
            String dateField = record.get(10);
            assertEquals(dateField.isEmpty() ? null : DateUtil.parseDate(record.get(10)), phoenixResultSet.getDate("CDATE"));
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
Also used : PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) CSVParser(org.apache.commons.csv.CSVParser) StringReader(java.io.StringReader) CSVCommonsLoader(org.apache.phoenix.util.CSVCommonsLoader) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement) CSVRecord(org.apache.commons.csv.CSVRecord) Test(org.junit.Test)
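The time and date columns get special handling because an empty CSV field has to compare as SQL NULL rather than as a parsed value. The same empty-vs-parse decision, sketched with the standard java.sql types instead of Phoenix's DateUtil (the field formats shown are assumptions about the sample data):

import java.sql.Date;
import java.sql.Time;

public class EmptyFieldExample {

    // An empty CSV field maps to NULL; otherwise parse "HH:mm:ss"
    static Time toSqlTime(String csvField) {
        return csvField.isEmpty() ? null : Time.valueOf(csvField);
    }

    // An empty CSV field maps to NULL; otherwise parse "yyyy-MM-dd"
    static Date toSqlDate(String csvField) {
        return csvField.isEmpty() ? null : Date.valueOf(csvField);
    }

    public static void main(String[] args) {
        System.out.println(toSqlTime(""));           // null, i.e. a NULL CTIME value
        System.out.println(toSqlTime("10:15:30"));   // 10:15:30
        System.out.println(toSqlDate("2017-03-01")); // 2017-03-01
    }
}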

Example 68 with CSVRecord

Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.

From the class CSVCommonsLoaderIT, method testCSVUpsertWithCustomDelimiters.

@Test
public void testCSVUpsertWithCustomDelimiters() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        String stockTableName = generateUniqueName();
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableName + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, stockTableName, Arrays.<String>asList(STOCK_COLUMNS), true, '1', '2', '3', CSVCommonsLoader.DEFAULT_ARRAY_ELEMENT_SEPARATOR);
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES_WITH_DELIMITER));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement("SELECT SYMBOL, COMPANY FROM " + stockTableName);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(STOCK_CSV_VALUES_WITH_DELIMITER), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
Also used : PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) CSVParser(org.apache.commons.csv.CSVParser) StringReader(java.io.StringReader) CSVCommonsLoader(org.apache.phoenix.util.CSVCommonsLoader) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement) CSVRecord(org.apache.commons.csv.CSVRecord) Test(org.junit.Test)
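The extra constructor arguments configure the CSV format; going by their order they appear to be the field delimiter, quote character, escape character and array element separator (an assumption based on the call site, not verified against CSVCommonsLoader). With plain commons-csv the same custom format can be built directly. A minimal sketch with a made-up input line:

import java.io.StringReader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CustomDelimiterExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical data using '1' as the delimiter and '2' as the quote character
        String csv = "AAPL1APPLE INC\n2SALES1FORCE21CRM\n";
        CSVFormat format = CSVFormat.DEFAULT
                .withDelimiter('1')  // field separator
                .withQuote('2')      // quote character
                .withEscape('3');    // escape character
        try (CSVParser parser = new CSVParser(new StringReader(csv), format)) {
            for (CSVRecord record : parser) {
                // The quoted "SALES1FORCE" keeps its embedded '1' because it sits inside quotes
                System.out.println(record.get(0) + " | " + record.get(1));
            }
        }
    }
}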

Example 69 with CSVRecord

Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.

From the class CSVFileResultHandler, method read.

public synchronized List<Result> read() throws IOException {
    CSVParser parser = null;
    util.ensureBaseResultDirExists();
    try {
        File file = new File(resultFileName);
        parser = CSVParser.parse(file, Charset.defaultCharset(), CSVFormat.DEFAULT);
        List<CSVRecord> records = parser.getRecords();
        List<Result> results = new ArrayList<>();
        String header = null;
        for (CSVRecord record : records) {
            // First record is the CSV Header
            if (record.getRecordNumber() == 1) {
                header = record.toString();
                continue;
            }
            List<ResultValue> resultValues = new ArrayList<>();
            for (String val : record.toString().split(PherfConstants.RESULT_FILE_DELIMETER)) {
                resultValues.add(new ResultValue(val));
            }
            Result result = new Result(resultFileDetails, header, resultValues);
            results.add(result);
        }
        return results;
    } finally {
        parser.close();
    }
}
Also used : CSVParser(org.apache.commons.csv.CSVParser) ArrayList(java.util.ArrayList) CSVRecord(org.apache.commons.csv.CSVRecord) ResultValue(org.apache.phoenix.pherf.result.ResultValue) File(java.io.File) Result(org.apache.phoenix.pherf.result.Result)
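The read() method above rebuilds its field lists by splitting record.toString() on the Pherf delimiter. As an alternative, commons-csv already exposes each field of a record directly and can consume the header row itself. A minimal sketch of that more direct reading pattern, assuming a hypothetical results.csv whose first line is a header:

import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class DirectReadExample {
    public static void main(String[] args) throws Exception {
        try (Reader reader = Files.newBufferedReader(Paths.get("results.csv"), StandardCharsets.UTF_8);
             CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withFirstRecordAsHeader())) {
            for (CSVRecord record : parser) {
                // Each record already exposes its fields; no string re-splitting needed
                List<String> values = new ArrayList<>();
                for (String value : record) {
                    values.add(value);
                }
                System.out.println(record.getRecordNumber() + ": " + values);
            }
        }
    }
}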

Example 70 with CSVRecord

Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.

From the class CsvEventSerializer, method upsertEvents.

@Override
public void upsertEvents(List<Event> events) throws SQLException {
    Preconditions.checkNotNull(events);
    Preconditions.checkNotNull(connection);
    Preconditions.checkNotNull(this.upsertStatement);
    boolean wasAutoCommit = connection.getAutoCommit();
    connection.setAutoCommit(false);
    try (PreparedStatement colUpsert = connection.prepareStatement(upsertStatement)) {
        String value = null;
        Integer sqlType = null;
        for (Event event : events) {
            byte[] payloadBytes = event.getBody();
            if (payloadBytes == null || payloadBytes.length == 0) {
                continue;
            }
            String payload = new String(payloadBytes);
            CSVRecord csvRecord = csvLineParser.parse(payload);
            if (colNames.size() != csvRecord.size()) {
                logger.debug("payload data {} doesn't match the fields mapping {} ", payload, colNames);
                continue;
            }
            Map<String, String> data = new HashMap<String, String>();
            for (int i = 0; i < csvRecord.size(); i++) {
                data.put(colNames.get(i), csvRecord.get(i));
            }
            Collection<String> values = data.values();
            if (values.contains(null)) {
                logger.debug("payload data {} doesn't match the fields mapping {} ", payload, colNames);
                continue;
            }
            int index = 1;
            int offset = 0;
            for (int i = 0; i < colNames.size(); i++, offset++) {
                if (columnMetadata[offset] == null) {
                    continue;
                }
                String colName = colNames.get(i);
                value = data.get(colName);
                sqlType = columnMetadata[offset].getSqlType();
                PDataType pDataType = PDataType.fromTypeId(sqlType);
                Object upsertValue;
                if (pDataType.isArrayType()) {
                    String arrayJson = Arrays.toString(value.split(csvArrayDelimiter));
                    JSONArray jsonArray = new JSONArray(new JSONTokener(arrayJson));
                    Object[] vals = new Object[jsonArray.length()];
                    for (int x = 0; x < jsonArray.length(); x++) {
                        vals[x] = jsonArray.get(x);
                    }
                    String baseTypeSqlName = PDataType.arrayBaseType(pDataType).getSqlTypeName();
                    Array array = connection.createArrayOf(baseTypeSqlName, vals);
                    upsertValue = pDataType.toObject(array, pDataType);
                } else {
                    upsertValue = pDataType.toObject(value);
                }
                if (upsertValue != null) {
                    colUpsert.setObject(index++, upsertValue, sqlType);
                } else {
                    colUpsert.setNull(index++, sqlType);
                }
            }
            // add headers if necessary
            Map<String, String> headerValues = event.getHeaders();
            for (int i = 0; i < headers.size(); i++, offset++) {
                String headerName = headers.get(i);
                String headerValue = headerValues.get(headerName);
                sqlType = columnMetadata[offset].getSqlType();
                Object upsertValue = PDataType.fromTypeId(sqlType).toObject(headerValue);
                if (upsertValue != null) {
                    colUpsert.setObject(index++, upsertValue, sqlType);
                } else {
                    colUpsert.setNull(index++, sqlType);
                }
            }
            if (autoGenerateKey) {
                sqlType = columnMetadata[offset].getSqlType();
                String generatedRowValue = this.keyGenerator.generate();
                Object rowkeyValue = PDataType.fromTypeId(sqlType).toObject(generatedRowValue);
                colUpsert.setObject(index++, rowkeyValue, sqlType);
            }
            colUpsert.execute();
        }
        connection.commit();
    } catch (Exception ex) {
        logger.error("An error {} occurred during persisting the event ", ex.getMessage());
        throw new SQLException(ex.getMessage());
    } finally {
        if (wasAutoCommit) {
            connection.setAutoCommit(true);
        }
    }
}
Also used : HashMap(java.util.HashMap) SQLException(java.sql.SQLException) JSONArray(org.json.JSONArray) PreparedStatement(java.sql.PreparedStatement) SQLException(java.sql.SQLException) IOException(java.io.IOException) JSONTokener(org.json.JSONTokener) Array(java.sql.Array) JSONArray(org.json.JSONArray) PDataType(org.apache.phoenix.schema.types.PDataType) Event(org.apache.flume.Event) CSVRecord(org.apache.commons.csv.CSVRecord)
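csvLineParser here appears to be a Phoenix-side helper that turns one Flume event payload into a single CSVRecord. With plain commons-csv, roughly the same thing looks like the sketch below (assuming default formatting and one record per payload; the sample line is made up):

import java.io.IOException;
import java.util.List;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class SingleLineParseExample {

    // Parses one CSV line into a CSVRecord, or returns null for an empty line
    static CSVRecord parseLine(String payload) throws IOException {
        try (CSVParser parser = CSVParser.parse(payload, CSVFormat.DEFAULT)) {
            List<CSVRecord> records = parser.getRecords();
            return records.isEmpty() ? null : records.get(0);
        }
    }

    public static void main(String[] args) throws IOException {
        CSVRecord record = parseLine("AAPL,APPLE INC,,123");
        for (int i = 0; i < record.size(); i++) {
            // Empty fields come back as empty strings, not null
            System.out.println(i + " -> '" + record.get(i) + "'");
        }
    }
}

The serializer above then checks record.size() against the configured column list before binding values, which is why a size mismatch simply logs a debug message and skips the event.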

Aggregations

CSVRecord (org.apache.commons.csv.CSVRecord): 127
CSVParser (org.apache.commons.csv.CSVParser): 71
IOException (java.io.IOException): 40
CSVFormat (org.apache.commons.csv.CSVFormat): 40
ArrayList (java.util.ArrayList): 36
Reader (java.io.Reader): 24
StringReader (java.io.StringReader): 22
InputStreamReader (java.io.InputStreamReader): 18
FileReader (java.io.FileReader): 16
Test (org.junit.Test): 14
Path (java.nio.file.Path): 13
HashMap (java.util.HashMap): 11
File (java.io.File): 10
PreparedStatement (java.sql.PreparedStatement): 10
InputStream (java.io.InputStream): 9
ResultSet (java.sql.ResultSet): 9
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 9
CSVCommonsLoader (org.apache.phoenix.util.CSVCommonsLoader): 9
BufferedReader (java.io.BufferedReader): 8
Map (java.util.Map): 7