Use of org.apache.commons.csv.CSVParser in project phoenix by apache.
Class CSVCommonsLoaderIT, method testCSVUpsertWithInvalidNumericalData_StrictMode.
// Ensure that strict mode also causes the import to stop if a data type on a single
// row is not correct
@Test
public void testCSVUpsertWithInvalidNumericalData_StrictMode() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        String stockTableName = generateUniqueName();
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableName
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY_ID BIGINT);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);
        // Upsert CSV file in strict mode
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, stockTableName,
                Arrays.asList("SYMBOL", "COMPANY_ID"), true);
        try {
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
            fail("Running an upsert with data that can't be upserted in strict mode "
                    + "should throw an exception");
        } catch (IllegalDataException e) {
            // Expected
        }
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
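A hedged sketch of the same strict-mode behavior outside the test harness: the table name and inline CSV rows below are hypothetical, and the constructor and upsert calls simply mirror the ones used in the test above.

// Hypothetical data: "abc" cannot be coerced to BIGINT, so in strict mode upsert()
// is expected to abort with IllegalDataException instead of skipping the bad row.
CSVCommonsLoader loader = new CSVCommonsLoader(conn, "STOCKS",
        Arrays.asList("SYMBOL", "COMPANY_ID"), true);
try {
    loader.upsert(new StringReader("AAPL,123\nGOOG,abc\n"));
} catch (IllegalDataException e) {
    // Import stops at the bad numeric value; rows after it are not loaded.
}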
Use of org.apache.commons.csv.CSVParser in project phoenix by apache.
Class CSVCommonsLoaderIT, method testCSVCommonsUpsert_WithArray.
@Test
public void testCSVCommonsUpsert_WithArray() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS ARRAY_TABLE "
                + "(ID BIGINT NOT NULL PRIMARY KEY, VALARRAY INTEGER ARRAY);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "ARRAY_TABLE",
                ImmutableList.<String>of(), true, ',', '"', null, "!");
        csvUtil.upsert(new StringReader("ID,VALARRAY\n" + "1,2!3!4\n"));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement("SELECT ID, VALARRAY FROM ARRAY_TABLE");
        ResultSet phoenixResultSet = statement.executeQuery();
        assertTrue(phoenixResultSet.next());
        assertEquals(1L, phoenixResultSet.getLong(1));
        assertEquals(PArrayDataType.instantiatePhoenixArray(PInteger.INSTANCE, new Integer[] { 2, 3, 4 }),
                phoenixResultSet.getArray(2));
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
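As a plain-Java illustration (not Phoenix's internal conversion code), this hypothetical sketch shows how a single CSV field such as "2!3!4" breaks into the elements of the INTEGER ARRAY column when "!" is supplied as the array element separator.

// Hypothetical sketch: split one CSV field on the configured array element separator.
String field = "2!3!4";
String[] parts = field.split(java.util.regex.Pattern.quote("!"));
Integer[] elements = new Integer[parts.length];
for (int i = 0; i < parts.length; i++) {
    elements[i] = Integer.valueOf(parts[i]); // yields {2, 3, 4}, matching the asserted Phoenix array
}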
Use of org.apache.commons.csv.CSVParser in project phoenix by apache.
Class CSVCommonsLoaderIT, method testCSVUpsertWithBogusColumnStrict.
@Test
public void testCSVUpsertWithBogusColumnStrict() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        String stockTableName = generateUniqueName();
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableName
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, stockTableName,
                Arrays.asList(STOCK_COLUMNS_WITH_BOGUS), true);
        try {
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
            fail();
        } catch (SQLException e) {
            assertTrue(e.getMessage(), e.getMessage().contains(
                    "ERROR 504 (42703): Undefined column. columnName=" + stockTableName + ".BOGUS"));
        }
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
Use of org.apache.commons.csv.CSVParser in project phoenix by apache.
Class CSVCommonsLoaderIT, method testCSVUpsertWithColumns.
@Test
public void testCSVUpsertWithColumns() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        String stockTableName = generateUniqueName();
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableName
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, stockTableName,
                Arrays.<String>asList(STOCK_COLUMNS), true);
        // no header
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement("SELECT SYMBOL, COMPANY FROM " + stockTableName);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(STOCK_CSV_VALUES), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
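For reference, the commons-csv iteration pattern that the comparison loop relies on, shown as a minimal standalone sketch with made-up inline data.

// Minimal commons-csv usage with hypothetical data: iterate records and read fields by index.
try (CSVParser p = new CSVParser(new StringReader("AAPL,Apple Inc.\nMSFT,Microsoft\n"),
        CSVFormat.DEFAULT)) {
    for (CSVRecord record : p) {
        for (int i = 0; i < record.size(); i++) {
            System.out.println(record.get(i));
        }
    }
}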
Use of org.apache.commons.csv.CSVParser in project opennms by OpenNMS.
Class RScriptExecutor, method fromCsv.
/**
 * Convert the CSV string to an immutable table.
 */
protected static ImmutableTable<Long, String, Double> fromCsv(final String csv) throws IOException {
    ImmutableTable.Builder<Long, String, Double> builder = ImmutableTable.builder();
    try (StringReader reader = new StringReader(csv);
         CSVParser parser = new CSVParser(reader, CSVFormat.RFC4180.withHeader())) {
        long rowIndex = 0;
        Map<String, Integer> headerMap = parser.getHeaderMap();
        for (CSVRecord record : parser) {
            for (String key : headerMap.keySet()) {
                Double value;
                try {
                    value = Double.valueOf(record.get(key));
                } catch (NumberFormatException e) {
                    value = Double.NaN;
                }
                builder.put(rowIndex, key, value);
            }
            rowIndex++;
        }
    }
    return builder.build();
}
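A short usage sketch of fromCsv under the behavior visible in the code above: every cell becomes a (rowIndex, columnName, value) entry, and a cell that fails Double.valueOf is stored as NaN. The input string is made up for illustration.

// Hypothetical input: the second data row's "y" is not numeric, so it is stored as Double.NaN.
ImmutableTable<Long, String, Double> table = fromCsv("a,b\n1.5,2\n3,y\n");
// table.get(0L, "a") == 1.5; Double.isNaN(table.get(1L, "b")) == true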