Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.
The class CSVCommonsLoaderIT, method testCSVCommonsUpsert_MultiTenant.
@Test
public void testCSVCommonsUpsert_MultiTenant() throws Exception {
    CSVParser parser = null;
    PhoenixConnection globalConn = null;
    PhoenixConnection tenantConn = null;
    try {
        String stockTableMultiName = generateUniqueName();
        // Create table using the global connection
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableMultiName
                + "(TENANT_ID VARCHAR NOT NULL, SYMBOL VARCHAR NOT NULL, COMPANY VARCHAR,"
                + " CONSTRAINT PK PRIMARY KEY(TENANT_ID,SYMBOL)) MULTI_TENANT = true;";
        globalConn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(globalConn, new StringReader(statements), null);
        globalConn.close();
        tenantConn = new PhoenixTestDriver().connect(getUrl() + ";TenantId=acme", new Properties())
                .unwrap(PhoenixConnection.class);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(tenantConn, stockTableMultiName,
                Collections.<String> emptyList(), true);
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES_WITH_HEADER));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = tenantConn
                .prepareStatement("SELECT SYMBOL, COMPANY FROM " + stockTableMultiName);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(STOCK_CSV_VALUES_WITH_HEADER), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        // Also close the global connection in case an exception was thrown
        // before the early close above (closing twice is a JDBC no-op)
        if (globalConn != null)
            globalConn.close();
        if (tenantConn != null)
            tenantConn.close();
    }
}
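The test above encodes the tenant id directly in the JDBC URL. As a minimal sketch of an alternative, assuming the standard PhoenixRuntime.TENANT_ID_ATTRIB property key ("TenantId"), the same tenant-scoped connection can be opened through Properties instead:
// Sketch only: tenant-scoped connection via Properties rather than by
// appending ";TenantId=acme" to the URL. PhoenixRuntime.TENANT_ID_ATTRIB
// is assumed to be the relevant property key.
Properties props = new Properties();
props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, "acme");
try (PhoenixConnection tenantConn =
        DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class)) {
    // tenant-scoped upserts and queries go here
}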
Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.
The class CSVCommonsLoaderIT, method testAllDatatypes.
@Test
public void testAllDatatypes() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + DATATYPE_TABLE
                + " (CKEY VARCHAR NOT NULL PRIMARY KEY,"
                + " CVARCHAR VARCHAR, CCHAR CHAR(10), CINTEGER INTEGER, CDECIMAL DECIMAL(31,10),"
                + " CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT,"
                + " CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, DATATYPE_TABLE,
                Collections.<String> emptyList(), true);
        csvUtil.upsert(new StringReader(DATATYPES_CSV_VALUES));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement(
                "SELECT CKEY, CVARCHAR, CCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN,"
                + " CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM " + DATATYPE_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(DATATYPES_CSV_VALUES), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            int size = record.size();
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getObject(i + 1).toString().toUpperCase());
                i++;
                // Stop before the last two columns; CTIME and CDATE need a
                // typed comparison rather than a string comparison
                if (i == size - 2)
                    break;
            }
            // Special case for matching the date and time values
            String timeFieldValue = record.get(9);
            assertEquals(timeFieldValue.isEmpty() ? null : DateUtil.parseTime(record.get(9)),
                    phoenixResultSet.getTime("CTIME"));
            String dateField = record.get(10);
            assertEquals(dateField.isEmpty() ? null : DateUtil.parseDate(record.get(10)),
                    phoenixResultSet.getDate("CDATE"));
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
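The positional index arithmetic above (columns 9 and 10 for CTIME and CDATE) is easy to get wrong. A hedged alternative, assuming DATATYPES_CSV_VALUES starts with a header row whose names match the table columns, is to let commons-csv treat the first record as a header and access fields by name:
// Sketch: header-based field access with commons-csv, so CTIME/CDATE can be
// addressed by name instead of by position. The header assumption above applies.
CSVFormat format = CSVFormat.DEFAULT.withFirstRecordAsHeader();
try (CSVParser p = new CSVParser(new StringReader(DATATYPES_CSV_VALUES), format)) {
    for (CSVRecord rec : p) {
        String time = rec.get("CTIME"); // empty string stands for SQL NULL in this test
        String date = rec.get("CDATE");
    }
}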
Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.
The class CSVCommonsLoaderIT, method testCSVUpsertWithCustomDelimiters.
@Test
public void testCSVUpsertWithCustomDelimiters() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        String stockTableName = generateUniqueName();
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + stockTableName
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, stockTableName,
                Arrays.<String> asList(STOCK_COLUMNS), true, '1', '2', '3',
                CSVCommonsLoader.DEFAULT_ARRAY_ELEMENT_SEPARATOR);
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES_WITH_DELIMITER));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT SYMBOL, COMPANY FROM " + stockTableName);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(STOCK_CSV_VALUES_WITH_DELIMITER),
                csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
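The three character arguments ('1', '2', '3') supply the loader's custom field delimiter, quote, and escape characters. As a hedged sketch of the same idea expressed directly against the commons-csv API (the '|', '"', and '\\' characters below are illustrative assumptions, not the characters the test uses):
// Sketch: building a commons-csv format with custom delimiter, quote, and
// escape characters. The concrete characters here are assumptions.
CSVFormat custom = CSVFormat.DEFAULT
        .withDelimiter('|')
        .withQuote('"')
        .withEscape('\\');
try (CSVParser p = new CSVParser(new StringReader("AAPL|Apple Inc."), custom)) {
    for (CSVRecord rec : p) {
        System.out.println(rec.get(0) + " -> " + rec.get(1));
    }
}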
Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.
The class CSVFileResultHandler, method read.
public synchronized List<Result> read() throws IOException {
    CSVParser parser = null;
    util.ensureBaseResultDirExists();
    try {
        File file = new File(resultFileName);
        parser = CSVParser.parse(file, Charset.defaultCharset(), CSVFormat.DEFAULT);
        List<CSVRecord> records = parser.getRecords();
        List<Result> results = new ArrayList<>();
        String header = null;
        for (CSVRecord record : records) {
            // First record is the CSV header
            if (record.getRecordNumber() == 1) {
                header = record.toString();
                continue;
            }
            List<ResultValue> resultValues = new ArrayList<>();
            for (String val : record.toString().split(PherfConstants.RESULT_FILE_DELIMETER)) {
                resultValues.add(new ResultValue(val));
            }
            Result result = new Result(resultFileDetails, header, resultValues);
            results.add(result);
        }
        return results;
    } finally {
        // Null check guards against an NPE when CSVParser.parse throws
        // before parser is assigned
        if (parser != null) {
            parser.close();
        }
    }
}
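Since CSVParser implements Closeable, the same logic can be written with try-with-resources, which removes the null handling in the finally block entirely. A minimal sketch:
// Sketch: try-with-resources variant of read(). The parser is closed
// automatically even if getRecords() or the loop throws.
try (CSVParser parser = CSVParser.parse(new File(resultFileName),
        Charset.defaultCharset(), CSVFormat.DEFAULT)) {
    List<Result> results = new ArrayList<>();
    String header = null;
    for (CSVRecord record : parser.getRecords()) {
        if (record.getRecordNumber() == 1) { // first record is the CSV header
            header = record.toString();
            continue;
        }
        List<ResultValue> resultValues = new ArrayList<>();
        for (String val : record.toString().split(PherfConstants.RESULT_FILE_DELIMETER)) {
            resultValues.add(new ResultValue(val));
        }
        results.add(new Result(resultFileDetails, header, resultValues));
    }
    return results;
}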
Use of org.apache.commons.csv.CSVRecord in project phoenix by apache.
The class CsvEventSerializer, method upsertEvents.
@Override
public void upsertEvents(List<Event> events) throws SQLException {
    Preconditions.checkNotNull(events);
    Preconditions.checkNotNull(connection);
    Preconditions.checkNotNull(this.upsertStatement);
    boolean wasAutoCommit = connection.getAutoCommit();
    connection.setAutoCommit(false);
    try (PreparedStatement colUpsert = connection.prepareStatement(upsertStatement)) {
        String value = null;
        Integer sqlType = null;
        for (Event event : events) {
            byte[] payloadBytes = event.getBody();
            if (payloadBytes == null || payloadBytes.length == 0) {
                continue;
            }
            String payload = new String(payloadBytes);
            CSVRecord csvRecord = csvLineParser.parse(payload);
            if (colNames.size() != csvRecord.size()) {
                logger.debug("payload data {} doesn't match the fields mapping {} ", payload, colNames);
                continue;
            }
            Map<String, String> data = new HashMap<String, String>();
            for (int i = 0; i < csvRecord.size(); i++) {
                data.put(colNames.get(i), csvRecord.get(i));
            }
            Collection<String> values = data.values();
            if (values.contains(null)) {
                logger.debug("payload data {} doesn't match the fields mapping {} ", payload, colNames);
                continue;
            }
            int index = 1;
            int offset = 0;
            for (int i = 0; i < colNames.size(); i++, offset++) {
                if (columnMetadata[offset] == null) {
                    continue;
                }
                String colName = colNames.get(i);
                value = data.get(colName);
                sqlType = columnMetadata[offset].getSqlType();
                PDataType pDataType = PDataType.fromTypeId(sqlType);
                Object upsertValue;
                if (pDataType.isArrayType()) {
                    // Split the CSV field on the array delimiter and convert
                    // it into a JDBC array of the base type
                    String arrayJson = Arrays.toString(value.split(csvArrayDelimiter));
                    JSONArray jsonArray = new JSONArray(new JSONTokener(arrayJson));
                    Object[] vals = new Object[jsonArray.length()];
                    for (int x = 0; x < jsonArray.length(); x++) {
                        vals[x] = jsonArray.get(x);
                    }
                    String baseTypeSqlName = PDataType.arrayBaseType(pDataType).getSqlTypeName();
                    Array array = connection.createArrayOf(baseTypeSqlName, vals);
                    upsertValue = pDataType.toObject(array, pDataType);
                } else {
                    upsertValue = pDataType.toObject(value);
                }
                if (upsertValue != null) {
                    colUpsert.setObject(index++, upsertValue, sqlType);
                } else {
                    colUpsert.setNull(index++, sqlType);
                }
            }
            // Add headers if necessary
            Map<String, String> headerValues = event.getHeaders();
            for (int i = 0; i < headers.size(); i++, offset++) {
                String headerName = headers.get(i);
                String headerValue = headerValues.get(headerName);
                sqlType = columnMetadata[offset].getSqlType();
                Object upsertValue = PDataType.fromTypeId(sqlType).toObject(headerValue);
                if (upsertValue != null) {
                    colUpsert.setObject(index++, upsertValue, sqlType);
                } else {
                    colUpsert.setNull(index++, sqlType);
                }
            }
            if (autoGenerateKey) {
                sqlType = columnMetadata[offset].getSqlType();
                String generatedRowValue = this.keyGenerator.generate();
                Object rowkeyValue = PDataType.fromTypeId(sqlType).toObject(generatedRowValue);
                colUpsert.setObject(index++, rowkeyValue, sqlType);
            }
            colUpsert.execute();
        }
        connection.commit();
    } catch (Exception ex) {
        logger.error("An error {} occurred during persisting the event ", ex.getMessage());
        // Chain the original exception so the root cause is not lost
        throw new SQLException(ex.getMessage(), ex);
    } finally {
        if (wasAutoCommit) {
            connection.setAutoCommit(true);
        }
    }
}
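The csvLineParser above parses one event body per call. A hedged sketch of such a single-line parse helper built on the same commons-csv API (the real phoenix-flume parser may configure its CSVFormat with the custom delimiter, quote, and escape characters from the serializer's configuration; CSVFormat.DEFAULT here is an assumption):
// Sketch of a single-line CSV parse helper in the spirit of csvLineParser.
// A CSVRecord holds its values as strings, so it stays usable after the
// parser is closed.
static CSVRecord parseLine(String line) throws IOException {
    try (CSVParser p = new CSVParser(new StringReader(line), CSVFormat.DEFAULT)) {
        Iterator<CSVRecord> it = p.iterator();
        return it.hasNext() ? it.next() : null;
    }
}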