Use of com.cubrid.common.core.reader.CSVReader in project cubrid-manager by CUBRID:
class PstmtDataTask, method getRowParameterListFromCSV.
/**
 * Get the parameter of PSTMT list from CSV file.
 *
 * <p>Each CSV data row yields one {@code List<PstmtParameter>}. The template
 * parameters in {@code parameterList} carry the source CSV column index in
 * their string parameter value. Cells that fail type formatting are reported
 * via {@code dataTypeErrorHandling} and imported as NULL.
 *
 * @return List&lt;List&lt;PstmtParameter&gt;&gt; one parameter list per CSV data row
 */
private List<List<PstmtParameter>> getRowParameterListFromCSV() {
	// FIXME move this logic to core module
	List<List<PstmtParameter>> rowParaList = new ArrayList<List<PstmtParameter>>();
	File file = new File(fileName);
	CSVReader csvReader = null;
	try {
		// Fall back to the platform default charset when none is configured.
		if (fileCharset == null || fileCharset.trim().length() == 0) {
			csvReader = new CSVReader(new FileReader(file));
		} else {
			csvReader = new CSVReader(new InputStreamReader(new FileInputStream(file), fileCharset));
		}
		int currentRow = 0;
		if (isFirstRowAsColumn) {
			csvReader.readNext(); // skip the header row
			currentRow++;
		}
		String[] cvsRow;
		while ((cvsRow = csvReader.readNext()) != null) {
			List<PstmtParameter> paraList = new ArrayList<PstmtParameter>();
			for (int j = 0; j < parameterList.size(); j++) {
				PstmtParameter pstmtParameter = parameterList.get(j);
				PstmtParameter newParam = new PstmtParameter(pstmtParameter.getParamName(),
						pstmtParameter.getParamIndex(), pstmtParameter.getDataType(), null);
				// The template's string value holds the CSV column index for this parameter.
				int column = Integer.parseInt(pstmtParameter.getStringParamValue());
				String content = null;
				if (cvsRow.length > column) {
					content = cvsRow[column];
				}
				String dataType = DataType.getRealType(pstmtParameter.getDataType());
				content = FieldHandlerUtils.getRealValueForImport(dataType, content, parentFile);
				// Reuse the already-resolved real type instead of recomputing it
				// (consistent with executeFromCSV).
				FormatDataResult formatDataResult = DBAttrTypeFormatter.format(dataType, content, false, dbCharset, true);
				if (formatDataResult.isSuccess()) {
					newParam.setParamValue(content);
				} else {
					// Report the bad cell, then import NULL in its place.
					dataTypeErrorHandling(getErrorMsg(currentRow, column, dataType));
					newParam.setParamValue(null);
				}
				newParam.setCharSet(fileCharset);
				paraList.add(newParam);
			}
			rowParaList.add(paraList);
			currentRow++;
		}
	} catch (IOException ex) {
		throw new RuntimeException(ex);
	} catch (DataFormatException ex) {
		throw new RuntimeException(ex);
	} catch (OutOfMemoryError error) {
		throw new RuntimeException(error);
	} finally {
		if (csvReader != null) {
			try {
				csvReader.close();
			} catch (IOException e) {
				LOGGER.error("", e);
			}
		}
	}
	return rowParaList;
}
Use of com.cubrid.common.core.reader.CSVReader in project cubrid-manager by CUBRID:
class PstmtDataTask, method executeFromCSV.
/**
 *
 * Do with data from CSV file
 *
 * Streams rows from the configured CSV file, binds each row's cells to the
 * prepared statement parameters, adds each bound row to the batch, and
 * commits every commitLineCountOnce rows plus a final partial chunk.
 * Rows up to and including startRow are skipped and at most rowCount rows
 * are imported.
 *
 * @param monitor IProgressMonitor
 */
private void executeFromCSV(IProgressMonitor monitor) {
// FIXME move this logic to core module
File file = new File(fileName);
CSVReader csvReader = null;
try {
// Fall back to the platform default charset when none is configured.
if (fileCharset == null || fileCharset.trim().length() == 0) {
csvReader = new CSVReader(new FileReader(file));
} else {
csvReader = new CSVReader(new InputStreamReader(new FileInputStream(file), fileCharset));
}
// Skip the header row. NOTE(review): unlike getRowParameterListFromCSV,
// the header is not counted into currentRow here.
if (isFirstRowAsColumn) {
csvReader.readNext();
}
int currentRow = 0;
// rowNum counts rows read from the file; currentRow counts rows imported.
int rowNum = 0;
String[] cvsRow;
while ((cvsRow = csvReader.readNext()) != null && currentRow < rowCount) {
rowNum++;
// Skip rows up to and including startRow.
if (startRow >= rowNum) {
continue;
}
for (int j = 0; j < parameterList.size(); j++) {
PstmtParameter pstmtParameter = parameterList.get(j);
// The template's string value holds the CSV column index for this parameter.
int column = Integer.parseInt(pstmtParameter.getStringParamValue());
String content = null;
if (cvsRow.length > column) {
content = cvsRow[column];
}
String dataType = DataType.getRealType(pstmtParameter.getDataType());
content = FieldHandlerUtils.getRealValueForImport(dataType, content, parentFile);
FormatDataResult formatDataResult = DBAttrTypeFormatter.format(dataType, content, false, dbCharset, true);
if (formatDataResult.isSuccess()) {
PstmtParameter parameter = new PstmtParameter(pstmtParameter.getParamName(), pstmtParameter.getParamIndex(), pstmtParameter.getDataType(), content);
parameter.setCharSet(fileCharset);
FieldHandlerUtils.setPreparedStatementValue(parameter, pStmt, dbCharset);
} else {
// Report the bad cell (shift row number when a header row exists), then bind NULL.
int row = isFirstRowAsColumn ? currentRow + 1 : currentRow;
dataTypeErrorHandling(getErrorMsg(row, column, dataType));
PstmtParameter parameter = new PstmtParameter(pstmtParameter.getParamName(), pstmtParameter.getParamIndex(), pstmtParameter.getDataType(), null);
parameter.setCharSet(fileCharset);
FieldHandlerUtils.setPreparedStatementValue(parameter, pStmt, dbCharset);
}
}
if (pStmt != null) {
pStmt.addBatch();
monitor.worked(PROGRESS_ROW);
workedProgress += PROGRESS_ROW;
}
currentRow++;
// Commit a full chunk whenever the imported-row count hits the threshold.
if (currentRow > 0 && currentRow % commitLineCountOnce == 0) {
commit(monitor, currentRow);
}
if (isCancel) {
return;
}
}
// Commit the remaining rows of a partial final chunk.
if (currentRow > 0 && currentRow % commitLineCountOnce > 0) {
commit(monitor, currentRow);
}
} catch (SQLException ex) {
throw new RuntimeException(ex);
} catch (IOException ex) {
throw new RuntimeException(ex);
} catch (DataFormatException ex) {
throw new RuntimeException(ex);
} catch (OutOfMemoryError error) {
throw new RuntimeException(error);
} finally {
if (csvReader != null) {
try {
csvReader.close();
csvReader = null;
} catch (IOException e) {
LOGGER.error("", e);
}
}
}
}
Use of com.cubrid.common.core.reader.CSVReader in project cubrid-manager by CUBRID:
class ImportFromCsvRunnable, method doRun.
/* (non-Javadoc)
 * @see com.cubrid.common.ui.cubrid.table.dialog.imp.progress.AbsImportDataThread#doRun()
 *
 * Imports all rows of the configured CSV file into the target table:
 * each row is bound via processRowData, added to the statement batch,
 * and committed in chunks of importConfig.getCommitLine() rows. Rows
 * that fail with SQLException are logged to the error log; a
 * StopPerformException aborts the whole table import.
 */
protected void doRun() throws Exception {
	// FIXME move this logic to core module
	if (pStmt == null) {
		handleEvent(new ImportDataFailedEvent(tableName, tableConfig.getLineCount(),
				tableConfig.getInsertDML(), "Invalid parameters."));
		return;
	}
	File file = new File(tableConfig.getFilePath());
	boolean isFirstRowAsColumn = tableConfig.isFirstRowAsColumn();
	String fileCharset = importConfig.getFilesCharset();
	// Parent directory is used to resolve file references inside cell values.
	File parentFile;
	if (file.exists()) {
		parentFile = file.getParentFile();
	} else {
		parentFile = null;
	}
	CSVReader csvReader = null;
	int currentRow = 0;
	List<ImportRowData> rowList = new ArrayList<ImportRowData>();
	try {
		// Fall back to the platform default charset when none is configured.
		if (fileCharset == null || fileCharset.trim().length() == 0) {
			csvReader = new CSVReader(new FileReader(file));
		} else {
			csvReader = new CSVReader(new InputStreamReader(new FileInputStream(file), fileCharset));
		}
		if (isFirstRowAsColumn) {
			csvReader.readNext(); // skip the header row
			currentRow++;
		}
		String[] cvsRow;
		ImportRowData rowData = null;
		while ((cvsRow = csvReader.readNext()) != null) {
			boolean isSuccess = true;
			try {
				/* Process the row data */
				rowData = processRowData(cvsRow, null, currentRow, parentFile);
				rowList.add(rowData);
				pStmt.addBatch();
				currentRow++;
				/* Commit once the pending batch reaches the configured size */
				if (rowList.size() >= importConfig.getCommitLine()) {
					commit(rowList);
				}
				if (isCanceled) {
					return;
				}
			} catch (SQLException ex) {
				isSuccess = false;
				LOGGER.debug(ex.getMessage());
			} catch (StopPerformException ex) {
				isSuccess = false;
				handleEvent(new ImportDataTableFailedEvent(tableName));
				break;
			} finally {
				// Guard against rowData being null (or stale from a prior
				// iteration) when processRowData itself threw before
				// producing a row — the original code NPE'd here on a
				// first-row failure.
				if (!isSuccess && rowData != null) {
					rowData.setStatus(ImportStatus.STATUS_COMMIT_FAILED);
					writeErrorLog(rowData);
				}
			}
		}
		// Commit the remaining rows of a partial final chunk.
		if (rowList.size() > 0) {
			commit(rowList);
			rowList = null;
		}
	} catch (IOException ex) {
		throw new RuntimeException(ex);
	} catch (OutOfMemoryError error) {
		throw new RuntimeException(error);
	} finally {
		if (csvReader != null) {
			try {
				csvReader.close();
			} catch (IOException e) {
				LOGGER.error("", e);
			}
		}
	}
}
Use of com.cubrid.common.core.reader.CSVReader in project cubrid-manager by CUBRID:
class CSVImportFileHandler, method getSourceFileInfo.
/**
 * Get the source file information
 *
 * Scans the CSV file under a busy-cursor progress runnable, collecting the
 * first row's cells as column names and counting the total number of rows
 * (the first row is included in the count).
 *
 * @return ImportFileDescription
 * @throws Exception in process.
 */
public ImportFileDescription getSourceFileInfo() throws Exception {
// FIXME move this logic to core module
final List<String> colsList = new ArrayList<String>();
final List<Integer> itemsNumberOfSheets = new ArrayList<Integer>();
final ImportFileDescription importFileDescription = new ImportFileDescription(0, 1, colsList);
importFileDescription.setItemsNumberOfSheets(itemsNumberOfSheets);
IRunnableWithProgress runnable = new IRunnableWithProgress() {
public void run(final IProgressMonitor monitor) {
monitor.beginTask("", IProgressMonitor.UNKNOWN);
int totalRowCount = 0;
CSVReader csvReader = null;
try {
// Fall back to the platform default charset when none is configured.
if (fileCharset == null || fileCharset.trim().length() == 0) {
csvReader = new CSVReader(new FileReader(fileName));
} else {
csvReader = new CSVReader(new InputStreamReader(new FileInputStream(fileName), fileCharset));
}
// First row supplies the candidate column names.
String[] cvsRow = csvReader.readNext();
if (cvsRow != null) {
totalRowCount++;
for (String title : cvsRow) {
colsList.add(title);
}
}
// Count the remaining rows, honoring cancellation.
while (!monitor.isCanceled() && csvReader.readNext() != null) {
totalRowCount++;
}
itemsNumberOfSheets.add(Integer.valueOf(totalRowCount));
if (monitor.isCanceled()) {
throw new InterruptedException();
}
} catch (Exception e) {
LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
} finally {
// Publish whatever was gathered even on cancel/failure.
importFileDescription.setTotalCount(totalRowCount);
importFileDescription.setFirstRowCols(colsList);
importFileDescription.setItemsNumberOfSheets(itemsNumberOfSheets);
closeFile(csvReader);
monitor.done();
}
}
};
PlatformUI.getWorkbench().getProgressService().busyCursorWhile(runnable);
return importFileDescription;
}
Use of com.cubrid.common.core.reader.CSVReader in project cubrid-manager by CUBRID:
class DataType, method getCollectionValues.
/**
 * Return Object[] array value from a collection value based the given data
 * type, eg: data type: integer, collection value: {1,2,3} return Object[]:
 * Integer[]{1,2,3}
 *
 * <p>The collection literal is parsed as a single CSV record; the element
 * type is derived from the collection's inner type.
 *
 * @param type String The given type
 * @param value String The given value
 * @param isUseNULLValueSetting passed through to bit-value formatting
 * @return Object[] typed elements, or null when the inner type is unknown
 * @throws ParseException a possible exception
 * @throws NumberFormatException a possible exception
 */
public static Object[] getCollectionValues(String type, String value, boolean isUseNULLValueSetting) throws NumberFormatException, ParseException {
	String strs = value;
	String innerType = getTypeRemain(type);
	assert (innerType != null);
	if (innerType == null) {
		return null;
	}
	// Strip the surrounding braces of a {a,b,c} collection literal.
	if (value.startsWith("{") && value.endsWith("}")) {
		strs = value.substring(1, value.length() - 1);
	}
	CSVReader reader = new CSVReader(new StringReader(strs));
	String[] values = new String[0];
	try {
		String[] parsed = reader.readNext();
		// readNext may return null on empty input; keep the empty array then
		// (the original code NPE'd on values.length in that case).
		if (parsed != null) {
			values = parsed;
		}
	} catch (IOException ignored) {
		logger.error("", ignored);
	} finally {
		try {
			reader.close();
		} catch (IOException e) {
			logger.error("", e);
		}
	}
	// Allocate the result array with the element type matching innerType.
	Object[] ret = null;
	if (innerType.equalsIgnoreCase(DATATYPE_SMALLINT) || innerType.equalsIgnoreCase(DATATYPE_INTEGER)) {
		ret = new Integer[values.length];
	} else if (innerType.equalsIgnoreCase(DATATYPE_BIGINT)) {
		ret = new Long[values.length];
	} else if (innerType.startsWith("numeric(") && innerType.endsWith(",0)")) {
		ret = new Long[values.length];
	} else if (innerType.equalsIgnoreCase(DATATYPE_FLOAT)) {
		ret = new Double[values.length];
	} else if (innerType.equalsIgnoreCase(DATATYPE_DOUBLE) || innerType.equalsIgnoreCase(DATATYPE_MONETARY)) {
		ret = new Double[values.length];
	} else if (innerType.startsWith("numeric(") && !innerType.endsWith(",0)")) {
		ret = new Double[values.length];
	} else if (innerType.startsWith("character") || innerType.equalsIgnoreCase(DATATYPE_STRING)) {
		ret = new String[values.length];
	} else if (innerType.equalsIgnoreCase(DATATYPE_TIME)) {
		ret = new java.sql.Time[values.length];
	} else if (innerType.equalsIgnoreCase(DATATYPE_DATE)) {
		ret = new java.sql.Date[values.length];
	} else if (innerType.equalsIgnoreCase(DATATYPE_TIMESTAMP)) {
		ret = new java.sql.Timestamp[values.length];
	} else if (innerType.equalsIgnoreCase(DATATYPE_DATETIME)) {
		ret = new java.sql.Timestamp[values.length];
	} else {
		ret = new String[values.length];
	}
	// Convert each element; valueOf replaces the deprecated boxing constructors.
	for (int i = 0; i < values.length; i++) {
		if (innerType.equalsIgnoreCase(DATATYPE_SMALLINT) || innerType.equalsIgnoreCase(DATATYPE_INTEGER)) {
			ret[i] = Integer.valueOf(values[i].trim());
		} else if (innerType.equalsIgnoreCase(DATATYPE_BIGINT)) {
			ret[i] = Long.valueOf(values[i].trim());
		} else if (innerType.startsWith("numeric(") && innerType.endsWith(",0)")) {
			ret[i] = Long.valueOf(values[i].trim());
		} else if (innerType.equalsIgnoreCase(DATATYPE_FLOAT)) {
			ret[i] = Double.valueOf(values[i].trim());
		} else if (innerType.equalsIgnoreCase(DATATYPE_DOUBLE) || innerType.equalsIgnoreCase(DATATYPE_MONETARY)) {
			ret[i] = Double.valueOf(values[i].trim());
		} else if (innerType.startsWith("numeric(") && !innerType.endsWith(",0)")) {
			ret[i] = Double.valueOf(values[i].trim());
		} else if (innerType.startsWith("character") || innerType.equalsIgnoreCase(DATATYPE_STRING)) {
			ret[i] = values[i];
		} else if (innerType.equalsIgnoreCase(DATATYPE_TIME)) {
			ret[i] = java.sql.Time.valueOf(values[i].trim());
		} else if (innerType.equalsIgnoreCase(DATATYPE_DATE)) {
			ret[i] = java.sql.Date.valueOf(values[i].trim());
		} else if (innerType.equalsIgnoreCase(DATATYPE_TIMESTAMP)) {
			long time = DateUtil.getDatetime(values[i].trim());
			ret[i] = new java.sql.Timestamp(time);
		} else if (innerType.equalsIgnoreCase(DATATYPE_DATETIME)) {
			// Try to normalize to the canonical datetime format first.
			String formatValue = DateUtil.formatDateTime(values[i], DBAttrTypeFormatter.DATETIME_FORMAT);
			if (formatValue == null) {
				formatValue = values[i];
			}
			long time = DateUtil.getDatetime(formatValue);
			ret[i] = new java.sql.Timestamp(time);
		} else if (innerType.startsWith("bit(") || innerType.startsWith("bit varying(")) {
			ret[i] = DBAttrTypeFormatter.formatValue(innerType, values[i].trim(), isUseNULLValueSetting);
		} else {
			ret[i] = values[i].trim();
		}
	}
	return ret;
}
Aggregations