Use of com.cubrid.common.ui.cubrid.table.dialog.imp.model.ImportRowData in project cubrid-manager by CUBRID.
The class ImportFromSQLRunnable, method doRun:
@Override
public void doRun() throws Exception {
    // FIXME move this logic to core module
    BufferedReader reader = null;
    try {
        File file = new File(tableConfig.getFilePath());
        reader = new BufferedReader(new InputStreamReader(
                new FileInputStream(file.getAbsoluteFile()), importConfig.getFilesCharset()));
        // Read and execute the script in chunks until the end flag is set or the import is canceled
        while (!end) {
            List<ImportRowData> sqlList = loadSQL(reader);
            if (sqlList.size() == 0) {
                continue;
            }
            executeSQL(conn, sqlList);
            if (isCanceled) {
                return;
            }
        }
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (Exception e) {
                LOGGER.error("", e);
            }
        }
    }
}
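The runnable above delegates the actual reading and execution to loadSQL and executeSQL, which are not shown here. As a rough, self-contained sketch of the same idea, the helper below streams a SQL script in a given charset and runs each semicolon-terminated statement over plain JDBC; the class name SqlScriptRunner, the runScript method, and the one-statement-per-semicolon-line rule are illustrative assumptions, not the project's implementation.

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

public final class SqlScriptRunner {
    private SqlScriptRunner() {
    }

    /** Reads the script line by line and executes each semicolon-terminated statement. */
    public static void runScript(Connection conn, String filePath, String charset)
            throws IOException, SQLException {
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(new FileInputStream(filePath), Charset.forName(charset)));
             Statement stmt = conn.createStatement()) {
            StringBuilder current = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                current.append(line).append('\n');
                if (line.trim().endsWith(";")) {   // crude statement boundary; real SQL needs a proper parser
                    stmt.execute(current.toString());
                    current.setLength(0);
                }
            }
        }
    }
}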
Use of com.cubrid.common.ui.cubrid.table.dialog.imp.model.ImportRowData in project cubrid-manager by CUBRID.
The class ImportFromXlsRunnable, method doRun:
/* (non-Javadoc)
 * @see com.cubrid.common.ui.cubrid.table.dialog.imp.progress.AbsImportDataThread#doRun()
 */
@Override
protected void doRun() throws Exception {
    // FIXME move this logic to core module
    if (pStmt == null) {
        handleEvent(new ImportDataFailedEvent(tableName, tableConfig.getLineCount(),
                tableConfig.getInsertDML(), "Invalid parameters."));
        return;
    }
    String fileName = tableConfig.getFilePath();
    boolean isFirstRowAsColumn = tableConfig.isFirstRowAsColumn();
    File parentFile;
    File file = new File(fileName);
    if (file.exists()) {
        parentFile = file.getParentFile();
    } else {
        parentFile = null;
    }
    // Skip the first row when it holds column names
    int start = 0;
    if (isFirstRowAsColumn) {
        start = 1;
    }
    try {
        XLSImportFileHandler fileHandler =
                (XLSImportFileHandler) ImportFileHandlerFactory.getHandler(fileName, importConfig);
        Sheet[] sheets = fileHandler.getSheets();
        ImportFileDescription fileDesc = getFileDescription(fileHandler);
        int currentRow = 0;
        List<ImportRowData> rowList = new ArrayList<ImportRowData>();
        for (int sheetNum = 0; sheetNum < sheets.length; sheetNum++) {
            int rows = fileDesc.getItemsNumberOfSheets().get(sheetNum);
            Sheet sheet = sheets[sheetNum];
            String[] rowContent = null;
            String[] patterns = null;
            ImportRowData rowData = null;
            String content = null;
            String pattern = null;
            for (int i = start; i < rows; i++) {
                boolean isSuccess = true;
                try {
                    // Read every cell of row i into rowContent/patterns
                    int columns = sheet.getColumns();
                    rowContent = new String[columns];
                    patterns = new String[columns];
                    Cell[] cells = sheet.getRow(i);
                    for (int k = 0; k < cells.length; k++) {
                        Cell cell = cells[k];
                        content = null;
                        pattern = null;
                        if (cell != null && !(cell instanceof EmptyCell)) {
                            content = cell.getContents();
                            CellFormat format = cell.getCellFormat();
                            if (format != null && format.getFormat() != null) {
                                pattern = format.getFormat().getFormatString();
                            }
                        }
                        rowContent[k] = content;
                        patterns[k] = pattern;
                    }
                    /* Process the row data */
                    rowData = processRowData(rowContent, patterns, currentRow, parentFile);
                    pStmt.addBatch();
                    rowList.add(rowData);
                    currentRow++;
                    /* Commit once a full batch has accumulated */
                    if (rowList.size() >= importConfig.getCommitLine()) {
                        commit(rowList);
                    }
                    if (isCanceled) {
                        return;
                    }
                } catch (SQLException ex) {
                    isSuccess = false;
                    LOGGER.debug(ex.getMessage());
                } catch (StopPerformException ex) {
                    isSuccess = false;
                    handleEvent(new ImportDataTableFailedEvent(tableName));
                    LOGGER.debug("Stop import by user setting.");
                    break;
                } catch (OutOfMemoryError error) {
                    throw new RuntimeException(error);
                } finally {
                    if (!isSuccess && rowData != null) {
                        rowData.setStatus(ImportStatus.STATUS_COMMIT_FAILED);
                        writeErrorLog(rowData);
                    }
                }
            }
        }
        // Commit any remaining rows
        if (rowList.size() > 0) {
            commit(rowList);
        }
    } catch (BiffException ex) {
        throw new RuntimeException(ex);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    } catch (OutOfMemoryError error) {
        throw new RuntimeException(error);
    }
}
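The Sheet, Cell, EmptyCell, CellFormat, and BiffException types above appear to come from the JExcelApi (jxl) library, which the runnable reaches through XLSImportFileHandler. The fragment below is a minimal, self-contained sketch of reading cell contents with jxl directly, assuming that library is the one in use; XlsDumpSketch and its dump method are illustrative names, and the sketch omits the format-pattern and batching logic of the real code.

import java.io.File;
import java.io.IOException;
import jxl.Cell;
import jxl.Sheet;
import jxl.Workbook;
import jxl.read.biff.BiffException;

public final class XlsDumpSketch {
    private XlsDumpSketch() {
    }

    /** Prints every cell of every sheet as tab-separated text. */
    public static void dump(String fileName) throws IOException, BiffException {
        Workbook workbook = Workbook.getWorkbook(new File(fileName));
        try {
            for (Sheet sheet : workbook.getSheets()) {
                for (int row = 0; row < sheet.getRows(); row++) {
                    Cell[] cells = sheet.getRow(row);
                    StringBuilder line = new StringBuilder();
                    for (Cell cell : cells) {
                        line.append(cell.getContents()).append('\t');
                    }
                    System.out.println(line);
                }
            }
        } finally {
            workbook.close();
        }
    }
}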
Use of com.cubrid.common.ui.cubrid.table.dialog.imp.model.ImportRowData in project cubrid-manager by CUBRID.
The class AbsImportRunnable, method processRowData:
protected ImportRowData processRowData(String[] columnArray, String[] columnPattern, int currentRow, File parentFile) throws StopPerformException {
    // FIXME move this logic to core module
    ImportRowData rowData = new ImportRowData(currentRow);
    ImportColumnData columnData = null;
    boolean isSuccess = false;
    try {
        for (int j = 0; j < tableConfig.getPstmList().size(); j++) {
            PstmtParameter pstmtParameter = tableConfig.getPstmList().get(j);
            int column = Integer.parseInt(pstmtParameter.getStringParamValue());
            String content = null;
            String pattern = null;
            if (columnArray.length > column) {
                content = columnArray[column];
            }
            if (columnPattern != null && columnPattern.length > column) {
                pattern = columnPattern[column];
            }
            /* Record the original data */
            columnData = new ImportColumnData(content);
            rowData.getColumnList().add(columnData);
            String dataType = DataType.getRealType(pstmtParameter.getDataType());
            Object value = getRealValueForImport(dataType, content, parentFile);
            try {
                PstmtParameter parameter = new PstmtParameter(pstmtParameter.getParamName(),
                        pstmtParameter.getParamIndex(), pstmtParameter.getDataType(), value);
                parameter.setCharSet(importConfig.getFilesCharset());
                if (StringUtil.isNotEmpty(pattern)) {
                    parameter.setDatePattern(pattern);
                }
                if (value instanceof File) {
                    parameter.setFileValue(true);
                }
                setPreparedStatementValue(pStmt, parameter, dbCharset);
                columnData.setStatus(ImportStatus.STATUS_FORMAT_SUCCESS);
                isSuccess = true;
            } catch (ParamSetException ex) {
                isSuccess = false;
                LOGGER.debug(ex.getMessage());
            } catch (SQLException ex) {
                isSuccess = false;
                LOGGER.debug(ex.getMessage());
            } finally {
                if (!isSuccess) {
                    // Binding failed: report the type error and bind NULL so the rest of the row can proceed
                    columnData.setStatus(ImportStatus.STATUS_FORMAT_FAILED);
                    dataTypeErrorHandling(getErrorMsg(currentRow, column, dataType));
                    PstmtParameter parameter = new PstmtParameter(pstmtParameter.getParamName(),
                            pstmtParameter.getParamIndex(), pstmtParameter.getDataType(), null);
                    parameter.setCharSet(importConfig.getFilesCharset());
                    try {
                        setPreparedStatementNULL(pStmt, parameter);
                    } catch (SQLException e) {
                        LOGGER.debug(e.getMessage());
                    }
                }
            }
        }
    } catch (OutOfMemoryError error) {
        throw new RuntimeException(error);
    }
    return rowData;
}
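processRowData maps each prepared-statement parameter to a source column index (taken from PstmtParameter.getStringParamValue()), converts the raw string to the column's declared type, and falls back to binding NULL when conversion or binding fails, so one bad cell does not abort the whole row. The sketch below shows that mapping-with-NULL-fallback idea using only standard JDBC; ColumnMapping and bindRow are hypothetical names, and only two JDBC types are handled to keep it short.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.List;

public final class RowBindingSketch {
    /** Maps a 1-based statement parameter to a 0-based source column of a given JDBC type. */
    public static final class ColumnMapping {
        final int paramIndex;
        final int sourceColumn;
        final int sqlType;   // a java.sql.Types constant

        public ColumnMapping(int paramIndex, int sourceColumn, int sqlType) {
            this.paramIndex = paramIndex;
            this.sourceColumn = sourceColumn;
            this.sqlType = sqlType;
        }
    }

    private RowBindingSketch() {
    }

    /** Binds one source row to the statement, falling back to NULL for unconvertible values. */
    public static void bindRow(PreparedStatement pstmt, String[] row, List<ColumnMapping> mappings)
            throws SQLException {
        for (ColumnMapping m : mappings) {
            String raw = m.sourceColumn < row.length ? row[m.sourceColumn] : null;
            try {
                if (raw == null) {
                    pstmt.setNull(m.paramIndex, m.sqlType);
                } else if (m.sqlType == Types.INTEGER) {
                    pstmt.setInt(m.paramIndex, Integer.parseInt(raw.trim()));
                } else {
                    pstmt.setString(m.paramIndex, raw);
                }
            } catch (NumberFormatException ex) {
                // Conversion failed: bind NULL, mirroring the setPreparedStatementNULL fallback above
                pstmt.setNull(m.paramIndex, m.sqlType);
            }
        }
    }
}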
Use of com.cubrid.common.ui.cubrid.table.dialog.imp.model.ImportRowData in project cubrid-manager by CUBRID.
The class ImportFromCsvRunnable, method doRun:
/* (non-Javadoc)
 * @see com.cubrid.common.ui.cubrid.table.dialog.imp.progress.AbsImportDataThread#doRun()
 */
@Override
protected void doRun() throws Exception {
    // FIXME move this logic to core module
    if (pStmt == null) {
        handleEvent(new ImportDataFailedEvent(tableName, tableConfig.getLineCount(),
                tableConfig.getInsertDML(), "Invalid parameters."));
        return;
    }
    File file = new File(tableConfig.getFilePath());
    boolean isFirstRowAsColumn = tableConfig.isFirstRowAsColumn();
    String fileCharset = importConfig.getFilesCharset();
    File parentFile;
    if (file.exists()) {
        parentFile = file.getParentFile();
    } else {
        parentFile = null;
    }
    CSVReader csvReader = null;
    int currentRow = 0;
    List<ImportRowData> rowList = new ArrayList<ImportRowData>();
    try {
        // Open the CSV in the configured charset, falling back to the platform default
        if (fileCharset == null || fileCharset.trim().length() == 0) {
            csvReader = new CSVReader(new FileReader(file));
        } else {
            csvReader = new CSVReader(new InputStreamReader(new FileInputStream(file), fileCharset));
        }
        // Skip the first row when it holds column names
        if (isFirstRowAsColumn) {
            csvReader.readNext();
            currentRow++;
        }
        String[] csvRow;
        ImportRowData rowData = null;
        while ((csvRow = csvReader.readNext()) != null) {
            boolean isSuccess = true;
            try {
                /* Process the row data */
                rowData = processRowData(csvRow, null, currentRow, parentFile);
                rowList.add(rowData);
                pStmt.addBatch();
                currentRow++;
                /* Commit once a full batch has accumulated */
                if (rowList.size() >= importConfig.getCommitLine()) {
                    commit(rowList);
                }
                if (isCanceled) {
                    return;
                }
            } catch (SQLException ex) {
                isSuccess = false;
                LOGGER.debug(ex.getMessage());
            } catch (StopPerformException ex) {
                isSuccess = false;
                handleEvent(new ImportDataTableFailedEvent(tableName));
                break;
            } finally {
                if (!isSuccess && rowData != null) {
                    rowData.setStatus(ImportStatus.STATUS_COMMIT_FAILED);
                    writeErrorLog(rowData);
                }
            }
        }
        // Commit any remaining rows
        if (rowList.size() > 0) {
            commit(rowList);
            rowList = null;
        }
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    } catch (OutOfMemoryError error) {
        throw new RuntimeException(error);
    } finally {
        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (IOException e) {
                LOGGER.error("", e);
            }
        }
    }
}
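Both the XLS and CSV runnables accumulate rows with pStmt.addBatch() and call commit(rowList) once importConfig.getCommitLine() rows are pending, then flush whatever remains at the end. A stripped-down version of that commit-every-N-rows pattern with plain JDBC is sketched below; BatchInsertSketch, the demo_table schema, and BATCH_SIZE are assumptions for illustration, and the real code additionally records per-row ImportStatus and writes error logs.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public final class BatchInsertSketch {
    private static final int BATCH_SIZE = 1000;   // analogous to importConfig.getCommitLine()

    private BatchInsertSketch() {
    }

    /** Inserts all rows, executing and committing the batch every BATCH_SIZE rows. */
    public static void insertAll(Connection conn, List<String[]> rows) throws SQLException {
        String sql = "INSERT INTO demo_table (col1, col2) VALUES (?, ?)";   // assumed schema
        boolean oldAutoCommit = conn.getAutoCommit();
        conn.setAutoCommit(false);
        try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
            int pending = 0;
            for (String[] row : rows) {
                pstmt.setString(1, row[0]);
                pstmt.setString(2, row[1]);
                pstmt.addBatch();
                if (++pending >= BATCH_SIZE) {   // flush a full batch
                    pstmt.executeBatch();
                    conn.commit();
                    pending = 0;
                }
            }
            if (pending > 0) {                   // flush the tail
                pstmt.executeBatch();
                conn.commit();
            }
        } finally {
            conn.setAutoCommit(oldAutoCommit);
        }
    }
}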