Use of com.cubrid.common.ui.cubrid.table.dialog.imp.model.ImportRowData in project cubrid-manager by CUBRID.
In the class AbsImportRunnable, the method writeErrorLog:
/**
 * Write the rows that failed to import to the error log file.
 *
 * @param rowList List<ImportRowData> the rows to be logged
 */
protected void writeErrorLog(List<ImportRowData> rowList) {
    // FIXME move this logic to core module
    if (errLogFolder == null) {
        return;
    }
    boolean isNeedHeader = false;
    if (rowList == null || rowList.size() == 0) {
        return;
    }
    boolean csvErrorLog = true;
    if (StringUtil.isNotEmpty(rowList.get(0).getSql())) {
        csvErrorLog = false;
    }
    if (errorLogWriter == null) {
        isNeedHeader = true;
        initErrorWriter(csvErrorLog);
    }
    try {
        /* Write the CSV header if needed */
        if (isNeedHeader && csvErrorLog) {
            int columnSize = 1;
            if (rowList.get(0).getColumnList().size() > 1) {
                columnSize = rowList.get(0).getColumnList().size();
            }
            errorLogWriter.write(getCSVHeader(columnSize));
            errorLogWriter.write(StringUtil.NEWLINE);
        }
        /* Write one error log entry per row */
        for (int i = 0; i < rowList.size(); i++) {
            ImportRowData row = rowList.get(i);
            if (row == null) {
                LOGGER.error("Index:" + i + " ImportRowData is null.");
                continue;
            }
            if (csvErrorLog) {
                errorLogWriter.write(getCSVErrorLog(row));
            } else {
                errorLogWriter.write(row.getSql());
            }
            errorLogWriter.write(StringUtil.NEWLINE);
        }
        errorLogWriter.flush();
    } catch (IOException e) {
        LOGGER.error(e.getMessage());
    }
}
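
The helpers getCSVHeader and getCSVErrorLog are not shown in this snippet. A minimal sketch of what getCSVHeader might look like, assuming it simply emits quoted placeholder column names, is:

// Hypothetical sketch only; the real getCSVHeader in cubrid-manager may differ.
// Builds a CSV header line such as "Column1","Column2","Column3" for the given width.
protected String getCSVHeader(int columnSize) {
    StringBuilder header = new StringBuilder();
    for (int i = 1; i <= columnSize; i++) {
        if (i > 1) {
            header.append(",");
        }
        header.append("\"Column").append(i).append("\"");
    }
    return header.toString();
}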
Use of com.cubrid.common.ui.cubrid.table.dialog.imp.model.ImportRowData in project cubrid-manager by CUBRID.
In the class ImportFromTxtRunnable, the method doRun:
/* (non-Javadoc)
 * @see com.cubrid.common.ui.cubrid.table.dialog.imp.progress.AbsImportDataThread#doRun()
 */
@Override
protected void doRun() throws Exception {
    // FIXME move this logic to core module
    if (pStmt == null) {
        handleEvent(new ImportDataFailedEvent(tableName, tableConfig.getLineCount(),
                tableConfig.getInsertDML(), "Invalid parameters."));
        return;
    }
    File file = new File(tableConfig.getFilePath());
    boolean isFirstRowAsColumn = tableConfig.isFirstRowAsColumn();
    File parentFile;
    if (file.exists()) {
        parentFile = file.getParentFile();
    } else {
        parentFile = null;
    }
    int currentRow = 0;
    TxtReader txtReader = null;
    List<ImportRowData> rowList = new ArrayList<ImportRowData>();
    try {
        if (importConfig.getFilesCharset() == null
                || importConfig.getFilesCharset().trim().length() == 0) {
            txtReader = new TxtReader(new FileReader(file),
                    importConfig.getColumnDelimiter(), importConfig.getRowDelimiter());
        } else {
            txtReader = new TxtReader(
                    new InputStreamReader(new FileInputStream(file), importConfig.getFilesCharset()),
                    importConfig.getColumnDelimiter(), importConfig.getRowDelimiter());
        }
        /* Skip the header row if the first row holds column names */
        if (isFirstRowAsColumn) {
            txtReader.readNextRow();
            currentRow++;
        }
        String[] txtRow;
        ImportRowData rowData = null;
        while ((txtRow = txtReader.readNextRow()) != null) {
            boolean isSuccess = true;
            try {
                if (txtRow.length == 0) {
                    continue;
                }
                /* Process the row data */
                rowData = processRowData(txtRow, null, currentRow, parentFile);
                rowList.add(rowData);
                pStmt.addBatch();
                currentRow++;
                /* Commit once the configured batch size is reached */
                if (rowList.size() >= importConfig.getCommitLine()) {
                    commit(rowList);
                }
                if (isCanceled) {
                    return;
                }
            } catch (SQLException ex) {
                isSuccess = false;
                LOGGER.debug(ex.getMessage());
            } catch (StopPerformException ex) {
                isSuccess = false;
                handleEvent(new ImportDataTableFailedEvent(tableName));
                LOGGER.debug("Import stopped by user setting.");
                break;
            } catch (OutOfMemoryError error) {
                throw new RuntimeException(error);
            } finally {
                if (!isSuccess && rowData != null) {
                    rowData.setStatus(ImportStatus.STATUS_COMMIT_FAILED);
                    writeErrorLog(rowData);
                }
            }
        }
        /* Commit the remaining rows */
        if (rowList.size() > 0) {
            commit(rowList);
            rowList = null;
        }
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    } catch (OutOfMemoryError error) {
        throw new RuntimeException(error);
    } finally {
        if (txtReader != null) {
            try {
                txtReader.close();
                txtReader = null;
            } catch (IOException e) {
                LOGGER.error("", e);
            }
        }
    }
}
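
Note that writeErrorLog is called here with a single ImportRowData, while the version shown earlier takes a list, so an overload presumably exists. A plausible single-row overload, sketched under the assumption that it simply wraps the row and delegates, would be:

// Hypothetical convenience overload; the real cubrid-manager method may differ.
protected void writeErrorLog(ImportRowData rowData) {
    if (rowData == null) {
        return;
    }
    List<ImportRowData> rowList = new ArrayList<ImportRowData>(1);
    rowList.add(rowData);
    writeErrorLog(rowList);  // reuse the list-based error logger shown above
}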
Use of com.cubrid.common.ui.cubrid.table.dialog.imp.model.ImportRowData in project cubrid-manager by CUBRID.
In the class ImportFromXlsxRunnable, the method doRun:
/* (non-Javadoc)
 * @see com.cubrid.common.ui.cubrid.table.dialog.imp.progress.AbsImportDataThread#doRun()
 */
@Override
protected void doRun() throws Exception {
    // FIXME move this logic to core module
    if (pStmt == null) {
        handleEvent(new ImportDataFailedEvent(tableName, tableConfig.getLineCount(),
                tableConfig.getInsertDML(), "Invalid parameters."));
        return;
    }
    String fileName = tableConfig.getFilePath();
    final File parentFile;
    File file = new File(fileName);
    if (file.exists()) {
        parentFile = file.getParentFile();
    } else {
        parentFile = null;
    }
    final XLSXImportFileHandler importFileHandler =
            (XLSXImportFileHandler) ImportFileHandlerFactory.getHandler(fileName, importConfig);
    final List<ImportRowData> rowList = new ArrayList<ImportRowData>();
    XlsxReaderHandler xlsxReader = new XlsxReaderHandler(importFileHandler) {
        boolean isFirstRowAsColumn = tableConfig.isFirstRowAsColumn();
        private String[] rowContentArray;
        private ImportRowData rowData = null;
        private boolean isFailed = false;

        public void operateRows(int sheetIndex, List<String> rowContentlist) {
            if (isFailed) {
                return;
            }
            /* Skip the title row and empty rows */
            if (currentRow == getTitleRow()) {
                return;
            }
            if (rowContentlist == null) {
                return;
            }
            rowContentArray = new String[rowContentlist.size()];
            rowContentlist.toArray(rowContentArray);
            boolean isSuccess = true;
            try {
                /* Process the row data */
                rowData = processRowData(rowContentArray, null, currentRow, parentFile);
                rowList.add(rowData);
                pStmt.addBatch();
                importedRow++;
                /* Commit once the configured batch size is reached */
                if (rowList.size() >= importConfig.getCommitLine()) {
                    commit(rowList);
                }
                if (isCanceled) {
                    return;
                }
            } catch (SQLException ex) {
                isSuccess = false;
                LOGGER.debug(ex.getMessage());
            } catch (StopPerformException ex) {
                isSuccess = false;
                handleEvent(new ImportDataTableFailedEvent(tableName));
                LOGGER.debug("Stopped by user setting.");
                isFailed = true;
            } catch (OutOfMemoryError error) {
                throw new RuntimeException(error);
            } finally {
                if (!isSuccess && rowData != null) {
                    rowData.setStatus(ImportStatus.STATUS_COMMIT_FAILED);
                    writeErrorLog(rowData);
                }
            }
        }

        public void startDocument() {
            if (isFirstRowAsColumn) {
                setTitleRow(0);
            }
        }
    };
    xlsxReader.process(fileName);
    /* Commit the remaining rows */
    if (rowList.size() > 0) {
        commit(rowList);
    }
}
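
The commit(List<ImportRowData>) method used by both the text and XLSX importers is inherited from the abstract import thread and is not shown here (it is distinct from the ImportFromSQLRunnable.commit shown further below). A rough sketch of the batch-commit pattern it implies, with assumed field and constant names, might look like this:

// Hypothetical sketch of the batch-commit pattern; NOT the actual AbsImportDataThread.commit.
protected void commit(List<ImportRowData> rowList) {
    try {
        pStmt.executeBatch();    // flush the queued INSERT parameters
        conn.commit();           // assumed connection field on the import thread
        handleEvent(new ImportDataSuccessEvent(tableName, rowList.size()));
    } catch (SQLException ex) {
        QueryUtil.rollback(conn);
        for (ImportRowData row : rowList) {
            row.setStatus(ImportStatus.STATUS_COMMIT_FAILED);
        }
        writeErrorLog(rowList);  // reuse the list-based error logger shown above
    } finally {
        rowList.clear();         // start the next batch
    }
}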
Use of com.cubrid.common.ui.cubrid.table.dialog.imp.model.ImportRowData in project cubrid-manager by CUBRID.
In the class ImportFromSQLRunnable, the method loadSQL:
/**
 * Load SQL statements from the SQL file through a BufferedReader.
 *
 * @param reader BufferedReader the reader on the SQL file
 * @return List<ImportRowData> the statements read in this batch
 */
protected List<ImportRowData> loadSQL(BufferedReader reader) {
    // FIXME move this logic to core module
    List<ImportRowData> sqlList = new ArrayList<ImportRowData>();
    try {
        String tempString = null;
        // line number where a statement that spans multiple lines begins
        int separateSQLLineNumber = lineNumber;
        String preString = "";
        // whether we are currently inside a /* ... */ block comment
        boolean parseCommentFlag = false;
        int sizeCount = 0;
        while ((tempString = reader.readLine()) != null) {
            sizeCount += tempString.getBytes().length;
            tempString = tempString.trim();
            /* Skip block comments */
            if (parseCommentFlag && tempString.endsWith("*/")) {
                parseCommentFlag = false;
                lineNumber++;
                continue;
            } else if (parseCommentFlag) {
                lineNumber++;
                continue;
            }
            if (tempString.startsWith("/*")) {
                if (tempString.endsWith("*/")) {
                    lineNumber++;
                    continue;
                } else {
                    parseCommentFlag = true;
                    lineNumber++;
                    continue;
                }
            }
            /* Skip empty lines and line comments */
            if (StringUtil.isEmpty(tempString) || tempString.startsWith("--") || tempString.startsWith("//")) {
                lineNumber++;
                continue;
            }
            /* Prepend the unfinished statement carried over from previous lines */
            if (StringUtil.isNotEmpty(preString)) {
                tempString = preString + tempString;
            }
            List<String[]> qList = StringUtil.extractQueries(tempString);
            if (qList.isEmpty()) {
                if (StringUtil.isEmpty(preString)) {
                    preString = preString + tempString + "\n";
                    separateSQLLineNumber = lineNumber;
                } else {
                    preString = tempString + "\n";
                }
            } else {
                for (int i = 0; i < qList.size(); i++) {
                    if (StringUtil.isEmpty(preString)) {
                        separateSQLLineNumber = lineNumber;
                    }
                    String[] o = qList.get(i);
                    String sql = o[0];
                    if (sql.endsWith(";")) {
                        ImportRowData rowData = new ImportRowData(separateSQLLineNumber);
                        rowData.setSql(sql);
                        rowData.setWorkSize(sizeCount / qList.size());
                        sqlList.add(rowData);
                        if (i == qList.size() - 1 && (Integer.valueOf(o[1]) + 1 < tempString.length())) {
                            preString = tempString.substring(Integer.valueOf(o[1]) + 1, tempString.length());
                        } else {
                            preString = "";
                        }
                    } else {
                        preString = preString.substring(Integer.valueOf(o[1]), tempString.length());
                        break;
                    }
                }
                sizeCount = 0;
                /* Stop once this batch has reached the commit count */
                if (sqlList.size() + 1 > commitCount) {
                    lineNumber++;
                    return sqlList;
                }
            }
            lineNumber++;
        }
    } catch (Exception e) {
        LOGGER.error("", e);
    }
    end = true;
    return sqlList;
}
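
loadSQL returns at most one commit batch per call and sets the end flag once the file is exhausted, so the surrounding run loop presumably reads and commits batch by batch. A hedged sketch of that driver (the actual doRun of ImportFromSQLRunnable is not shown here, so reader setup and field names are assumptions; the commit method it calls is the next snippet below):

// Hypothetical driver loop; the real ImportFromSQLRunnable.doRun may differ.
BufferedReader reader = null;
try {
    reader = new BufferedReader(new InputStreamReader(
            new FileInputStream(fileName), importConfig.getFilesCharset()));  // assumed fields
    while (!end && !isCanceled) {
        List<ImportRowData> rowList = loadSQL(reader);  // at most commitCount statements
        if (!rowList.isEmpty()) {
            commit(rowList);
        }
    }
} catch (IOException e) {
    LOGGER.error("", e);
} finally {
    if (reader != null) {
        try {
            reader.close();
        } catch (IOException e) {
            LOGGER.error("", e);
        }
    }
}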
Use of com.cubrid.common.ui.cubrid.table.dialog.imp.model.ImportRowData in project cubrid-manager by CUBRID.
In the class ImportFromSQLRunnable, the method commit:
/**
 * Commit the data.
 *
 * @param rowList List<ImportRowData> the rows to execute and commit
 */
protected void commit(List<ImportRowData> rowList) {
    // FIXME move this logic to core module
    if (stmt == null || conn == null) {
        return;
    }
    String sql = "";
    int importCount = 0;
    long currentRunCount = 0;
    int totalWorkedSize = 0;
    List<ImportRowData> batchDataList = new ArrayList<ImportRowData>();
    for (ImportRowData rowData : rowList) {
        batchDataList.add(rowData);
        sql = rowData.getSql();
        try {
            importCount++;
            currentRunCount++;
            totalWorkedSize += rowData.getWorkSize();
            stmt.execute(sql);
            /* Commit once the configured commit count is reached */
            if (importCount % commitCount == 0) {
                QueryUtil.commit(conn);
                ImportDataSuccessEvent successEvt = new ImportDataSuccessEvent(fileName, importCount);
                successEvt.setWorkedSize(totalWorkedSize);
                handleEvent(successEvt);
                LOGGER.debug("Committed : currentRunCount={}, importCount={}", currentRunCount, importCount);
                importCount = 0;
                totalWorkedSize = 0;
                batchDataList.clear();
            }
        } catch (Exception e) {
            /* Report every statement in the uncommitted batch as failed */
            for (ImportRowData batchData : batchDataList) {
                String tempSql = batchData.getSql();
                String errMessage = Messages.msgFailedByRollback;
                if (StringUtil.isEqual(tempSql, sql)) {
                    errMessage = e.getMessage();
                }
                ImportDataFailedEvent failedEvt = new ImportDataFailedEvent(fileName, 1, tempSql, errMessage);
                failedEvt.setWorkedSize(batchData.getWorkSize());
                handleEvent(failedEvt);
                writeErrorLog(batchData);
            }
            QueryUtil.rollback(conn);
            LOGGER.debug("Failed to execute SQL from the SQL file: {}, error: {}", sql, e);
            importCount = 0;
            totalWorkedSize = 0;
            batchDataList.clear();
        }
    }
    /* Commit the remaining statements */
    if (importCount > 0) {
        QueryUtil.commit(conn);
        ImportDataSuccessEvent successEvt = new ImportDataSuccessEvent(fileName, importCount);
        successEvt.setWorkedSize(totalWorkedSize);
        handleEvent(successEvt);
        LOGGER.debug("Committed : currentRunCount={}, importCount={}", currentRunCount, importCount);
        importCount = 0;
        totalWorkedSize = 0;
        batchDataList.clear();
    }
}
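
QueryUtil.commit and QueryUtil.rollback are thin wrappers used throughout cubrid-manager; their internals are not shown here. A hedged sketch of the wrapping they are assumed to do (logging rather than propagating the SQLException):

// Hypothetical sketch of the QueryUtil helpers; the real implementations may differ.
public static void commit(Connection conn) {
    if (conn == null) {
        return;
    }
    try {
        conn.commit();
    } catch (SQLException e) {
        LOGGER.error(e.getMessage(), e);
    }
}

public static void rollback(Connection conn) {
    if (conn == null) {
        return;
    }
    try {
        conn.rollback();
    } catch (SQLException e) {
        LOGGER.error(e.getMessage(), e);
    }
}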