Use of org.jumpmind.exception.IoException in the project symmetric-ds by JumpMind.
Example from the class FtpDataWriter, method createFile.
/**
 * Lazily opens a staging output file for the given table if one is not
 * already open. The file name combines the batch id, the fully qualified
 * table name, and the lower-cased format as the extension. For CSV output
 * a header row is written immediately after the file is created.
 */
protected void createFile(Table table, Batch batch) {
    String tableName = table.getFullyQualifiedTableName();
    if (fileInfoByTable.get(tableName) != null) {
        return; // a writer is already open for this table
    }
    try {
        FileInfo fileInfo = new FileInfo();
        String fileName = batch.getBatchId() + "-" + tableName + "." + format.name().toLowerCase();
        fileInfo.outputFile = new File(stagingDir, fileName);
        // make sure the staging directory chain exists before opening the writer
        fileInfo.outputFile.getParentFile().mkdirs();
        fileInfo.outputFileWriter = new BufferedWriter(new FileWriter(fileInfo.outputFile));
        fileInfoByTable.put(tableName, fileInfo);
        if (format == Format.CSV) {
            printCsvTableHeader();
        }
    } catch (IOException e) {
        // surface as the project's unchecked I/O exception
        throw new IoException(e);
    }
}
Use of org.jumpmind.exception.IoException in the project symmetric-ds by JumpMind.
Example from the class DataExtractorService, method transferFromStaging.
/**
 * Streams the staged contents of an extracted batch to the target writer.
 * <p>
 * For a retry, the staged file is replayed line by line until the batch
 * marker is reached, at which point a RETRY/COMMIT pair is substituted so
 * the target re-uses the data it already holds. Otherwise the staged
 * content is copied in chunks, optionally throttled to
 * {@code maxKBytesPerSec}, while the batch status and transfer statistics
 * are kept up to date.
 *
 * @param mode             extraction mode used when updating batch status
 * @param batchType        type of the batch being transferred (unused here
 *                         but part of the established signature)
 * @param batch            the outgoing batch whose staged data is sent
 * @param isRetry          true to replay the batch from the target's staging
 * @param stagedResource   source of the staged data; always closed and
 *                         dereferenced, and deleted when it is a memory
 *                         resource no longer in use
 * @param writer           destination writer; not closed by this method
 * @param context          data context (unused here but part of the signature)
 * @param maxKBytesPerSec  throttle limit in KB/s; null or non-positive
 *                         disables throttling
 */
protected void transferFromStaging(ExtractMode mode, BatchType batchType, OutgoingBatch batch, boolean isRetry, IStagedResource stagedResource, BufferedWriter writer, DataContext context, BigDecimal maxKBytesPerSec) {
    final int MAX_WRITE_LENGTH = 32768;
    BufferedReader reader = stagedResource.getReader();
    try {
        // retry the batch from the target's staging
        if (isRetry) {
            String line = null;
            while ((line = reader.readLine()) != null) {
                if (line.startsWith(CsvConstants.BATCH)) {
                    // Substitute a RETRY/COMMIT pair for the batch marker so the
                    // target replays the batch it already has staged.
                    writer.write(CsvConstants.RETRY + "," + batch.getBatchId());
                    writer.newLine();
                    writer.write(CsvConstants.COMMIT + "," + batch.getBatchId());
                    writer.newLine();
                    break;
                } else {
                    writer.write(line);
                    writer.newLine();
                }
            }
        } else {
            long totalCharsRead = 0, totalBytesRead = 0;
            int numCharsRead = 0, numBytesRead = 0;
            long startTime = System.currentTimeMillis(), ts = startTime, bts = startTime;
            boolean isThrottled = maxKBytesPerSec != null && maxKBytesPerSec.compareTo(BigDecimal.ZERO) > 0;
            long totalThrottleTime = 0;
            int bufferSize = MAX_WRITE_LENGTH;
            if (isThrottled) {
                // size the buffer to one second's worth of throttled throughput
                bufferSize = maxKBytesPerSec.multiply(new BigDecimal(1024)).intValue();
            }
            // Hoisted out of the copy loop: the parameter value is loop-invariant
            // and looking it up per chunk is wasted work.
            long batchStatusUpdateMillis = parameterService.getLong(ParameterConstants.OUTGOING_BATCH_UPDATE_STATUS_MILLIS);
            char[] buffer = new char[bufferSize];
            while ((numCharsRead = reader.read(buffer)) != -1) {
                writer.write(buffer, 0, numCharsRead);
                totalCharsRead += numCharsRead;
                if (Thread.currentThread().isInterrupted()) {
                    throw new IoException("This thread was interrupted");
                }
                if (System.currentTimeMillis() - ts > batchStatusUpdateMillis && batch.getStatus() != Status.SE && batch.getStatus() != Status.RS) {
                    changeBatchStatus(Status.SE, batch, mode);
                }
                if (System.currentTimeMillis() - ts > 60000) {
                    log.info("Batch '{}', for node '{}', for process 'send from stage' has been processing for {} seconds. " + "The following stats have been gathered: {}", new Object[] { batch.getBatchId(), batch.getNodeId(), (System.currentTimeMillis() - startTime) / 1000, "CHARS=" + totalCharsRead });
                    ts = System.currentTimeMillis();
                }
                // NOTE: getBytes() uses the platform default charset, matching the
                // original accounting behavior.
                int chunkBytes = new String(buffer, 0, numCharsRead).getBytes().length;
                // FIX: count each chunk exactly once. The previous throttled path
                // did "totalBytesRead += numBytesRead" with the *running* window
                // accumulator, over-reporting bytes sent to the statistics manager.
                totalBytesRead += chunkBytes;
                if (isThrottled) {
                    numBytesRead += chunkBytes;
                    if (numBytesRead >= bufferSize) {
                        // sleep off the difference between the time this window
                        // should have taken at the limit and the time it did take
                        long expectedMillis = (long) (((numBytesRead / 1024f) / maxKBytesPerSec.floatValue()) * 1000);
                        long actualMillis = System.currentTimeMillis() - bts;
                        if (actualMillis < expectedMillis) {
                            totalThrottleTime += expectedMillis - actualMillis;
                            Thread.sleep(expectedMillis - actualMillis);
                        }
                        numBytesRead = 0;
                        bts = System.currentTimeMillis();
                    }
                }
            }
            statisticManager.incrementDataSent(batch.getChannelId(), batch.getDataEventCount());
            statisticManager.incrementDataBytesSent(batch.getChannelId(), totalBytesRead);
            if (log.isDebugEnabled() && totalThrottleTime > 0) {
                log.debug("Batch '{}' for node '{}' took {}ms for {} bytes and was throttled for {}ms because limit is set to {} KB/s", batch.getBatchId(), batch.getNodeId(), (System.currentTimeMillis() - startTime), totalBytesRead, totalThrottleTime, maxKBytesPerSec);
            }
        }
        if (writer instanceof BatchBufferedWriter) {
            ((BatchBufferedWriter) writer).getBatchIds().add(batch.getBatchId());
        }
    } catch (Throwable t) {
        if (t instanceof InterruptedException) {
            // Preserve the interrupt status for callers further up the stack
            // before wrapping the exception.
            Thread.currentThread().interrupt();
        }
        throw new RuntimeException(t);
    } finally {
        // Closing the staged resource also releases the reader obtained above.
        stagedResource.close();
        stagedResource.dereference();
        if (!stagedResource.isFileResource() && !stagedResource.isInUse()) {
            stagedResource.delete();
        }
    }
}
Use of org.jumpmind.exception.IoException in the project symmetric-ds by JumpMind.
Example from the class CsvUtils, method escapeCsvData.
/**
 * Escapes a record of values as a single CSV line using the supplied
 * formatting options.
 *
 * @param data            the field values to write as one record
 * @param recordDelimiter record delimiter to use; {@code '\0'} keeps the
 *                        writer's default
 * @param textQualifier   text qualifier to force around every field;
 *                        {@code '\0'} leaves qualification disabled
 * @param escapeMode      one of the {@code CsvWriter} escape-mode constants
 * @param nullString      replacement text for null values; ignored when null
 * @return the escaped record as a string (platform default charset)
 * @throws IoException if the underlying writer fails
 */
public static String escapeCsvData(String[] data, char recordDelimiter, char textQualifier, int escapeMode, String nullString) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    CsvWriter writer = new CsvWriter(new OutputStreamWriter(out), ',');
    writer.setEscapeMode(escapeMode);
    if (recordDelimiter != '\0') {
        writer.setRecordDelimiter(recordDelimiter);
    }
    if (textQualifier != '\0') {
        writer.setTextQualifier(textQualifier);
        writer.setUseTextQualifier(true);
        writer.setForceQualifier(true);
    }
    if (nullString != null) {
        writer.setNullString(nullString);
    }
    try {
        writer.writeRecord(data, true);
    } catch (IOException e) {
        // FIX: propagate the cause instead of throwing a bare IoException,
        // which discarded the underlying failure entirely.
        throw new IoException(e);
    }
    // close() flushes the buffered record into the byte stream
    writer.close();
    return out.toString();
}
Use of org.jumpmind.exception.IoException in the project symmetric-ds by JumpMind.
Example from the class CsvUtils, overloaded method escapeCsvData.
/**
 * Escapes an array of values as CSV fields using backslash escaping with
 * every field force-qualified in double quotes.
 *
 * @param data the field values to escape
 * @return the escaped fields as a string (platform default charset)
 * @throws IoException if the underlying writer fails
 */
public static String escapeCsvData(String[] data) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    CsvWriter writer = new CsvWriter(new OutputStreamWriter(out), ',');
    writer.setEscapeMode(CsvWriter.ESCAPE_MODE_BACKSLASH);
    writer.setTextQualifier('\"');
    writer.setUseTextQualifier(true);
    writer.setForceQualifier(true);
    for (String s : data) {
        try {
            writer.write(s, true);
        } catch (IOException e) {
            // FIX: propagate the cause instead of throwing a bare IoException,
            // which discarded the underlying failure entirely.
            throw new IoException(e);
        }
    }
    // close() flushes the buffered fields into the byte stream
    writer.close();
    return out.toString();
}
Use of org.jumpmind.exception.IoException in the project symmetric-ds by JumpMind.
Example from the class XmlDataReader, method readNext.
/**
 * Advances the pull parser through the XML document and queues the parsed
 * objects (Batch, Table, CsvData) onto the {@code next} collection for the
 * reader to hand out. Recognizes {@code <database>}, {@code <table>},
 * {@code <table_data>}, {@code <row>}, and {@code <field>} elements.
 * Wraps parser I/O failures in the project's unchecked {@code IoException}.
 */
protected void readNext() {
    try {
        // accumulated column-name -> value pairs for the current <row>
        Map<String, String> rowData = new LinkedHashMap<String, String>();
        String columnName = null;
        CsvData data = null;
        Table table = null;
        String catalog = null;
        String schema = null;
        int eventType = parser.next();
        while (eventType != XmlPullParser.END_DOCUMENT) {
            switch(eventType) {
                case XmlPullParser.TEXT:
                    // text content belongs to the most recently opened <field>
                    if (columnName != null) {
                        rowData.put(columnName, parser.getText());
                        columnName = null;
                    }
                    break;
                case XmlPullParser.START_TAG:
                    String name = parser.getName();
                    if ("row".equalsIgnoreCase(name)) {
                        data = new CsvData();
                        // columns are re-derived from each row's fields, so
                        // clear any columns left over from the previous row
                        if (table != null) {
                            table.removeAllColumns();
                        }
                        data.setDataEventType(DataEventType.INSERT);
                    } else if ("field".equalsIgnoreCase(name)) {
                        boolean nullValue = false;
                        for (int i = 0; i < parser.getAttributeCount(); i++) {
                            String attributeName = parser.getAttributeName(i);
                            String attributeValue = parser.getAttributeValue(i);
                            if ("name".equalsIgnoreCase(attributeName)) {
                                columnName = attributeValue;
                            } else if ("xsi:nil".equalsIgnoreCase(attributeName)) {
                                nullValue = true;
                            }
                        }
                        // xsi:nil fields are recorded immediately as nulls;
                        // otherwise the value arrives in the next TEXT event
                        if (nullValue) {
                            rowData.put(columnName, null);
                            columnName = null;
                        }
                    } else if ("table_data".equalsIgnoreCase(name)) {
                        // data-only table: start a new batch and a bare Table
                        // carrying just the name attribute
                        Batch batch = new Batch();
                        batch.setBinaryEncoding(BinaryEncoding.BASE64);
                        next.add(batch);
                        table = new Table();
                        for (int i = 0; i < parser.getAttributeCount(); i++) {
                            String attributeName = parser.getAttributeName(i);
                            String attributeValue = parser.getAttributeValue(i);
                            if ("name".equalsIgnoreCase(attributeName)) {
                                table.setName(attributeValue);
                            }
                        }
                        next.add(table);
                    } else if ("table".equalsIgnoreCase(name)) {
                        // full table definition: queue a CREATE event carrying
                        // the table's XML schema wrapped in a one-table database
                        Batch batch = new Batch();
                        batch.setBinaryEncoding(BinaryEncoding.BASE64);
                        next.add(batch);
                        table = DatabaseXmlUtil.nextTable(parser);
                        next.add(table);
                        Database db = new Database();
                        db.setName("dbimport");
                        db.setCatalog(catalog);
                        db.setSchema(schema);
                        db.addTable(table);
                        String xml = DatabaseXmlUtil.toXml(db);
                        data = new CsvData(DataEventType.CREATE);
                        data.putCsvData(CsvData.ROW_DATA, CsvUtils.escapeCsvData(xml));
                        next.add(data);
                    } else if ("database".equalsIgnoreCase(name)) {
                        // remember catalog/schema for tables defined later
                        for (int i = 0; i < parser.getAttributeCount(); i++) {
                            String attributeName = parser.getAttributeName(i);
                            String attributeValue = parser.getAttributeValue(i);
                            if ("catalog".equalsIgnoreCase(attributeName)) {
                                catalog = attributeValue;
                            } else if ("schema".equalsIgnoreCase(attributeName)) {
                                schema = attributeValue;
                            }
                        }
                    }
                    break;
                case XmlPullParser.END_TAG:
                    name = parser.getName();
                    if ("row".equalsIgnoreCase(name)) {
                        // materialize the row: column order follows the
                        // insertion order of the LinkedHashMap
                        String[] columnNames = rowData.keySet().toArray(new String[rowData.keySet().size()]);
                        for (String colName : columnNames) {
                            table.addColumn(new Column(colName));
                        }
                        String[] columnValues = rowData.values().toArray(new String[rowData.values().size()]);
                        data.putParsedData(CsvData.ROW_DATA, columnValues);
                        // only re-queue the table when it differs from the one
                        // last handed out (this.table is reader state)
                        if (this.table == null || !this.table.equals(table)) {
                            next.add(table);
                        }
                        next.add(data);
                        rowData = new LinkedHashMap<String, String>();
                    } else if ("table_data".equalsIgnoreCase(name)) {
                        // NOTE(review): "batch" here cannot be the locals declared
                        // inside the START_TAG branches (they are out of scope), so
                        // it presumably resolves to a field on this reader — confirm
                        // that the field is actually assigned before this point.
                        if (batch != null) {
                            batch.setComplete(true);
                        }
                    } else if ("field".equalsIgnoreCase(name)) {
                        columnName = null;
                    }
                    break;
            }
            eventType = parser.next();
        }
    } catch (IOException ex) {
        throw new IoException(ex);
    } catch (XmlPullParserException ex) {
        throw new RuntimeException(ex);
    }
}
Aggregations