Use of org.jkiss.dbeaver.model.impl.local.LocalStatement in project dbeaver by dbeaver.
The class DataImporterCSV, method runImport:
@Override
public void runImport(@NotNull DBRProgressMonitor monitor, @NotNull DBPDataSource streamDataSource, @NotNull InputStream inputStream, @NotNull IDataTransferConsumer consumer) throws DBException {
    IStreamDataImporterSite site = getSite();
    StreamEntityMapping entityMapping = site.getSourceObject();
    Map<String, Object> properties = site.getProcessorProperties();
    HeaderPosition headerPosition = getHeaderPosition(properties);
    boolean emptyStringNull = CommonUtils.getBoolean(properties.get(PROP_EMPTY_STRING_NULL), false);
    String nullValueMark = CommonUtils.toString(properties.get(PROP_NULL_STRING));

    DBCExecutionContext context = streamDataSource.getDefaultInstance().getDefaultContext(monitor, false);
    try (DBCSession producerSession = context.openSession(monitor, DBCExecutionPurpose.UTIL, "Transfer stream data")) {
        // LocalStatement is a placeholder: the query text is never executed against
        // a database, it only backs the synthetic stream result set below.
        LocalStatement localStatement = new LocalStatement(producerSession, "SELECT * FROM Stream");
        StreamTransferResultSet resultSet = new StreamTransferResultSet(producerSession, localStatement, entityMapping);

        consumer.fetchStart(producerSession, resultSet, -1, -1);
        applyTransformHints(resultSet, consumer, properties, PROP_TIMESTAMP_FORMAT, PROP_TIMESTAMP_ZONE);

        try (Reader reader = openStreamReader(inputStream, properties)) {
            try (CSVReader csvReader = openCSVReader(reader, properties)) {
                int maxRows = site.getSettings().getMaxRows();
                int targetAttrSize = entityMapping.getStreamColumns().size();
                boolean headerRead = false;
                for (int lineNum = 0; ; ) {
                    if (monitor.isCanceled()) {
                        break;
                    }
                    String[] line = csvReader.readNext();
                    if (line == null) {
                        // End of input
                        break;
                    }
                    if (line.length == 0) {
                        continue;
                    }
                    if (headerPosition != HeaderPosition.none && !headerRead) {
                        // First line is a header
                        headerRead = true;
                        continue;
                    }
                    if (maxRows > 0 && lineNum >= maxRows) {
                        break;
                    }
                    if (line.length < targetAttrSize) {
                        // Stream row may be shorter than header: pad it with nulls
                        String[] newLine = new String[targetAttrSize];
                        System.arraycopy(line, 0, newLine, 0, line.length);
                        for (int i = line.length; i < targetAttrSize; i++) {
                            newLine[i] = null;
                        }
                        line = newLine;
                    }
                    if (emptyStringNull) {
                        // Map empty strings to SQL NULL if configured
                        for (int i = 0; i < line.length; i++) {
                            if ("".equals(line[i])) {
                                line[i] = null;
                            }
                        }
                    }
                    if (!CommonUtils.isEmpty(nullValueMark)) {
                        // Map the configured null marker to SQL NULL
                        for (int i = 0; i < line.length; i++) {
                            if (nullValueMark.equals(line[i])) {
                                line[i] = null;
                            }
                        }
                    }
                    resultSet.setStreamRow(line);
                    consumer.fetchRow(producerSession, resultSet);

                    lineNum++;
                    if (lineNum % 1000 == 0) {
                        monitor.subTask(lineNum + " rows processed");
                    }
                }
            }
        } catch (IOException e) {
            throw new DBException("IO error reading CSV", e);
        } finally {
            try {
                consumer.fetchEnd(producerSession, resultSet);
            } finally {
                consumer.close();
            }
        }
    }
}
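The padding block above can be written more compactly: a freshly allocated object array is already null-filled, so the explicit fill loop is a no-op, and Arrays.copyOf does the copy-and-pad in one call. Below is a minimal standalone sketch of the padding and null-normalization passes; the helper names padRow and normalizeNulls are illustrative, not DBeaver API.

import java.util.Arrays;

// Standalone sketch; padRow and normalizeNulls are illustrative names, not DBeaver API.
final class CsvRowUtils {

    // Pad a short row to the target column count. Arrays.copyOf null-fills
    // the new slots of an object array, matching the manual loop in runImport.
    static String[] padRow(String[] row, int targetWidth) {
        return row.length < targetWidth ? Arrays.copyOf(row, targetWidth) : row;
    }

    // Turn empty strings and/or a null marker (e.g. "\\N") into real nulls,
    // mirroring the PROP_EMPTY_STRING_NULL / PROP_NULL_STRING handling.
    static void normalizeNulls(String[] row, boolean emptyStringNull, String nullMark) {
        boolean hasMark = nullMark != null && !nullMark.isEmpty();
        for (int i = 0; i < row.length; i++) {
            if ((emptyStringNull && "".equals(row[i])) || (hasMark && nullMark.equals(row[i]))) {
                row[i] = null;
            }
        }
    }

    public static void main(String[] args) {
        String[] row = padRow(new String[]{"1", "", "\\N"}, 4);
        normalizeNulls(row, true, "\\N");
        System.out.println(Arrays.toString(row)); // [1, null, null, null]
    }
}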
The same runImport implementation also appears in project dbeaver by serge-rider; the listing is identical to the one above.
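Both copies rely on the same read-loop contract: CSVReader.readNext() returns one parsed record per call and null at end of input. Here is a minimal self-contained sketch of that loop shape, assuming opencsv 4.x on the classpath, where readNext() throws only IOException (opencsv 5.x adds CsvValidationException). This is a sketch, not DBeaver code.

import com.opencsv.CSVReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;

// Minimal sketch of the runImport read loop, assuming opencsv 4.x.
public class CsvLoopSketch {
    public static void main(String[] args) throws IOException {
        Reader input = new StringReader("id,name\n1,alice\n2,bob\n");
        try (CSVReader csv = new CSVReader(input)) {
            boolean headerRead = false;
            int rows = 0;
            String[] line;
            while ((line = csv.readNext()) != null) { // null signals end of input
                if (!headerRead) {                    // skip the header, as runImport does
                    headerRead = true;
                    continue;
                }
                rows++;
                if (rows % 1000 == 0) {               // periodic progress, like monitor.subTask
                    System.out.println(rows + " rows processed");
                }
            }
            System.out.println("total data rows: " + rows);
        }
    }
}

The try-with-resources block mirrors the nested try blocks in runImport, which guarantee the CSVReader and the underlying Reader are closed even when the consumer throws mid-transfer.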