Use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.
From the class AbstractDdlTypesTest, the method testPlatformSpecificDdl:
@Test
public void testPlatformSpecificDdl() throws Exception {
    dropTable();
    createTable();
    Table fromDb1 = platform.readTableFromDatabase(null, null, tableName());
    assertNotNull(fromDb1);
    dropTable();
    // Every column read from the database should carry platform-specific metadata.
    Column[] columns1 = fromDb1.getColumns();
    for (Column column : columns1) {
        assertNotNull(column.findPlatformColumn(getName()));
    }
    // Round-trip the table definition through XML.
    String xml = DatabaseXmlUtil.toXml(fromDb1);
    log.info("XML generated for table:\n" + xml);
    StringReader reader = new StringReader(xml);
    Table fromXml = DatabaseXmlUtil.read(reader, false).getTable(0);
    assertNotNull(fromXml);
    for (Column column : fromXml.getColumns()) {
        assertNotNull("Expected " + getName() + " platform specific column information for " + column.getName(),
                column.findPlatformColumn(getName()));
    }
    // Recreate the table from the parsed XML and read it back for comparison.
    platform.alterTables(false, fromXml);
    Table fromDb2 = platform.readTableFromDatabase(null, null, tableName());
    assertNotNull("Could not find " + tableName() + " in the database", fromDb2);
    for (Column column1 : columns1) {
        PlatformColumn pColumn1 = column1.findPlatformColumn(getName());
        Column column2 = fromDb2.findColumn(column1.getName());
        assertNotNull(column2);
        PlatformColumn pColumn2 = column2.findPlatformColumn(getName());
        assertNotNull(pColumn2);
        assertEquals("Column types not equal for column " + column1.getName(), pColumn1.getType(), pColumn2.getType());
        assertEquals(pColumn1.getSize(), pColumn2.getSize());
        assertEquals(pColumn1.getDecimalDigits(), pColumn2.getDecimalDigits());
    }
}
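The test above relies on each Column carrying a per-database PlatformColumn entry keyed by the platform name. A minimal sketch of that pairing, assuming the standard addPlatformColumn and PlatformColumn setters in symmetric-ds (the "oracle" platform name and the column values are illustrative, not from the source):

import org.jumpmind.db.model.Column;
import org.jumpmind.db.model.PlatformColumn;

public class PlatformColumnSketch {
    public static void main(String[] args) {
        Column column = new Column("order_total");
        // Attach database-specific DDL details under a hypothetical "oracle" platform name.
        PlatformColumn oracle = new PlatformColumn();
        oracle.setName("oracle");
        oracle.setType("NUMBER");
        oracle.setSize(12);
        oracle.setDecimalDigits(2);
        column.addPlatformColumn(oracle);
        // findPlatformColumn(name) returns the entry registered under that platform name.
        PlatformColumn found = column.findPlatformColumn("oracle");
        System.out.println(found.getType() + "(" + found.getSize() + "," + found.getDecimalDigits() + ")");
    }
}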
Use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.
From the class CsvTableDataReader, the method init:
@Override
protected void init() {
    try {
        this.csvReader = CsvUtils.getCsvReader(reader);
        this.csvReader.setUseComments(true);
        this.csvReader.readHeaders();
        String[] columnNames = this.csvReader.getHeaders();
        for (String columnName : columnNames) {
            table.addColumn(new Column(columnName));
        }
    } catch (IOException e) {
        throw new IoException(e);
    }
}
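Here the reader derives the Table model straight from the CSV header row: one Column per header name. A self-contained sketch of the same idea, assuming the single-argument Table constructor (the header names and table name are illustrative):

import org.jumpmind.db.model.Column;
import org.jumpmind.db.model.Table;

public class CsvHeaderTableSketch {
    public static void main(String[] args) {
        // Stand-in for what csvReader.getHeaders() would return.
        String[] headers = { "id", "name", "email" };
        Table table = new Table("contact");
        for (String header : headers) {
            table.addColumn(new Column(header));
        }
        System.out.println(table.getColumnCount()); // 3
    }
}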
Use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.
From the class ExtractDataReader, the method enhanceWithLobsFromSourceIfNeeded:
protected CsvData enhanceWithLobsFromSourceIfNeeded(Table table, CsvData data) {
    if (this.currentSource.requiresLobsSelectedFromSource()
            && (data.getDataEventType() == DataEventType.UPDATE || data.getDataEventType() == DataEventType.INSERT)) {
        List<Column> lobColumns = platform.getLobColumns(table);
        if (lobColumns.size() > 0) {
            String[] columnNames = table.getColumnNames();
            String[] rowData = data.getParsedData(CsvData.ROW_DATA);
            Column[] orderedColumns = table.getColumns();
            Object[] objectValues = platform.getObjectValues(batch.getBinaryEncoding(), rowData, orderedColumns);
            Map<String, Object> columnDataMap = CollectionUtils.toMap(columnNames, objectValues);
            // Bind the primary key values as arguments for the per-column LOB selects.
            Column[] pkColumns = table.getPrimaryKeyColumns();
            ISqlTemplate sqlTemplate = platform.getSqlTemplate();
            Object[] args = new Object[pkColumns.length];
            for (int i = 0; i < pkColumns.length; i++) {
                args[i] = columnDataMap.get(pkColumns[i].getName());
            }
            for (Column lobColumn : lobColumns) {
                String sql = buildSelect(table, lobColumn, pkColumns);
                String valueForCsv = null;
                if (platform.isBlob(lobColumn.getMappedTypeCode())) {
                    byte[] binaryData = sqlTemplate.queryForBlob(sql, lobColumn.getJdbcTypeCode(), lobColumn.getJdbcTypeName(), args);
                    if (binaryData != null) {
                        // Encode the binary value to match the batch's binary encoding.
                        if (batch.getBinaryEncoding() == BinaryEncoding.BASE64) {
                            valueForCsv = new String(Base64.encodeBase64(binaryData));
                        } else if (batch.getBinaryEncoding() == BinaryEncoding.HEX) {
                            valueForCsv = new String(Hex.encodeHex(binaryData));
                        } else {
                            valueForCsv = new String(binaryData);
                        }
                        binaryData = null;
                    }
                } else {
                    valueForCsv = sqlTemplate.queryForClob(sql, lobColumn.getJdbcTypeCode(), lobColumn.getJdbcTypeName(), args);
                }
                // Overwrite the placeholder in the row with the freshly selected LOB value.
                int index = ArrayUtils.indexOf(columnNames, lobColumn.getName());
                rowData[index] = valueForCsv;
            }
            data.putParsedData(CsvData.ROW_DATA, rowData);
        }
    }
    return data;
}
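The encoding branch above is plain Apache Commons Codec. A standalone sketch of just that choice, assuming BinaryEncoding is the org.jumpmind.util enum used by the batch (class and method names here are illustrative):

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.jumpmind.util.BinaryEncoding;

public class LobEncodingSketch {
    // Mirrors the branch in enhanceWithLobsFromSourceIfNeeded.
    static String encodeForCsv(byte[] binaryData, BinaryEncoding encoding) {
        if (encoding == BinaryEncoding.BASE64) {
            return new String(Base64.encodeBase64(binaryData));
        } else if (encoding == BinaryEncoding.HEX) {
            return new String(Hex.encodeHex(binaryData));
        }
        return new String(binaryData); // NONE: raw bytes in the platform-default charset
    }

    public static void main(String[] args) {
        byte[] blob = { 0x01, 0x02, (byte) 0xff };
        System.out.println(encodeForCsv(blob, BinaryEncoding.HEX)); // 0102ff
    }
}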
Use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.
From the class ExtractDataReader, the method buildSelect:
protected String buildSelect(Table table, Column lobColumn, Column[] pkColumns) {
    StringBuilder sql = new StringBuilder("select ");
    DatabaseInfo dbInfo = platform.getDatabaseInfo();
    String quote = platform.getDdlBuilder().isDelimitedIdentifierModeOn() ? dbInfo.getDelimiterToken() : "";
    sql.append(quote);
    sql.append(lobColumn.getName());
    sql.append(quote);
    sql.append(" from ");
    sql.append(table.getQualifiedTableName(quote, dbInfo.getCatalogSeparator(), dbInfo.getSchemaSeparator()));
    sql.append(" where ");
    for (Column col : pkColumns) {
        sql.append(quote);
        sql.append(col.getName());
        sql.append(quote);
        sql.append("=? and ");
    }
    // Trim the trailing " and " left by the loop above.
    sql.delete(sql.length() - 5, sql.length());
    return sql.toString();
}
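For illustration, given a hypothetical table CUSTOMER with primary key columns (CUSTOMER_ID, SITE_ID), a LOB column PROFILE_IMAGE, and delimited-identifier mode on with a double-quote delimiter, buildSelect would produce:

select "PROFILE_IMAGE" from "CUSTOMER" where "CUSTOMER_ID"=? and "SITE_ID"=?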
Use of org.jumpmind.db.model.Column in project symmetric-ds by JumpMind.
From the class ProtocolDataReader, the method readNext:
public Object readNext() {
    try {
        Set<String> keys = null;
        String schemaName = null;
        String catalogName = null;
        String[] parsedOldData = null;
        long bytesRead = 0;
        Table table = null;
        while (tokens != null || csvReader.readRecord()) {
            lineNumber++;
            context.put(CTX_LINE_NUMBER, lineNumber);
            if (tokens == null) {
                tokens = csvReader.getValues();
            }
            bytesRead += logDebugAndCountBytes(tokens);
            if (batch != null) {
                statistics.get(batch).increment(DataReaderStatistics.READ_BYTE_COUNT, bytesRead);
                bytesRead = 0;
            }
            // A fully-parsed table is returned as soon as a non-table token appears.
            if (table != null && !(tokens[0].equals(CsvConstants.TABLE) || tokens[0].equals(CsvConstants.KEYS)
                    || tokens[0].equals(CsvConstants.COLUMNS))) {
                return table;
            }
            if (tokens[0].equals(CsvConstants.INSERT)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.INSERT);
                data.putParsedData(CsvData.ROW_DATA, CollectionUtils.copyOfRange(tokens, 1, tokens.length));
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.OLD)) {
                parsedOldData = CollectionUtils.copyOfRange(tokens, 1, tokens.length);
            } else if (tokens[0].equals(CsvConstants.UPDATE)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.UPDATE);
                int columnCount = context.getLastParsedTable().getColumnCount();
                if (tokens.length <= columnCount) {
                    String msg = String.format("Invalid state while parsing csv data. "
                            + "The number of columns (%d) reported for table '%s' doesn't match up with the token data: %s",
                            columnCount, context.getLastParsedTable().getFullyQualifiedTableName(), ArrayUtils.toString(tokens));
                    throw new IllegalStateException(msg);
                }
                // An update row carries the new values first, then the primary key values.
                data.putParsedData(CsvData.ROW_DATA, CollectionUtils.copyOfRange(tokens, 1, columnCount + 1));
                data.putParsedData(CsvData.PK_DATA, CollectionUtils.copyOfRange(tokens, columnCount + 1, tokens.length));
                data.putParsedData(CsvData.OLD_DATA, parsedOldData);
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.DELETE)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.DELETE);
                data.putParsedData(CsvData.PK_DATA, CollectionUtils.copyOfRange(tokens, 1, tokens.length));
                data.putParsedData(CsvData.OLD_DATA, parsedOldData);
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.BATCH)) {
                Batch batch = new Batch(batchType, Long.parseLong(tokens[1]), channelId, binaryEncoding, sourceNodeId, targetNodeId, false);
                statistics.put(batch, new DataReaderStatistics());
                tokens = null;
                return batch;
            } else if (tokens[0].equals(CsvConstants.NO_BINARY_OLD_DATA)) {
                if (tokens.length > 1) {
                    noBinaryOldData = Boolean.parseBoolean(tokens[1]);
                }
            } else if (tokens[0].equals(CsvConstants.NODEID)) {
                this.sourceNodeId = tokens[1];
            } else if (tokens[0].equals(CsvConstants.BINARY)) {
                this.binaryEncoding = BinaryEncoding.valueOf(tokens[1]);
            } else if (tokens[0].equals(CsvConstants.CHANNEL)) {
                this.channelId = tokens[1];
            } else if (tokens[0].equals(CsvConstants.SCHEMA)) {
                schemaName = tokens.length == 1 || StringUtils.isBlank(tokens[1]) ? null : tokens[1];
            } else if (tokens[0].equals(CsvConstants.CATALOG)) {
                catalogName = tokens.length == 1 || StringUtils.isBlank(tokens[1]) ? null : tokens[1];
            } else if (tokens[0].equals(CsvConstants.TABLE)) {
                // Reuse a previously parsed table definition when one is cached.
                String tableName = tokens[1];
                table = context.getParsedTables().get(Table.getFullyQualifiedTableName(catalogName, schemaName, tableName));
                if (table != null) {
                    context.setLastParsedTable(table);
                } else {
                    table = new Table(catalogName, schemaName, tableName);
                    context.setLastParsedTable(table);
                }
            } else if (tokens[0].equals(CsvConstants.KEYS)) {
                if (keys == null) {
                    keys = new HashSet<String>(tokens.length);
                }
                for (int i = 1; i < tokens.length; i++) {
                    keys.add(tokens[i]);
                }
            } else if (tokens[0].equals(CsvConstants.COLUMNS)) {
                // Rebuild the column list; a column is marked as a key if it appeared in the keys token.
                table.removeAllColumns();
                for (int i = 1; i < tokens.length; i++) {
                    Column column = new Column(tokens[i], keys != null && keys.contains(tokens[i]));
                    table.addColumn(column);
                }
                context.getParsedTables().put(table.getFullyQualifiedTableName(), table);
            } else if (tokens[0].equals(CsvConstants.COMMIT)) {
                if (batch != null) {
                    batch.setComplete(true);
                }
                tokens = null;
                return null;
            } else if (tokens[0].equals(CsvConstants.SQL)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.SQL);
                data.putParsedData(CsvData.ROW_DATA, new String[] { tokens[1] });
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.BSH)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.BSH);
                data.putParsedData(CsvData.ROW_DATA, new String[] { tokens[1] });
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.CREATE)) {
                CsvData data = new CsvData();
                data.setNoBinaryOldData(noBinaryOldData);
                data.setDataEventType(DataEventType.CREATE);
                data.putParsedData(CsvData.ROW_DATA, new String[] { tokens[1] });
                tokens = null;
                return data;
            } else if (tokens[0].equals(CsvConstants.IGNORE)) {
                if (batch != null) {
                    batch.setIgnored(true);
                }
            } else {
                log.info("Unable to handle unknown csv values: " + Arrays.toString(tokens));
            }
            tokens = null;
        }
    } catch (IOException ex) {
        throw new IoException(ex);
    }
    return null;
}
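To make the dispatch above concrete, here is a hypothetical, hand-written fragment of the CSV protocol this reader consumes (the node, channel, batch, and row values are illustrative, not captured output). Each line's first token selects a branch, and the column count from the columns line governs how an update row is split into new values and primary key values:

nodeid,server
channel,default
binary,BASE64
batch,1234
catalog,
schema,
table,contact
keys,id
columns,id,name,email
insert,1,Fred,fred@example.com
update,1,Freda,freda@example.com,1
commit,1234

For the update line, with three columns declared, tokens 1 through 3 become ROW_DATA (the new values) and the remaining token becomes PK_DATA (the key value 1).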