Usage of com.axway.ats.core.dbaccess.DbRecordValue in the ats-framework project by Axway:
the writeTableToFile method of the CassandraEnvironmentHandler class.
/**
 * Writes the content of a single Cassandra table to the backup file.
 *
 * A TRUNCATE statement is emitted first when delete statements are requested.
 * Regular tables are dumped as INSERT statements. Tables containing a counter
 * column cannot be INSERTed into, so they are dumped as
 * "UPDATE ... SET counter = counter + value WHERE key = ..." statements
 * instead - a not-yet-existing counter row is assumed by Cassandra to start
 * from 0.
 *
 * @param columns    descriptions of the table columns, in select order
 * @param table      the table being backed up
 * @param records    the rows to serialize; may be empty
 * @param fileWriter destination backup file
 * @throws IOException    on write failure
 * @throws ParseException if a value cannot be converted by extractValue()
 */
@Override
protected void writeTableToFile( List<ColumnDescription> columns, DbTable table,
                                 DbRecordValuesList[] records,
                                 FileWriter fileWriter ) throws IOException, ParseException {

    if (this.includeDeleteStatements) {
        // clear the table before the restored rows are applied
        fileWriter.write("TRUNCATE " + table.getTableName() + ";" + EOL_MARKER
                         + AtsSystemProperties.SYSTEM_LINE_SEPARATOR);
    }

    if (records.length == 0) {
        return;
    }

    String counterColumn = getCounterColumn(records);
    if (counterColumn == null) {
        // plain table - one INSERT statement per row
        final String statementStart = "INSERT INTO " + table.getTableName() + "("
                                      + getColumnsString(columns) + ") VALUES (";
        final String statementEnd = ");" + EOL_MARKER + AtsSystemProperties.SYSTEM_LINE_SEPARATOR;

        StringBuilder statement = new StringBuilder();
        for (DbRecordValuesList row : records) {
            statement.setLength(0);
            statement.append(statementStart);
            for (int i = 0; i < row.size(); i++) {
                // values are serialized differently depending on the column type
                statement.append(extractValue(columns.get(i), row.get(i).getValue()))
                         .append(',');
            }
            statement.setLength(statement.length() - 1); // drop the trailing comma
            statement.append(statementEnd);
            fileWriter.write(statement.toString());
        }
    } else {
        // counter table - one UPDATE per row. Only one column may be a counter;
        // the counter value is patched into the statement template after the
        // WHERE clause has been assembled from the remaining columns.
        final String statementStart = "UPDATE " + table.getTableName() + " SET " + counterColumn
                                      + " = " + counterColumn + " + <the counter value> WHERE ";
        final String statementEnd = EOL_MARKER + AtsSystemProperties.SYSTEM_LINE_SEPARATOR;

        StringBuilder statement = new StringBuilder();
        for (DbRecordValuesList row : records) {
            statement.setLength(0);
            statement.append(statementStart);
            String counterValue = "";
            for (int i = 0; i < row.size(); i++) {
                DbRecordValue dbValue = row.get(i);
                String columnName = dbValue.getDbColumn().getColumnName();
                if (columnName.equals(counterColumn)) {
                    // the counter itself belongs to the SET part, not the WHERE part
                    counterValue = dbValue.getValue().toString();
                } else {
                    statement.append(columnName)
                             .append(" = ")
                             .append(extractValue(columns.get(i), dbValue.getValue()))
                             .append(" AND ");
                }
            }
            statement.setLength(statement.length() - " AND ".length()); // drop the trailing AND
            statement.append(statementEnd);
            fileWriter.write(statement.toString().replace("<the counter value>", counterValue));
        }
    }
}
Usage of com.axway.ats.core.dbaccess.DbRecordValue in the ats-framework project by Axway:
the refresh method of the DbFolder class.
/**
 * Re-runs the search query and rebuilds the meta data maps.
 *
 * The previous snapshot is kept aside so that records appearing since the
 * last poll can be detected: a record whose hash key is not in the old map
 * goes into {@code newMetaDataMap}, while every record always goes into
 * {@code allMetaDataMap}.
 *
 * @throws RbvException if the underlying DB select fails
 */
private void refresh() throws RbvException {

    newMetaDataMap = new HashMap<String, MetaData>();

    // store the current meta data map and clear the map holding all meta data;
    // this way we will be able to detect any changes including added and
    // removed meta data
    HashMap<String, MetaData> oldMetaDataMap = allMetaDataMap;
    allMetaDataMap = new HashMap<String, MetaData>();

    log.debug("Run DB query '" + this.searchQuery.getQuery() + "'");

    DbRecordValuesList[] queryResults;
    try {
        queryResults = dbProvider.select(this.searchQuery);
    } catch (DbException dbe) {
        throw new RbvException(dbe);
    }

    if (queryResults != null) {
        // create the digest once, not once per record
        MessageDigest metaDataHashDigest;
        try {
            metaDataHashDigest = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            // MD5 is required to be present in every JRE, so this should never happen
            throw new RuntimeException(e);
        }

        for (DbRecordValuesList queryResult : queryResults) {

            DbMetaData currentData = new DbMetaData();
            StringBuilder metaDataHash = new StringBuilder();

            for (DbRecordValue recordValue : queryResult) {
                DbMetaDataKey key = new DbMetaDataKey(recordValue.getDbColumn());
                Object value = recordValue.getValue();
                currentData.putProperty(key.toString(), value);

                // accumulate the record content for hashing, so we don't have to
                // keep the full record text as the map key
                metaDataHash.append(key.toString());
                metaDataHash.append(recordValue.getValueAsString());
            }

            // Hex-encode the MD5 digest. Decoding the raw digest bytes with the
            // platform charset (the previous approach) is lossy - byte sequences
            // that are invalid in the charset collapse, so different digests
            // could end up as the same map key and records would be wrongly
            // treated as already seen.
            metaDataHashDigest.reset();
            byte[] digest = metaDataHashDigest.digest(metaDataHash.toString().getBytes());
            StringBuilder sumBuilder = new StringBuilder(2 * digest.length);
            for (byte b : digest) {
                sumBuilder.append(String.format("%02x", b));
            }
            String metaDataSum = sumBuilder.toString();

            if (!oldMetaDataMap.containsKey(metaDataSum)) {
                // not present during the previous poll - it is a new record
                newMetaDataMap.put(metaDataSum, currentData);
            }
            // always put the record in the map holding all meta data
            allMetaDataMap.put(metaDataSum, currentData);
        }

        didPollingOccured = true;
    }
}
Usage of com.axway.ats.core.dbaccess.DbRecordValue in the ats-framework project by Axway:
the writeTableToFile method of the PostgreSqlEnvironmentHandler class.
/**
 * Writes the content of a single table to the PostgreSQL backup file as
 * INSERT statements, optionally preceded by a LOCK TABLE statement.
 *
 * @param columns    descriptions of the table columns, in select order
 * @param table      the table being backed up
 * @param records    the rows to serialize; may be empty
 * @param fileWriter destination backup writer
 * @throws IOException on write failure
 */
@Override
protected void writeTableToFile( List<ColumnDescription> columns, DbTable table,
                                 DbRecordValuesList[] records,
                                 Writer fileWriter ) throws IOException {

    String fullTableName = getFullTableName(table);

    if (this.addLocks && table.isLockTable()) {
        // Lock only this table for update; the lock is released after the delete
        // and subsequent insert of the backup data. This limits data integrity
        // issues if another process updates tables at the same time. Locking ALL
        // tables at once is not applicable - in reality the DB connection blocks.
        // NOTE(review): the original notes mention ACCESS EXCLUSIVE (the most
        // restrictive PostgreSQL mode) but the emitted statement uses EXCLUSIVE -
        // confirm which is intended.
        // https://www.postgresql.org/docs/current/explicit-locking.html
        fileWriter.write("LOCK TABLE " + fullTableName + " IN EXCLUSIVE MODE;" + EOL_MARKER
                         + AtsSystemProperties.SYSTEM_LINE_SEPARATOR);
    }

    if (table.getAutoIncrementResetValue() != null) {
        // NOTE(review): these two statements are MySQL syntax emitted with a
        // "TODO: fixme" marker - they are not valid PostgreSQL (sequences are
        // reset there with ALTER SEQUENCE / setval) and still need a real
        // implementation.
        fileWriter.write("TODO: fixme SET SESSION sql_mode='NO_AUTO_VALUE_ON_ZERO';" + EOL_MARKER
                         + AtsSystemProperties.SYSTEM_LINE_SEPARATOR);
        fileWriter.write("TODO: fixme ALTER TABLE " + fullTableName + " AUTO_INCREMENT = "
                         + table.getAutoIncrementResetValue() + ";" + EOL_MARKER
                         + AtsSystemProperties.SYSTEM_LINE_SEPARATOR);
    }

    if (records.length > 0) {
        String statementStart = "INSERT INTO " + getFullTableName(table) + " ("
                                + getColumnsString(columns) + ") VALUES(";
        String statementEnd = ");" + EOL_MARKER + AtsSystemProperties.SYSTEM_LINE_SEPARATOR;

        StringBuilder statement = new StringBuilder();
        for (DbRecordValuesList record : records) {
            statement.setLength(0);
            statement.append(statementStart);
            for (int i = 0; i < record.size(); i++) {
                // assumes values come back as strings (the cast fails otherwise) -
                // TODO confirm the select return mode; a null reference stands for
                // SQL NULL
                String fieldValue = (String) record.get(i).getValue();
                // values are serialized differently depending on the column type
                statement.append(extractValue(columns.get(i),
                                              fieldValue == null ? "NULL" : fieldValue))
                         .append(',');
            }
            statement.setLength(statement.length() - 1); // drop the trailing comma
            statement.append(statementEnd);
            fileWriter.write(statement.toString());
            fileWriter.flush();
        }
    }

    // there is no UNLOCK TABLE command - the lock is released automatically when
    // the surrounding transaction ends
    fileWriter.write(AtsSystemProperties.SYSTEM_LINE_SEPARATOR);
}
Usage of com.axway.ats.core.dbaccess.DbRecordValue in the ats-framework project by Axway:
the createBackupSkipColumnsPositive method of the Test_MysqlEnvironmentHandler class.
/**
 * A backup must honor a table's "columns to skip" list: table1 is dumped with
 * all three columns, while table2 (configured to skip "name2") must produce an
 * INSERT statement without that column.
 */
@Test
public void createBackupSkipColumnsPositive() throws DatabaseEnvironmentCleanupException, DbException, IOException, ParseException, SQLException {
// the columns meta data as the handler reads it: column name, column type and
// whether the column has a default value
DbRecordValuesList column1MetaData = new DbRecordValuesList();
column1MetaData.add(new DbRecordValue("", "COLUMN_NAME", "name1"));
column1MetaData.add(new DbRecordValue("", "COLUMN_TYPE", "varchar(32)"));
column1MetaData.add(new DbRecordValue("", "COLUMN_DEFAULT", true));
DbRecordValuesList column2MetaData = new DbRecordValuesList();
column2MetaData.add(new DbRecordValue("", "COLUMN_NAME", "name2"));
column2MetaData.add(new DbRecordValue("", "COLUMN_TYPE", "varchar(32)"));
column2MetaData.add(new DbRecordValue("", "COLUMN_DEFAULT", true));
DbRecordValuesList column3MetaData = new DbRecordValuesList();
column3MetaData.add(new DbRecordValue("", "COLUMN_NAME", "name3"));
column3MetaData.add(new DbRecordValue("", "COLUMN_TYPE", "bit(2)"));
column3MetaData.add(new DbRecordValue("", "COLUMN_DEFAULT", true));
DbRecordValuesList[] columnsMetaData = new DbRecordValuesList[] { column1MetaData, column2MetaData, column3MetaData };
// row data to be returned for the first table
DbRecordValuesList recordValue1 = new DbRecordValuesList();
recordValue1.add(new DbRecordValue("table1", "name1", "value1"));
recordValue1.add(new DbRecordValue("table1", "name2", null));
recordValue1.add(new DbRecordValue("table1", "name3", "0x00"));
DbRecordValuesList[] recordValues1 = new DbRecordValuesList[] { recordValue1 };
// row data to be returned for the second table - the skipped column ("name2")
// is already absent
// NOTE(review): these rows carry the table name "table1"; presumably the
// handler does not read this field - confirm
DbRecordValuesList recordValue2 = new DbRecordValuesList();
recordValue2.add(new DbRecordValue("table1", "name1", "value1"));
recordValue2.add(new DbRecordValue("table1", "name3", "0x10"));
DbRecordValuesList[] recordValues2 = new DbRecordValuesList[] { recordValue2 };
// record the expected DB interactions: connection and driver-version lookups,
// then per table one columns-meta-data select followed by one data select
expect(mockDbProvider.getConnection()).andReturn(mockConnection);
expect(mockConnection.getMetaData()).andReturn(metaData);
expect(mockDbConnection.getUser()).andReturn("myUserName").atLeastOnce();
expect(metaData.getDriverMajorVersion()).andReturn(5);
expect(metaData.getDriverMinorVersion()).andReturn(1);
expect(mockDbProvider.select(isA(String.class))).andReturn(columnsMetaData);
expect(mockDbProvider.select(isA(DbQuery.class), eq(DbReturnModes.ESCAPED_STRING))).andReturn(recordValues1);
expect(mockDbProvider.select(isA(String.class))).andReturn(columnsMetaData);
expect(mockDbProvider.select(isA(DbQuery.class), eq(DbReturnModes.ESCAPED_STRING))).andReturn(recordValues2);
// expect the file writer calls
// foreign keys check start
mockFileWriter.write("SET FOREIGN_KEY_CHECKS = 0;" + EOL_MARKER + LINE_SEPARATOR);
// table1 - all three columns appear in the INSERT
mockFileWriter.write("LOCK TABLES `table1` WRITE;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write("DELETE FROM `table1`;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write("INSERT INTO `table1` (name1,name2,name3) VALUES('value1',NULL,0x00);" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.flush();
mockFileWriter.write("UNLOCK TABLES;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write(LINE_SEPARATOR);
// table2 - the skipped column "name2" must be missing from the INSERT
mockFileWriter.write("LOCK TABLES `table2` WRITE;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write("DELETE FROM `table2`;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write("INSERT INTO `table2` (name1,name3) VALUES('value1',0x10);" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.flush();
mockFileWriter.write("UNLOCK TABLES;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write(LINE_SEPARATOR);
// foreign keys check end
mockFileWriter.write("");
replayAll();
// exercise the handler: table2 is constructed with the columns-to-skip list
DbTable table1 = new DbTable("table1");
List<String> columnsToSkip = new ArrayList<String>();
columnsToSkip.add("name2");
DbTable table2 = new DbTable("table2", "dbo", columnsToSkip);
MysqlEnvironmentHandler envHandler = new MysqlEnvironmentHandler(mockDbConnection, mockDbProvider);
envHandler.addTable(table1);
envHandler.addTable(table2);
envHandler.writeBackupToFile(mockFileWriter);
verifyAll();
}
Usage of com.axway.ats.core.dbaccess.DbRecordValue in the ats-framework project by Axway:
the createBackupNegativeSkippedColumnIsNotNullable method of the Test_MysqlEnvironmentHandler class.
/**
 * A skipped column without a default value cannot be restored sensibly, so the
 * backup must abort with ColumnHasNoDefaultValueException: table2 skips
 * "name2", whose COLUMN_DEFAULT meta data is null.
 */
@Test(expected = ColumnHasNoDefaultValueException.class)
public void createBackupNegativeSkippedColumnIsNotNullable() throws DatabaseEnvironmentCleanupException, DbException, IOException, ParseException, SQLException {
// the columns meta data - "name2" deliberately has no default value
// (COLUMN_DEFAULT is null), which should trigger the expected exception once
// it is skipped
DbRecordValuesList column1MetaData = new DbRecordValuesList();
column1MetaData.add(new DbRecordValue("", "COLUMN_NAME", "name1"));
column1MetaData.add(new DbRecordValue("", "COLUMN_TYPE", "varchar(32)"));
column1MetaData.add(new DbRecordValue("", "COLUMN_DEFAULT", true));
DbRecordValuesList column2MetaData = new DbRecordValuesList();
column2MetaData.add(new DbRecordValue("", "COLUMN_NAME", "name2"));
column2MetaData.add(new DbRecordValue("", "COLUMN_TYPE", "varchar(32)"));
column2MetaData.add(new DbRecordValue("", "COLUMN_DEFAULT", null));
DbRecordValuesList column3MetaData = new DbRecordValuesList();
column3MetaData.add(new DbRecordValue("", "COLUMN_NAME", "name3"));
column3MetaData.add(new DbRecordValue("", "COLUMN_TYPE", "bit(1)"));
column3MetaData.add(new DbRecordValue("", "COLUMN_DEFAULT", 1));
DbRecordValuesList[] columnsMetaData = new DbRecordValuesList[] { column1MetaData, column2MetaData, column3MetaData };
// row data returned for both tables
DbRecordValuesList record1Value = new DbRecordValuesList();
record1Value.add(new DbRecordValue("table1", "name1", "value1"));
record1Value.add(new DbRecordValue("table1", "name2", null));
record1Value.add(new DbRecordValue("table1", "name3", "1"));
DbRecordValuesList[] recordValues = new DbRecordValuesList[] { record1Value };
// expected DB interactions before the failure is detected
expect(mockDbProvider.getConnection()).andReturn(mockConnection);
expect(mockConnection.getMetaData()).andReturn(metaData);
expect(mockDbConnection.getUser()).andReturn("myUserName").atLeastOnce();
expect(metaData.getDriverMajorVersion()).andReturn(5);
expect(metaData.getDriverMinorVersion()).andReturn(1);
expect(mockDbProvider.select(isA(String.class))).andReturn(columnsMetaData);
expect(mockDbProvider.select(isA(DbQuery.class), eq(DbReturnModes.ESCAPED_STRING))).andReturn(recordValues);
expect(mockDbProvider.select(isA(String.class))).andReturn(columnsMetaData);
expect(mockDbProvider.select(isA(DbQuery.class), eq(DbReturnModes.ESCAPED_STRING))).andReturn(recordValues);
// file writer calls that may occur before the exception is thrown
mockFileWriter.write("SET FOREIGN_KEY_CHECKS = 0;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write("DELETE FROM `table1`;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write("DELETE FROM `table2`;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write("LOCK TABLES `table1` WRITE;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write("INSERT INTO `table1` (name1,name2,name3) VALUES('value1',NULL,0x1);" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.flush();
mockFileWriter.write("UNLOCK TABLES;" + EOL_MARKER + LINE_SEPARATOR);
mockFileWriter.write(LINE_SEPARATOR);
replayAll();
// exercise the handler: table2 skips the column that has no default value
DbTable table1 = new DbTable("table1");
List<String> columnsToSkip = new ArrayList<String>();
columnsToSkip.add("name2");
DbTable table2 = new DbTable("table2", "dbo", columnsToSkip);
MysqlEnvironmentHandler envHandler = new MysqlEnvironmentHandler(mockDbConnection, mockDbProvider);
envHandler.addTable(table1);
envHandler.addTable(table2);
envHandler.writeBackupToFile(mockFileWriter);
verifyAll();
}
Aggregations