use of com.servoy.j2db.persistence.Table in project servoy-client by Servoy.
The class FoundSetManager, method acquireLock.
/*
* _____________________________________________________________ locking methods
*/
// index == -1 is the (current) selected record, < -1 is all records, >= 0 is the record at that index (a usage sketch follows this method)
public boolean acquireLock(IFoundSet fs, int index, String lockName) {
if (fs instanceof IFoundSetInternal) {
IFoundSetInternal foundSet = (IFoundSetInternal) fs;
if (foundSet.getSQLSheet() == null || foundSet.getSQLSheet().getTable() == null) {
return false;
}
Map<Object, Object[]> pkhashkeys = new HashMap<Object, Object[]>();
if (index == -1) {
int idx = foundSet.getSelectedIndex();
if (idx >= 0 && idx < foundSet.getSize()) {
IRecordInternal rec = foundSet.getRecord(idx);
// just for safety
if (rec == null || rec.getRawData() == null)
return false;
if (!rec.getRawData().lockedByMyself())
pkhashkeys.put(rec.getPKHashKey(), rec.getPK());
} else {
// wrong index
return false;
}
} else if (index < -1) {
for (int i = 0; i < foundSet.getSize(); i++) {
IRecordInternal rec = foundSet.getRecord(i);
// just for safety
if (rec == null || rec.getRawData() == null)
return false;
if (!rec.getRawData().lockedByMyself())
pkhashkeys.put(rec.getPKHashKey(), rec.getPK());
}
} else if (index >= 0) {
if (index < foundSet.getSize()) {
IRecordInternal rec = foundSet.getRecord(index);
// just for safety
if (rec == null || rec.getRawData() == null)
return false;
if (!rec.getRawData().lockedByMyself())
pkhashkeys.put(rec.getPKHashKey(), rec.getPK());
} else {
// wrong index
return false;
}
} else {
// unknown index
return false;
}
// optimize
if (pkhashkeys.size() == 0) {
return true;
}
Table table = (Table) foundSet.getTable();
if (table != null) {
String server_name = foundSet.getSQLSheet().getServerName();
String table_name = foundSet.getSQLSheet().getTable().getName();
RowManager rm = rowManagers.get(DataSourceUtils.createDBTableDataSource(server_name, table_name));
// process
Set<Object> keySet = pkhashkeys.keySet();
// make a copy because the key set is not serialized in developer and gets emptied
Set<Object> ids = new HashSet<Object>(keySet);
QuerySelect lockSelect = SQLGenerator.createUpdateLockSelect(table, pkhashkeys.values().toArray(new Object[pkhashkeys.size()][]),
hasTransaction() && Boolean.parseBoolean(application.getSettings().getProperty("servoy.record.lock.lockInDB", "false"))); //$NON-NLS-1$ //$NON-NLS-2$
if (rm != null) {
if (rm.acquireLock(application.getClientID(), lockSelect, lockName, ids)) {
if (infoListener != null)
infoListener.showLocksStatus(true);
// success
return true;
}
}
}
}
return false;
}
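The three index conventions above (-1 for the selected record, any value below -1 for all records, 0 or higher for a specific record) translate into a small calling surface. A hedged usage sketch follows; the manager, foundset and lock names are assumptions for illustration, not taken from the Servoy sources.

// Hypothetical usage sketch, assuming a FoundSetManager and an IFoundSet are already available.
static void lockExamples(FoundSetManager foundSetManager, IFoundSet orders) {
    // index == -1: lock only the currently selected record
    boolean selectedLocked = foundSetManager.acquireLock(orders, -1, "order_edit_lock");
    // index < -1: lock every record currently in the foundset
    boolean allLocked = foundSetManager.acquireLock(orders, -2, "order_batch_lock");
    // index >= 0: lock the record at that index
    boolean firstLocked = foundSetManager.acquireLock(orders, 0, "order_row_lock");
    if (!(selectedLocked && allLocked && firstLocked)) {
        // false means an invalid index, missing table information, or a lock held elsewhere
        Debug.trace("not all locks could be acquired");
    }
}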
use of com.servoy.j2db.persistence.Table in project servoy-client by Servoy.
The class RowManager, method getRowUpdateInfo.
RowUpdateInfo getRowUpdateInfo(Row row, boolean tracking) throws ServoyException {
try {
if (row.getRowManager() != this) {
throw new IllegalArgumentException("I'm not the row manager from row"); //$NON-NLS-1$
}
if (adjustingForChangeByOtherPKHashKey.get() != null && adjustingForChangeByOtherPKHashKey.get().equals(row.getPKHashKey())) {
row.flagExistInDB();
// we ignore changes here because stored calcs with a time element are always changed, resulting in an endless loop between clients
return null;
}
if (row.getLastException() instanceof DataException) {
// cannot update a row which is not changed (which clears the DataException)
return null;
}
if (!row.isChanged())
return null;
boolean mustRequeryRow = false;
List<Column> dbPKReturnValues = new ArrayList<Column>();
SQLSheet.SQLDescription sqlDesc = null;
int statement_action;
ISQLUpdate sqlUpdate = null;
IServer server = fsm.getApplication().getSolution().getServer(sheet.getServerName());
boolean oracleServer = SQLSheet.isOracleServer(server);
boolean usesLobs = false;
Table table = sheet.getTable();
boolean doesExistInDB = row.existInDB();
List<String> aggregatesToRemove = new ArrayList<String>(8);
List<String> changedColumns = null;
if (doesExistInDB) {
statement_action = ISQLActionTypes.UPDATE_ACTION;
sqlDesc = sheet.getSQLDescription(SQLSheet.UPDATE);
sqlUpdate = (QueryUpdate) AbstractBaseQuery.deepClone(sqlDesc.getSQLQuery());
List<String> req = sqlDesc.getRequiredDataProviderIDs();
List<String> old = sqlDesc.getOldRequiredDataProviderIDs();
Object[] olddata = row.getRawOldColumnData();
// for safety only, nothing changed
if (olddata == null) {
return null;
}
Object[] newdata = row.getRawColumnData();
for (int i = 0; i < olddata.length; i++) {
String dataProviderID = req.get(i);
Column c = table.getColumn(dataProviderID);
ColumnInfo ci = c.getColumnInfo();
if (ci != null && ci.isDBManaged()) {
mustRequeryRow = true;
} else {
Object modificationValue = c.getModificationValue(fsm.getApplication());
if (modificationValue != null) {
row.setRawValue(dataProviderID, modificationValue);
}
if (newdata[i] instanceof BlobMarkerValue) {
// a BlobMarkerValue means the blob content was not touched (a real change would be a byte[]), so skip it
continue;
}
if (!Utils.equalObjects(olddata[i], newdata[i])) {
if (sheet.isUsedByAggregate(dataProviderID)) {
aggregatesToRemove.addAll(sheet.getAggregateName(dataProviderID));
}
Object robj = c.getAsRightType(newdata[i]);
if (robj == null)
robj = ValueFactory.createNullValue(c.getType());
((QueryUpdate) sqlUpdate).addValue(c.queryColumn(((QueryUpdate) sqlUpdate).getTable()), robj);
if (changedColumns == null) {
changedColumns = new ArrayList<String>(olddata.length - i);
}
changedColumns.add(c.getName());
if (oracleServer && !usesLobs) {
int type = c.getType();
if (type == Types.BLOB && robj instanceof byte[] && ((byte[]) robj).length > 4000) {
usesLobs = true;
} else if (type == Types.CLOB && robj instanceof String && ((String) robj).length() > 4000) {
usesLobs = true;
}
}
}
}
}
// nothing changed after all
if (changedColumns == null) {
// clear the old data now else it will be kept and in a changed state.
row.flagExistInDB();
return null;
}
// add PK
Object[] pkValues = new Object[old.size()];
for (int j = 0; j < old.size(); j++) {
String dataProviderID = old.get(j);
pkValues[j] = row.getOldRequiredValue(dataProviderID);
}
// TODO: check for success
AbstractBaseQuery.setPlaceholderValue(sqlUpdate, new TablePlaceholderKey(((QueryUpdate) sqlUpdate).getTable(), SQLGenerator.PLACEHOLDER_PRIMARY_KEY), pkValues);
} else {
List<Object> argsArray = new ArrayList<Object>();
statement_action = ISQLActionTypes.INSERT_ACTION;
sqlDesc = sheet.getSQLDescription(SQLSheet.INSERT);
sqlUpdate = (ISQLUpdate) AbstractBaseQuery.deepClone(sqlDesc.getSQLQuery());
List<String> req = sqlDesc.getRequiredDataProviderIDs();
if (Debug.tracing())
Debug.trace(sqlUpdate.toString());
for (int i = 0; i < req.size(); i++) {
String dataProviderID = req.get(i);
if (sheet.isUsedByAggregate(dataProviderID)) {
aggregatesToRemove.addAll(sheet.getAggregateName(dataProviderID));
}
Column c = table.getColumn(dataProviderID);
QueryColumn queryColumn = c.queryColumn(((QueryInsert) sqlUpdate).getTable());
ColumnInfo ci = c.getColumnInfo();
if (c.isDBIdentity()) {
dbPKReturnValues.add(c);
argsArray.add(row.getDbIdentValue());
} else if (ci != null && ci.isDBManaged()) {
mustRequeryRow = true;
} else {
int columnIndex = getSQLSheet().getColumnIndex(dataProviderID);
// HACK: DIRTY way, should use some kind of identifier preferably
if (c.getDatabaseDefaultValue() != null && row.getRawValue(columnIndex, false) == null && c.getRowIdentType() == IBaseColumn.NORMAL_COLUMN) {
// The database has a default value, and the value is null, and this is an insert...
// Remove the column from the query entirely and make sure the default value is requeried from the db.
mustRequeryRow = true;
((QueryInsert) sqlUpdate).removeColumn(queryColumn);
} else {
Object robj = c.getAsRightType(row.getRawValue(columnIndex, false));
if (robj == null)
robj = ValueFactory.createNullValue(c.getType());
argsArray.add(robj);
if (oracleServer && !usesLobs) {
int type = c.getType();
if (type == Types.BLOB && robj instanceof byte[] && ((byte[]) robj).length > 4000) {
usesLobs = true;
} else if (type == Types.CLOB && robj instanceof String && ((String) robj).length() > 4000) {
usesLobs = true;
}
}
}
}
}
AbstractBaseQuery.setPlaceholderValue(sqlUpdate, new TablePlaceholderKey(((QueryInsert) sqlUpdate).getTable(), SQLGenerator.PLACEHOLDER_INSERT_KEY), argsArray.toArray());
}
Object[] pk = row.getPK();
IDataSet pks = new BufferedDataSet();
pks.addRow(pk);
String tid = null;
GlobalTransaction gt = fsm.getGlobalTransaction();
if (gt != null) {
tid = gt.getTransactionID(sheet.getServerName());
}
QuerySelect requerySelect = null;
if (mustRequeryRow) {
requerySelect = (QuerySelect) AbstractBaseQuery.deepClone(sheet.getSQL(SQLSheet.SELECT));
if (!requerySelect.setPlaceholderValue(new TablePlaceholderKey(requerySelect.getTable(), SQLGenerator.PLACEHOLDER_PRIMARY_KEY), pk)) {
Debug.error(new RuntimeException("Could not set placeholder " + //$NON-NLS-1$
new TablePlaceholderKey(requerySelect.getTable(), SQLGenerator.PLACEHOLDER_PRIMARY_KEY) + " in query " + requerySelect + "-- continuing")); //$NON-NLS-1$ //$NON-NLS-2$
}
}
SQLStatement statement = new SQLStatement(statement_action, sheet.getServerName(), table.getName(), pks, tid, sqlUpdate, fsm.getTableFilterParams(sheet.getServerName(), sqlUpdate), requerySelect);
// check that the row is updated (skip check for insert)
if (doesExistInDB)
statement.setExpectedUpdateCount(1);
if (changedColumns != null) {
statement.setChangedColumns(changedColumns.toArray(new String[changedColumns.size()]));
}
statement.setOracleFixTrackingData(usesLobs && !tracking);
statement.setIdentityColumn(dbPKReturnValues.size() == 0 ? null : dbPKReturnValues.get(0));
if (tracking || usesLobs) {
statement.setTrackingData(sheet.getColumnNames(), row.getRawOldColumnData() != null ? new Object[][] { row.getRawOldColumnData() } : null, row.getRawColumnData() != null ? new Object[][] { row.getRawColumnData() } : null, fsm.getApplication().getUserUID(), fsm.getTrackingInfo(), fsm.getApplication().getClientID());
}
return new RowUpdateInfo(row, statement, dbPKReturnValues, aggregatesToRemove);
} catch (RemoteException e) {
throw new RepositoryException(e);
}
}
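The update branch above decides what to send to the database by comparing the row's raw old and new column data and collecting the names of the columns that differ. A simplified, hedged sketch of just that detection step follows; it is a hypothetical helper, with java.util.Objects.equals standing in for Utils.equalObjects.

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

public final class ChangedColumnSketch {
    // Hypothetical helper mirroring the detection above: a column counts as changed
    // when its old and new raw values differ; a null old-data array means nothing changed.
    // columnNames is assumed to have one entry per position in the data arrays.
    static List<String> detectChangedColumns(List<String> columnNames, Object[] oldData, Object[] newData) {
        List<String> changed = new ArrayList<>();
        if (oldData == null) {
            return changed;
        }
        for (int i = 0; i < oldData.length; i++) {
            if (!Objects.equals(oldData[i], newData[i])) {
                changed.add(columnNames.get(i));
            }
        }
        return changed;
    }
}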
use of com.servoy.j2db.persistence.Table in project servoy-client by Servoy.
The class JSDatabaseManager, method js_mergeRecords.
// strongly recommended to use a transaction
// currently does not support compound pks
/**
* Merge records from the same foundset: updates the entire datamodel (via the foreign type on columns) with the destination
* record pk and deletes the source record. Do use a transaction!
*
* This function is very handy in situations where duplicate data exists. It allows you to merge the two records
* and move all related records in one go. Say the source_record is "Ikea" and the combined_destination_record is "IKEA", the
* "Ikea" record is deleted and all records related to it (think of contacts and orders, for instance) will be related
* to the "IKEA" record.
*
* The function takes an optional array of column names. If provided, the data in the named columns will be copied
* from source_record to combined_destination_record.
*
* Note that it is essential for both records to originate from the same foundset, as shown in the sample code.
*
* @sample databaseManager.mergeRecords(foundset.getRecord(1),foundset.getRecord(2));
*
* @param sourceRecord The source JSRecord to copy from.
* @param combinedDestinationRecord The target/destination JSRecord to copy into.
* @param columnNames The column names array that should be copied.
*
* @return true if the records could be merged.
*/
public boolean js_mergeRecords(IRecordInternal sourceRecord, IRecordInternal combinedDestinationRecord, String[] columnNames) throws ServoyException {
checkAuthorized();
if (sourceRecord != null && combinedDestinationRecord != null) {
FoundSetManager fsm = (FoundSetManager) application.getFoundSetManager();
try {
if (sourceRecord.getParentFoundSet() != combinedDestinationRecord.getParentFoundSet()) {
return false;
}
Table mainTable = (Table) combinedDestinationRecord.getParentFoundSet().getTable();
String mainTableForeignType = mainTable.getName();
String transaction_id = fsm.getTransactionID(mainTable.getServerName());
Object sourceRecordPK = null;
Object combinedDestinationRecordPK = null;
Column pkc = null;
Iterator<Column> pk_it = mainTable.getRowIdentColumns().iterator();
if (pk_it.hasNext()) {
pkc = pk_it.next();
sourceRecordPK = sourceRecord.getValue(pkc.getDataProviderID());
if (sourceRecordPK == null)
sourceRecordPK = ValueFactory.createNullValue(pkc.getType());
combinedDestinationRecordPK = combinedDestinationRecord.getValue(pkc.getDataProviderID());
if (combinedDestinationRecordPK == null)
combinedDestinationRecordPK = ValueFactory.createNullValue(pkc.getType());
// multipk not supported
if (pk_it.hasNext())
return false;
}
List<SQLStatement> updates = new ArrayList<SQLStatement>();
IServer server = application.getSolution().getServer(mainTable.getServerName());
if (server != null) {
Iterator<String> it = server.getTableNames(false).iterator();
while (it.hasNext()) {
String tableName = it.next();
Table table = (Table) server.getTable(tableName);
// not supported
if (table.getRowIdentColumnsCount() > 1)
continue;
Iterator<Column> it2 = table.getColumns().iterator();
while (it2.hasNext()) {
Column c = it2.next();
if (c.getColumnInfo() != null) {
if (mainTableForeignType.equalsIgnoreCase(c.getColumnInfo().getForeignType())) {
// update table set foreigntypecolumn = combinedDestinationRecordPK where foreigntypecolumn = sourceRecordPK
QueryTable qTable = new QueryTable(table.getSQLName(), table.getDataSource(), table.getCatalog(), table.getSchema());
QueryUpdate qUpdate = new QueryUpdate(qTable);
QueryColumn qc = c.queryColumn(qTable);
qUpdate.addValue(qc, combinedDestinationRecordPK);
ISQLCondition condition = new CompareCondition(IBaseSQLCondition.EQUALS_OPERATOR, qc, sourceRecordPK);
qUpdate.setCondition(condition);
IDataSet pks = new BufferedDataSet();
// unknown number of records changed
pks.addRow(new Object[] { ValueFactory.createTableFlushValue() });
SQLStatement statement = new SQLStatement(ISQLActionTypes.UPDATE_ACTION, table.getServerName(), table.getName(), pks, transaction_id, qUpdate, fsm.getTableFilterParams(table.getServerName(), qUpdate));
updates.add(statement);
}
}
}
}
}
IDataSet pks = new BufferedDataSet();
pks.addRow(new Object[] { sourceRecordPK });
QueryTable qTable = new QueryTable(mainTable.getSQLName(), mainTable.getDataSource(), mainTable.getCatalog(), mainTable.getSchema());
QueryDelete qDelete = new QueryDelete(qTable);
ISQLCondition condition = new CompareCondition(IBaseSQLCondition.EQUALS_OPERATOR, pkc.queryColumn(qTable), sourceRecordPK);
qDelete.setCondition(condition);
SQLStatement statement = new SQLStatement(ISQLActionTypes.DELETE_ACTION, mainTable.getServerName(), mainTable.getName(), pks, transaction_id, qDelete, fsm.getTableFilterParams(mainTable.getServerName(), qDelete));
// check that the row is really deleted
statement.setExpectedUpdateCount(1);
updates.add(statement);
IFoundSetInternal sfs = sourceRecord.getParentFoundSet();
if (combinedDestinationRecord.startEditing()) {
if (columnNames != null) {
for (String element : columnNames) {
if (element == null)
continue;
if (sfs.getColumnIndex(element) >= 0) {
combinedDestinationRecord.setValue(element, sourceRecord.getValue(element));
}
}
}
fsm.getEditRecordList().stopEditing(true, combinedDestinationRecord);
} else {
return false;
}
Object[] results = fsm.getDataServer().performUpdates(fsm.getApplication().getClientID(), updates.toArray(new ISQLStatement[updates.size()]));
for (int i = 0; results != null && i < results.length; i++) {
if (results[i] instanceof ServoyException) {
throw (ServoyException) results[i];
}
}
// sfs.deleteRecord(sfs.getRecordIndex(sourceRecord), true); not needed, will be flushed from memory in finally
return true;
} catch (Exception ex) {
application.handleException(application.getI18NMessage("servoy.foundsetupdater.updateFailed"), //$NON-NLS-1$
new ApplicationException(ServoyException.SAVE_FAILED, ex));
} finally {
fsm.flushCachedDatabaseData(null);
}
}
return false;
}
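Stripped of the Servoy query objects, the merge amounts to repointing every related row that references the source record and then deleting the source row, all inside one transaction, which is what the javadoc insists on. A hedged plain-JDBC sketch of that flow follows; the companies/orders tables and column names are hypothetical, not part of the Servoy API.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public final class MergeSketch {
    // Hypothetical JDBC equivalent of the merge: repoint child rows to the destination pk,
    // then delete the source row; roll back everything if any step fails.
    static void mergeCompanies(Connection con, Object sourcePk, Object destinationPk) throws SQLException {
        boolean oldAutoCommit = con.getAutoCommit();
        con.setAutoCommit(false);
        try (PreparedStatement repoint = con.prepareStatement(
                "UPDATE orders SET company_id = ? WHERE company_id = ?");
             PreparedStatement delete = con.prepareStatement(
                "DELETE FROM companies WHERE company_id = ?")) {
            repoint.setObject(1, destinationPk);
            repoint.setObject(2, sourcePk);
            repoint.executeUpdate();

            delete.setObject(1, sourcePk);
            delete.executeUpdate();
            con.commit();
        } catch (SQLException e) {
            con.rollback();
            throw e;
        } finally {
            con.setAutoCommit(oldAutoCommit);
        }
    }
}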
use of com.servoy.j2db.persistence.Table in project servoy-client by Servoy.
The class JSDatabaseManager, method js_getSQL.
/**
* Returns the internal SQL which defines the specified (related) foundset.
* Optionally, the foundset and table filter params can be excluded from the sql (includeFilters=false).
* Make sure to set the applicable filters when the sql is used in a loadRecords() call.
* When the foundset is in find mode, the find conditions are included in the resulting query.
*
* @sample var sql = databaseManager.getSQL(foundset)
*
* @param foundsetOrQBSelect The JSFoundset or QBSelect to get the sql for.
* @param includeFilters include the foundset and table filters.
*
* @return String representing the sql of the JSFoundset.
*/
public String js_getSQL(Object foundsetOrQBSelect, boolean includeFilters) throws ServoyException {
checkAuthorized();
if (foundsetOrQBSelect instanceof IFoundSetInternal && ((IFoundSetInternal) foundsetOrQBSelect).getTable() != null) {
try {
QuerySet querySet = getQuerySet(((IFoundSetInternal) foundsetOrQBSelect).getCurrentStateQuery(true, false), includeFilters);
StringBuilder sql = new StringBuilder();
QueryString[] prepares = querySet.getPrepares();
for (int i = 0; prepares != null && i < prepares.length; i++) {
// TODO parameters from updates and cleanups
// sql.append(prepares[i].getSql());
// sql.append("\n"); //$NON-NLS-1$
}
sql.append(querySet.getSelect().getSql());
QueryString[] cleanups = querySet.getCleanups();
for (int i = 0; cleanups != null && i < cleanups.length; i++) {
// TODO parameters from updates and cleanups
// sql.append("\n"); //$NON-NLS-1$
// sql.append(cleanups[i].getSql());
}
return sql.toString();
} catch (Exception e) {
Debug.error(e);
}
} else if (foundsetOrQBSelect instanceof QBSelect) {
try {
QuerySelect select = ((QBSelect) foundsetOrQBSelect).build();
if (select.getColumns() == null) {
// no columns, add pk
// note that QBSelect.build() already returns a clone
ITable table = application.getFoundSetManager().getTable(select.getTable().getDataSource());
Iterator<Column> pkIt = ((Table) table).getRowIdentColumns().iterator();
if (!pkIt.hasNext()) {
throw new RepositoryException(ServoyException.InternalCodes.PRIMARY_KEY_NOT_FOUND, new Object[] { table.getName() });
}
while (pkIt.hasNext()) {
Column c = pkIt.next();
select.addColumn(c.queryColumn(select.getTable()));
}
}
QuerySet querySet = getQuerySet(select, includeFilters);
return querySet.getSelect().getSql();
} catch (RemoteException e) {
Debug.error(e);
}
}
return null;
}
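A hedged calling sketch for the method above, assuming a JSDatabaseManager instance is at hand; it mirrors the @sample but shows both accepted argument types and the includeFilters flag. The manager, foundset and query builder instances are assumptions for illustration.

// Hypothetical calling sketch for js_getSQL.
static void printSql(JSDatabaseManager databaseManager, IFoundSetInternal ordersFoundset, QBSelect ordersQuery) throws ServoyException {
    // SQL for a foundset, with foundset and table filters included
    String foundsetSql = databaseManager.js_getSQL(ordersFoundset, true);
    // SQL for a query builder object, with filters excluded
    String querySql = databaseManager.js_getSQL(ordersQuery, false);
    System.out.println(foundsetSql);
    System.out.println(querySql);
}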
use of com.servoy.j2db.persistence.Table in project servoy-client by Servoy.
The class JSDatabaseManager, method js_convertToDataSet.
/**
* @clonedesc js_convertToDataSet(IFoundSetInternal)
*
* @sampleas js_convertToDataSet(IFoundSetInternal)
*
* @param foundset The foundset to be converted.
* @param dataproviderNames Array with column names.
*
* @return JSDataSet with the data.
*/
public JSDataSet js_convertToDataSet(IFoundSetInternal foundset, String[] dataproviderNames) throws RepositoryException {
if (foundset == null) {
return null;
}
String[] dpnames = { "id" }; //$NON-NLS-1$
ColumnType[] dptypes = { ColumnType.getInstance(IColumnTypes.INTEGER, Integer.MAX_VALUE, 0) };
List<Object[]> lst = new ArrayList<Object[]>();
FoundSet fs = (FoundSet) foundset;
if (fs.getTable() != null) {
if (dataproviderNames != null) {
dpnames = dataproviderNames;
} else {
dpnames = fs.getSQLSheet().getPKColumnDataProvidersAsArray();
}
FoundSetManager fsm = (FoundSetManager) application.getFoundSetManager();
boolean getInOneQuery = !fs.isInFindMode() && (fs.hadMoreRows() || fs.getSize() > fsm.config.pkChunkSize()) && !fsm.getEditRecordList().hasEditedRecords(fs);
dptypes = new ColumnType[dpnames.length];
Table table = fs.getSQLSheet().getTable();
Map<String, Column> columnMap = new HashMap<String, Column>();
for (int i = 0; i < dpnames.length; i++) {
IDataProvider dp = application.getFlattenedSolution().getDataProviderForTable(table, dpnames[i]);
dptypes[i] = dp == null ? ColumnType.getInstance(0, 0, 0) : ColumnType.getInstance(dp instanceof Column ? ((Column) dp).getType() : dp.getDataProviderType(), dp.getLength(), dp instanceof Column ? ((Column) dp).getScale() : 0);
if (getInOneQuery) {
// only columns and data we can get from the foundset (calculations only when stored)
if (dp instanceof Column) {
columnMap.put(dpnames[i], (Column) dp);
// Blobs require special resultset handling
getInOneQuery = !SQLGenerator.isBlobColumn((Column) dp);
} else {
// aggregates, globals
getInOneQuery = fs.containsDataProvider(dpnames[i]);
}
}
}
if (getInOneQuery && columnMap.size() > 0) {
// large foundset, query the columns in 1 go
QuerySelect sqlSelect = AbstractBaseQuery.deepClone(fs.getQuerySelectForReading());
ArrayList<IQuerySelectValue> cols = new ArrayList<IQuerySelectValue>(columnMap.size());
ArrayList<String> distinctColumns = new ArrayList<String>(columnMap.size());
for (String dpname : dpnames) {
Column column = columnMap.get(dpname);
if (column != null && !distinctColumns.contains(dpname)) {
distinctColumns.add(dpname);
cols.add(column.queryColumn(sqlSelect.getTable()));
}
}
boolean hasJoins = sqlSelect.getJoins() != null;
if (hasJoins) {
// add pk columns so distinct-in-memory can be used
List<Column> rowIdentColumns = ((Table) fs.getTable()).getRowIdentColumns();
for (Column column : rowIdentColumns) {
if (!columnMap.containsKey(column.getDataProviderID())) {
cols.add(column.queryColumn(sqlSelect.getTable()));
}
}
}
sqlSelect.setColumns(cols);
try {
SQLSheet sheet = fs.getSQLSheet();
IConverterManager<IColumnConverter> columnConverterManager = ((FoundSetManager) fs.getFoundSetManager()).getColumnConverterManager();
SQLStatement trackingInfo = null;
if (fsm.getEditRecordList().hasAccess(sheet.getTable(), IRepository.TRACKING_VIEWS)) {
trackingInfo = new SQLStatement(ISQLActionTypes.SELECT_ACTION, sheet.getServerName(), sheet.getTable().getName(), null, null);
trackingInfo.setTrackingData(sqlSelect.getColumnNames(), new Object[][] {}, new Object[][] {}, fsm.getApplication().getUserUID(), fsm.getTrackingInfo(), fsm.getApplication().getClientID());
}
IDataSet dataSet = fsm.getDataServer().performQuery(fsm.getApplication().getClientID(), sheet.getServerName(), fsm.getTransactionID(sheet), sqlSelect, null, fsm.getTableFilterParams(sheet.getServerName(), sqlSelect), hasJoins, 0, -1, IDataServer.FOUNDSET_LOAD_QUERY, trackingInfo);
lst = new ArrayList<Object[]>(dataSet.getRowCount());
for (int i = 0; i < dataSet.getRowCount(); i++) {
Object[] row = new Object[dpnames.length];
// may contain more data: pk columns for distinct-in-memory
Object[] dataSetRow = dataSet.getRow(i);
for (int j = 0; j < dpnames.length; j++) {
Column column = columnMap.get(dpnames[j]);
if (column == null) {
// fs.containsDataProvider returned true for this dpname
row[j] = fs.getDataProviderValue(dpnames[j]);
} else {
row[j] = sheet.convertValueToObject(dataSetRow[distinctColumns.indexOf(dpnames[j])], sheet.getColumnIndex(dpnames[j]), columnConverterManager);
}
}
lst.add(row);
}
} catch (RepositoryException e) {
throw e;
} catch (Exception e) {
Debug.error(e);
throw new RepositoryException(e.getMessage());
}
} else {
// loop over the records
for (int i = 0; i < fs.getSize(); i++) {
IRecordInternal record = fs.getRecord(i);
Object[] pk = new Object[dpnames.length];
for (int j = 0; j < dpnames.length; j++) {
pk[j] = record.getValue(dpnames[j]);
}
lst.add(pk);
}
}
}
return new JSDataSet(application, BufferedDataSetInternal.createBufferedDataSet(dpnames, dptypes, lst, false));
}
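A hedged calling sketch for the conversion above; the manager and foundset instances as well as the column names are illustrative assumptions, not taken from the Servoy sources.

// Hypothetical calling sketch for js_convertToDataSet.
static JSDataSet toDataSet(JSDatabaseManager databaseManager, IFoundSetInternal customers) throws RepositoryException {
    // explicit dataprovider names pick the columns that end up in the dataset
    JSDataSet selectedColumns = databaseManager.js_convertToDataSet(customers, new String[] { "customer_id", "company_name" });
    // passing null falls back to the primary key dataproviders of the foundset
    JSDataSet pkColumns = databaseManager.js_convertToDataSet(customers, null);
    return selectedColumns != null ? selectedColumns : pkColumns;
}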