Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.
The class ModelerServiceIT, method domainForCsvDatasource_GeneratedWithCsvDatasourceImportStrategy:
@Test
public void domainForCsvDatasource_GeneratedWithCsvDatasourceImportStrategy() throws Exception {
  ModelInfo modelInfo = new ModelInfo();
  ColumnInfo[] columnInfos = new ColumnInfo[] { createColumnInfo("id", "title") };
  modelInfo.setColumns(columnInfos);
  modelerService = spy(modelerService);
  DatabaseMeta dbMeta = mock(DatabaseMeta.class);
  doReturn(dbMeta).when(modelerService).getDatabaseMeta();
  TableModelerSource source = mock(TableModelerSource.class);
  doReturn(source).when(modelerService).createTableModelerSource(any(DatabaseMeta.class), anyString(), anyString(), anyString());
  modelerService.generateCSVDomain(modelInfo);
  verify(modelerService).toColumns(columnInfos);
  // The most important check here: the domain is generated with CsvDatasourceImportStrategy.
  verify(source).generateDomain(any(CsvDatasourceImportStrategy.class));
}
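The createColumnInfo helper is elided from this excerpt. A minimal sketch, assuming ColumnInfo exposes plain setId and setTitle setters (an assumption, not confirmed by the excerpt):

private ColumnInfo createColumnInfo(String id, String title) {
  ColumnInfo columnInfo = new ColumnInfo();
  columnInfo.setId(id);       // assumed setter for the column identifier
  columnInfo.setTitle(title); // assumed setter for the display title
  return columnInfo;
}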
Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.
The class CsvDatasourceServiceImplTest, method stageFile_CsvFile:
@Test
public void stageFile_CsvFile() throws Exception {
  String filename = "stageFile_CsvFile.csv";
  File file = createTmpCsvFile(filename);
  file.deleteOnExit();
  try {
    ModelInfo modelInfo = service.stageFile(filename, ",", "\n", true, "utf-8");
    CsvFileInfo fileInfo = modelInfo.getFileInfo();
    assertEquals("One header row", 1, fileInfo.getHeaderRows());
    assertEquals("Header + content row", 2, fileInfo.getContents().size());
    assertEquals(filename, fileInfo.getTmpFilename());
  } finally {
    file.delete();
  }
}
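The createTmpCsvFile helper is likewise not shown. A hedged sketch that writes the one header row and one content row the assertions above expect; tmpDir is a hypothetical fixture pointing at the directory the service stages files from:

private File createTmpCsvFile(String filename) throws Exception {
  File file = new File(tmpDir, filename); // tmpDir: assumed staging directory
  PrintWriter writer = new PrintWriter(file, "utf-8");
  try {
    writer.println("col1,col2"); // single header row
    writer.println("1,value");   // single content row
  } finally {
    writer.close();
  }
  return file;
}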
Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.
The class DSWDatasourceServiceImplTest, method testGenerateQueryDomain:
private void testGenerateQueryDomain(String modelName, String query, List<String> roleList, List<String> userList) throws DatasourceServiceException {
  ModelInfo modelInfo = mock(ModelInfo.class);
  when(modelInfo.getFileInfo()).thenReturn(mock(CsvFileInfo.class));
  DatasourceDTO datasourceDTO = new DatasourceDTO();
  datasourceDTO.setConnectionName(CONNECTION_NAME);
  datasourceDTO.setDatasourceName(CONNECTION_NAME);
  datasourceDTO.setCsvModelInfo(modelInfo);
  DatabaseConnection connectionSpy = spy(new DatabaseConnection());
  connectionSpy.setName(CONNECTION_NAME);
  connectionSpy.setDatabaseName("[database name 接続 <;>!@#$%^&*()_-=+.,]");
  connectionSpy.setDatabasePort("123456");
  connectionSpy.setHostname("[hostname 接続 <;>!@#$%^&*()_-=+.,]");
  connectionSpy.setPassword("[password 接続 <;>!@#$%^&*()_-=+.,]");
  connectionSpy.setUsername("[username 接続 <;>!@#$%^&*()_-=+.,]");
  connectionSpy.setDatabaseType(mock(IDatabaseType.class));
  doReturn(modelerService).when(dswService).createModelerService();
  doReturn(true).when(dswService).hasDataAccessPermission();
  doReturn(roleList).when(dswService).getPermittedRoleList();
  doReturn(userList).when(dswService).getPermittedUserList();
  doReturn(null).when(dswService).getGeoContext();
  doReturn(1).when(dswService).getDefaultAcls();
  QueryDatasourceSummary summary = dswService.generateQueryDomain(modelName, query, connectionSpy, datasourceDTO);
  try {
    verify(dswService).executeQuery("[connection 接続 <;>!@#$%^&*()_-=+.,]", query, "1");
  } catch (Exception e) {
    e.printStackTrace();
  }
  verify(connectionSpy).setName("[connection 接続 <;>!@#$%^&*()_-=+.,]");
  verify(connectionSpy).setDatabaseName("[database name 接続 <;>!@#$%^&*()_-=+.,]");
  verify(connectionSpy, times(2)).setDatabasePort("123456");
  verify(connectionSpy).setHostname("[hostname 接続 <;>!@#$%^&*()_-=+.,]");
  verify(connectionSpy).setPassword("[password 接続 <;>!@#$%^&*()_-=+.,]");
  verify(connectionSpy).setUsername("[username 接続 <;>!@#$%^&*()_-=+.,]");
  assertNotNull(summary);
  assertNotNull(summary.getDomain());
  assertEquals(CONNECTION_NAME, summary.getDomain().getId());
}
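A hypothetical caller for this parameterized helper; the model name, query, and the java.util.Collections singleton lists are illustrative values, not taken from the project:

@Test
public void generateQueryDomain_PermittedRoleAndUser() throws Exception {
  testGenerateQueryDomain("testModel", "select * from testTable",
    Collections.singletonList("Authenticated"), Collections.singletonList("admin"));
}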
Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.
The class CsvDatasourceServiceImpl, method generateDomain:
public FileTransformStats generateDomain(DatasourceDTO datasourceDto) throws Exception {
  checkPermissions();
  synchronized (lock) {
    ModelInfo modelInfo = datasourceDto.getCsvModelInfo();
    IPentahoSession pentahoSession = null;
    try {
      pentahoSession = PentahoSessionHolder.getSession();
      KettleSystemListener.environmentInit(pentahoSession);
      String statsKey = FileTransformStats.class.getSimpleName() + "_" + modelInfo.getFileInfo().getTmpFilename(); //$NON-NLS-1$
      FileTransformStats stats = new FileTransformStats();
      pentahoSession.setAttribute(statsKey, stats);
      CsvTransformGenerator csvTransformGenerator = new CsvTransformGenerator(modelInfo, AgileHelper.getDatabaseMeta());
      csvTransformGenerator.setTransformStats(stats);
      try {
        csvTransformGenerator.dropTable(modelInfo.getStageTableName());
      } catch (CsvTransformGeneratorException e) {
        // This is OK; the table may not have existed.
        logger.info("Could not drop table before staging"); //$NON-NLS-1$
      }
      csvTransformGenerator.createOrModifyTable(pentahoSession);
      // No longer need to truncate the table since we dropped it a few lines up, so just pass false.
      csvTransformGenerator.loadTable(false, pentahoSession, true);
      ArrayList<String> combinedErrors = new ArrayList<String>(modelInfo.getCsvInputErrors());
      combinedErrors.addAll(modelInfo.getTableOutputErrors());
      if (stats.getErrors() != null && stats.getErrors().size() > 0) {
        stats.getErrors().addAll(combinedErrors);
      } else {
        stats.setErrors(combinedErrors);
      }
      // Wait until the row loading is done.
      while (!stats.isRowsFinished()) {
        Thread.sleep(200);
      }
      modelerWorkspace.setDomain(modelerService.generateCSVDomain(modelInfo));
      modelerWorkspace.getWorkspaceHelper().autoModelFlat(modelerWorkspace);
      modelerWorkspace.getWorkspaceHelper().autoModelRelationalFlat(modelerWorkspace);
      modelerWorkspace.setModelName(modelInfo.getDatasourceName());
      modelerWorkspace.getWorkspaceHelper().populateDomain(modelerWorkspace);
      Domain workspaceDomain = modelerWorkspace.getDomain();
      XStream xstream = new XStream();
      String serializedDto = xstream.toXML(datasourceDto);
      workspaceDomain.getLogicalModels().get(0).setProperty("datasourceModel", serializedDto);
      workspaceDomain.getLogicalModels().get(0).setProperty("DatasourceType", "CSV");
      prepareForSerialization(workspaceDomain);
      modelerService.serializeModels(workspaceDomain, modelerWorkspace.getModelName());
      stats.setDomain(modelerWorkspace.getDomain());
      return stats;
    } catch (Exception e) {
      logger.error(e.getMessage());
      throw e;
    } finally {
      if (pentahoSession != null) {
        pentahoSession.destroy();
      }
    }
  }
}
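A hedged usage sketch for the method above, reusing the stageFile call shown in the earlier test; the file name and datasource name are assumptions, not values from the project:

// Stage the uploaded CSV, attach the resulting ModelInfo, then build the domain.
DatasourceDTO dto = new DatasourceDTO();
dto.setDatasourceName("SalesData"); // assumed datasource name
dto.setCsvModelInfo(service.stageFile("sales.csv", ",", "\n", true, "utf-8"));
FileTransformStats stats = service.generateDomain(dto);
if (stats.getErrors() != null && !stats.getErrors().isEmpty()) {
  // inspect the combined CSV-input and table-output errors collected above
}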
Use of org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo in project data-access by pentaho.
The class CsvUtils, method getFileContents:
public ModelInfo getFileContents(String project, String name, String delimiter, String enclosure, int rows, boolean isFirstRowHeader, String encoding) throws Exception {
  String path;
  if (name.endsWith(".tmp")) { //$NON-NLS-1$
    path = PentahoSystem.getApplicationContext().getSolutionPath(TMP_FILE_PATH);
  } else {
    String relativePath = PentahoSystem.getSystemSetting("file-upload-defaults/relative-path", //$NON-NLS-1$
      String.valueOf(DEFAULT_RELATIVE_UPLOAD_FILE_PATH));
    path = PentahoSystem.getApplicationContext().getSolutionPath(relativePath);
  }
  String fileLocation = path + name;
  ModelInfo result = new ModelInfo();
  CsvFileInfo fileInfo = new CsvFileInfo();
  fileInfo.setTmpFilename(name);
  result.setFileInfo(fileInfo);
  fileInfo.setContents(getLinesList(fileLocation, rows, encoding));
  fileInfo.setDelimiter(delimiter);
  fileInfo.setEnclosure(enclosure);
  fileInfo.setHeaderRows(0);
  // Now try to generate some columns.
  return result;
}
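The getLinesList helper is not included in this excerpt. A minimal sketch matching its call site (fileLocation, rows, encoding), assuming it returns at most rows raw lines decoded with the given encoding:

private List<String> getLinesList(String fileLocation, int rows, String encoding) throws IOException {
  List<String> lines = new ArrayList<String>();
  BufferedReader reader = new BufferedReader(
    new InputStreamReader(new FileInputStream(fileLocation), encoding));
  try {
    String line;
    while (lines.size() < rows && (line = reader.readLine()) != null) {
      lines.add(line);
    }
  } finally {
    reader.close();
  }
  return lines;
}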