Use of org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO in project data-access by pentaho.
The class CsvDatasourceServiceImpl, method prepareForSerialization.
protected void prepareForSerialization(Domain domain) throws IOException {
  /*
   * This method is responsible for cleaning up legacy information when
   * changing datasource types and also manages CSV files for CSV-based
   * datasources.
   */
  String relativePath = PentahoSystem.getSystemSetting("file-upload-defaults/relative-path", // $NON-NLS-1$
      String.valueOf(FileUtils.DEFAULT_RELATIVE_UPLOAD_FILE_PATH));
  String path = PentahoSystem.getApplicationContext().getSolutionPath(relativePath);
  String TMP_FILE_PATH = File.separatorChar + "system" + File.separatorChar + "tmp" + File.separatorChar;
  String sysTmpDir = PentahoSystem.getApplicationContext().getSolutionPath(TMP_FILE_PATH);
  LogicalModel logicalModel = domain.getLogicalModels().get(0);
  String modelState = (String) logicalModel.getProperty("datasourceModel"); // $NON-NLS-1$
  if (modelState != null) {
    XStream xs = new XStream();
    DatasourceDTO datasource = (DatasourceDTO) xs.fromXML(modelState);
    CsvFileInfo csvFileInfo = datasource.getCsvModelInfo().getFileInfo();
    String tmpFileName = csvFileInfo.getTmpFilename();
    String csvFileName = csvFileInfo.getFilename();
    File tmpFile = new File(sysTmpDir + File.separatorChar + tmpFileName);
    // Move the CSV temporary file to its final destination.
    if (tmpFile.exists()) {
      File csvFile = new File(path + File.separatorChar + csvFileName);
      org.apache.commons.io.FileUtils.copyFile(tmpFile, csvFile);
    }
    // Cleanup logic when updating from a SQL datasource to a CSV datasource.
    datasource.setQuery(null);
    // Update datasourceModel with the new modelState.
    modelState = xs.toXML(datasource);
    logicalModel.setProperty("datasourceModel", modelState);
  }
}
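For context, the datasourceModel property stores a DatasourceDTO serialized as XML with XStream. A minimal round-trip sketch, assuming XStream 1.x on the classpath; the sample datasource name is a hypothetical value, and the getter is assumed to mirror the setDatasourceName setter used elsewhere on this page:

import com.thoughtworks.xstream.XStream;
import org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO;

public class DatasourceDtoRoundTrip {
  public static void main(String[] args) {
    XStream xs = new XStream();
    DatasourceDTO dto = new DatasourceDTO();
    dto.setDatasourceName("sampleDatasource"); // hypothetical sample value
    String modelState = xs.toXML(dto); // the XML string stored in the "datasourceModel" property
    DatasourceDTO back = (DatasourceDTO) xs.fromXML(modelState); // read back, as prepareForSerialization does
    System.out.println(back.getDatasourceName()); // getter assumed to mirror the setter
  }
}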
Use of org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO in project data-access by pentaho.
The class CsvDatasourceServiceImplTest, method testHasPermissions.
@Test
public void testHasPermissions() throws Exception {
  hasPermissions = true;
  final ISystemSettings systemSettings = mock(ISystemSettings.class);
  when(systemSettings.getSystemSetting("data-access-override", "false")).thenReturn("false");
  PentahoSystem.setSystemSettingsService(systemSettings);
  String filename = "anotherStageFile_CsvFile.csv";
  File file = createTmpCsvFile(filename);
  file.deleteOnExit();
  try {
    ModelInfo modelInfo = service.stageFile(filename, ",", "\n", true, "utf-8");
    CsvFileInfo fileInfo = modelInfo.getFileInfo();
    assertEquals("One header row", 1, fileInfo.getHeaderRows());
    assertEquals("Header + content row", 2, fileInfo.getContents().size());
    assertEquals(filename, fileInfo.getTmpFilename());
    final FileInfo[] stagedFiles = service.getStagedFiles();
    assertNotNull(stagedFiles);
    boolean present = false;
    for (FileInfo info : stagedFiles) {
      if (filename.equals(info.getName())) {
        present = true;
        break;
      }
    }
    assertTrue(present);
    final String encoding = service.getEncoding(filename);
    assertNotNull(encoding);
    final List<String> previewRows = service.getPreviewRows(filename, true, 1, "utf-8");
    assertNotNull(previewRows);
    assertEquals(1, previewRows.size());
    assertEquals("col1,col2", previewRows.get(0));
    final DatasourceDTO datasourceDto = mock(DatasourceDTO.class);
    when(datasourceDto.getCsvModelInfo()).thenReturn(modelInfo);
    try {
      service.generateDomain(datasourceDto);
    } catch (Exception e) {
      // Testing this logic is not the purpose of this test.
    }
    // Passed the permissions check.
    verify(datasourceDto, times(1)).getCsvModelInfo();
  } finally {
    file.delete();
  }
}
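The createTmpCsvFile helper is defined elsewhere in the test class. A plausible sketch that would satisfy the assertions above (one header row, one content row, first preview row "col1,col2"); the temp-directory location and the java.io imports are assumptions:

// Hypothetical sketch of createTmpCsvFile; the real helper lives elsewhere in the test class.
private File createTmpCsvFile(String filename) throws IOException {
  File file = new File(System.getProperty("java.io.tmpdir"), filename); // staging location assumed
  try (PrintWriter out = new PrintWriter(new FileWriter(file))) {
    out.println("col1,col2"); // header row -> getHeaderRows() == 1
    out.println("1,2");       // content row -> getContents().size() == 2
  }
  return file;
}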
Use of org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO in project data-access by pentaho.
The class DSWDatasourceServiceImplTest, method testDeSerializeModelStateValidString.
@Test
public void testDeSerializeModelStateValidString() throws Exception {
  PentahoSystem.registerObjectFactory(new TestObjectFactory());
  DatasourceModel datasourceModel = spy(new DatasourceModel());
  doReturn("testdatasource").when(datasourceModel).generateTableName();
  datasourceModel.setDatasourceName("testDatasource");
  datasourceModel.setDatasourceType(DatasourceType.CSV);
  DatasourceDTO dto = DatasourceDTO.generateDTO(datasourceModel);
  assertNotNull(dto);
  String serializedDTO = dswService.serializeModelState(dto);
  dswService.deSerializeModelState(serializedDTO);
}
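The test only verifies that deserialization completes without throwing. If deSerializeModelState returns the reconstructed DTO (an assumption about its return type), the round trip could be asserted explicitly:

// Hypothetical strengthening of the check; assumes deSerializeModelState returns
// the reconstructed DatasourceDTO and a getter mirroring setDatasourceName.
DatasourceDTO roundTripped = dswService.deSerializeModelState(serializedDTO);
assertEquals("testDatasource", roundTripped.getDatasourceName());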
Use of org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO in project data-access by pentaho.
The class DSWDatasourceServiceImplTest, method testGenerateQueryDomain.
private void testGenerateQueryDomain(String modelName, String query, List<String> roleList, List<String> userList)
    throws DatasourceServiceException {
  ModelInfo modelInfo = mock(ModelInfo.class);
  when(modelInfo.getFileInfo()).thenReturn(mock(CsvFileInfo.class));
  DatasourceDTO datasourceDTO = new DatasourceDTO();
  datasourceDTO.setConnectionName(CONNECTION_NAME);
  datasourceDTO.setDatasourceName(CONNECTION_NAME);
  datasourceDTO.setCsvModelInfo(modelInfo);
  DatabaseConnection connectionSpy = spy(new DatabaseConnection());
  connectionSpy.setName(CONNECTION_NAME);
  connectionSpy.setDatabaseName("[database name 接続 <;>!@#$%^&*()_-=+.,]");
  connectionSpy.setDatabasePort("123456");
  connectionSpy.setHostname("[hostname 接続 <;>!@#$%^&*()_-=+.,]");
  connectionSpy.setPassword("[password 接続 <;>!@#$%^&*()_-=+.,]");
  connectionSpy.setUsername("[username 接続 <;>!@#$%^&*()_-=+.,]");
  connectionSpy.setDatabaseType(mock(IDatabaseType.class));
  doReturn(modelerService).when(dswService).createModelerService();
  doReturn(true).when(dswService).hasDataAccessPermission();
  doReturn(roleList).when(dswService).getPermittedRoleList();
  doReturn(userList).when(dswService).getPermittedUserList();
  doReturn(null).when(dswService).getGeoContext();
  doReturn(1).when(dswService).getDefaultAcls();
  QueryDatasourceSummary summary = dswService.generateQueryDomain(modelName, query, connectionSpy, datasourceDTO);
  try {
    verify(dswService).executeQuery("[connection 接続 <;>!@#$%^&*()_-=+.,]", query, "1");
  } catch (Exception e) {
    e.printStackTrace();
  }
  verify(connectionSpy).setName("[connection 接続 <;>!@#$%^&*()_-=+.,]");
  verify(connectionSpy).setDatabaseName("[database name 接続 <;>!@#$%^&*()_-=+.,]");
  verify(connectionSpy, times(2)).setDatabasePort("123456");
  verify(connectionSpy).setHostname("[hostname 接続 <;>!@#$%^&*()_-=+.,]");
  verify(connectionSpy).setPassword("[password 接続 <;>!@#$%^&*()_-=+.,]");
  verify(connectionSpy).setUsername("[username 接続 <;>!@#$%^&*()_-=+.,]");
  assertNotNull(summary);
  assertNotNull(summary.getDomain());
  assertEquals(CONNECTION_NAME, summary.getDomain().getId());
}
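Since the helper is private, it is exercised by the class's own @Test methods. A hypothetical caller with illustrative arguments (java.util.Collections assumed imported):

@Test
public void testGenerateQueryDomainPermitted() throws Exception {
  // Model name, query, and permitted role/user lists are illustrative values.
  testGenerateQueryDomain("testModel", "select * from testTable",
      Collections.singletonList("Authenticated"),
      Collections.singletonList("admin"));
}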
Use of org.pentaho.platform.dataaccess.datasource.wizard.models.DatasourceDTO in project data-access by pentaho.
The class DSWDatasourceServiceImpl, method prepareForSerializaton.
public void prepareForSerializaton(Domain domain) {
  /*
   * This method is responsible for cleaning up legacy information when
   * changing datasource types and also manages CSV files for CSV-based
   * datasources.
   */
  String relativePath = PentahoSystem.getSystemSetting("file-upload-defaults/relative-path", // $NON-NLS-1$
      String.valueOf(FileUtils.DEFAULT_RELATIVE_UPLOAD_FILE_PATH));
  String path = PentahoSystem.getApplicationContext().getSolutionPath(relativePath);
  LogicalModel logicalModel = domain.getLogicalModels().get(0);
  String modelState = (String) logicalModel.getProperty("datasourceModel");
  if (modelState != null) {
    XStream xs = new XStream();
    DatasourceDTO datasource = (DatasourceDTO) xs.fromXML(modelState);
    CsvFileInfo csvFileInfo = datasource.getCsvModelInfo().getFileInfo();
    String csvFileName = csvFileInfo.getFilename();
    if (csvFileName != null) {
      // Cleanup logic when updating from a CSV datasource to a SQL datasource.
      csvFileInfo.setFilename(null);
      csvFileInfo.setTmpFilename(null);
      csvFileInfo.setFriendlyFilename(null);
      csvFileInfo.setContents(null);
      csvFileInfo.setEncoding(null);
      // Delete the CSV file.
      File csvFile = new File(path + File.separatorChar + csvFileName);
      if (csvFile.exists()) {
        csvFile.delete();
      }
      // Drop the staging database table.
      CsvTransformGenerator csvTransformGenerator =
          new CsvTransformGenerator(datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta());
      try {
        csvTransformGenerator.dropTable(datasource.getCsvModelInfo().getStageTableName());
      } catch (CsvTransformGeneratorException e) {
        logger.error(e);
      }
    }
    // Update datasourceModel with the new modelState.
    modelState = xs.toXML(datasource);
    logicalModel.setProperty("datasourceModel", modelState);
  }
}
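A hedged sketch of a typical call site (hypothetical; the dswService and domain variables are assumed): the method is invoked on a domain before it is serialized, so stale CSV file references and the staging table do not travel with the metadata.

// Hypothetical call site: clean up the domain's datasourceModel before serializing it.
dswService.prepareForSerializaton(domain);
// The "datasourceModel" property now holds the cleaned-up DatasourceDTO XML.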