Example use of org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo in the project data-access by Pentaho:
class DSWDatasourceServiceImpl, method prepareForSerializaton.
/**
 * Cleans up legacy CSV state on a domain before it is serialized.
 *
 * <p>This method is responsible for cleaning up legacy information when
 * changing datasource types and also manages CSV files for CSV based
 * datasources: if the serialized {@code datasourceModel} property on the
 * first logical model still references a CSV file, the file reference is
 * cleared, the physical file is deleted, the staging table is dropped, and
 * the property is re-serialized.</p>
 *
 * @param domain the metadata domain whose first logical model may carry a
 *               serialized {@code DatasourceDTO} in its "datasourceModel"
 *               property; left untouched when that property is absent
 */
public void prepareForSerializaton(Domain domain) {
  String relativePath = PentahoSystem.getSystemSetting("file-upload-defaults/relative-path", // $NON-NLS-1$
      String.valueOf(FileUtils.DEFAULT_RELATIVE_UPLOAD_FILE_PATH));
  String path = PentahoSystem.getApplicationContext().getSolutionPath(relativePath);
  LogicalModel logicalModel = domain.getLogicalModels().get(0);
  String modelState = (String) logicalModel.getProperty("datasourceModel");
  if (modelState != null) {
    XStream xs = new XStream();
    DatasourceDTO datasource = (DatasourceDTO) xs.fromXML(modelState);
    CsvFileInfo csvFileInfo = datasource.getCsvModelInfo().getFileInfo();
    String csvFileName = csvFileInfo.getFilename();
    if (csvFileName != null) {
      // Cleanup logic when updating from CSV datasource to SQL datasource:
      // clear every file-related field so the stale CSV reference is not
      // serialized back into the model.
      csvFileInfo.setFilename(null);
      csvFileInfo.setTmpFilename(null);
      csvFileInfo.setFriendlyFilename(null);
      csvFileInfo.setContents(null);
      csvFileInfo.setEncoding(null);
      // Delete CSV file. File.delete() reports failure via its return value,
      // so surface a failed deletion instead of silently ignoring it.
      File csvFile = new File(path + File.separatorChar + csvFileName);
      if (csvFile.exists() && !csvFile.delete()) {
        logger.warn("Could not delete CSV file: " + csvFile.getAbsolutePath());
      }
      // Delete STAGING database table backing the CSV data.
      CsvTransformGenerator csvTransformGenerator =
          new CsvTransformGenerator(datasource.getCsvModelInfo(), AgileHelper.getDatabaseMeta());
      try {
        csvTransformGenerator.dropTable(datasource.getCsvModelInfo().getStageTableName());
      } catch (CsvTransformGeneratorException e) {
        // Best-effort cleanup: log and continue so serialization still proceeds.
        logger.error(e);
      }
    }
    // Update datasourceModel with the new (cleaned) modelState.
    modelState = xs.toXML(datasource);
    logicalModel.setProperty("datasourceModel", modelState);
  }
}
Example use of org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo in the project data-access by Pentaho:
class CsvUtils, method getFileContents.
/**
 * Loads a sample of an uploaded CSV file and wraps it in a {@link ModelInfo}.
 *
 * <p>The file is resolved either in the temporary upload directory (names
 * ending in ".tmp") or under the configured relative upload path, and up to
 * {@code rows} lines are read into the file info's contents.</p>
 *
 * @param project          project identifier (currently unused in this method's body)
 * @param name             file name to resolve; a ".tmp" suffix selects the staging directory
 * @param delimiter        field delimiter recorded on the file info
 * @param enclosure        field enclosure recorded on the file info
 * @param rows             maximum number of lines to read into the preview contents
 * @param isFirstRowHeader NOTE(review): never read in this method — headerRows is
 *                         hard-coded to 0 below; confirm whether this is intentional
 * @param encoding         character encoding used to read the file
 * @return a ModelInfo whose CsvFileInfo carries the sampled contents and settings
 * @throws Exception if the file cannot be read
 */
// $NON-NLS-1$ //$NON-NLS-2$
public ModelInfo getFileContents(String project, String name, String delimiter, String enclosure, int rows, boolean isFirstRowHeader, String encoding) throws Exception {
    String path;
    // Files still in upload staging keep a ".tmp" suffix and live in the
    // temporary upload directory; anything else resolves against the
    // configured (or default) relative upload path.
    if (name.endsWith(".tmp")) {
        // $NON-NLS-1$
        path = PentahoSystem.getApplicationContext().getSolutionPath(TMP_FILE_PATH);
    } else {
        String relativePath = PentahoSystem.getSystemSetting("file-upload-defaults/relative-path", // $NON-NLS-1$
            String.valueOf(DEFAULT_RELATIVE_UPLOAD_FILE_PATH));
        path = PentahoSystem.getApplicationContext().getSolutionPath(relativePath);
    }
    // NOTE(review): path and name are joined without an explicit separator —
    // presumably getSolutionPath() returns a trailing separator; verify.
    String fileLocation = path + name;
    ModelInfo result = new ModelInfo();
    CsvFileInfo fileInfo = new CsvFileInfo();
    fileInfo.setTmpFilename(name);
    result.setFileInfo(fileInfo);
    fileInfo.setContents(getLinesList(fileLocation, rows, encoding));
    fileInfo.setDelimiter(delimiter);
    fileInfo.setEnclosure(enclosure);
    fileInfo.setHeaderRows(0);
    // now try to generate some columns
    return result;
}
Example use of org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo in the project data-access by Pentaho:
class CsvUtils, method generateFields.
/* package-local visibility for testing purposes */
/**
 * Inspects a CSV file and builds a {@link ModelInfo} describing it: file
 * metadata, a sampled preview of its contents, and (optionally) profiled
 * data rows and column definitions.
 *
 * @param project      project identifier recorded on the file info
 * @param fileLocation absolute path of the CSV file to inspect
 * @param filename     temporary file name recorded on the file info
 * @param rowLimit     maximum number of lines to sample and profile
 * @param delimiter    field delimiter; null or empty triggers auto-detection
 * @param enclosure    field enclosure; replaced by '"' when auto-detecting
 * @param headerRows   number of header rows; reset to 0 when auto-detecting
 * @param doData       when true, profiled rows are copied onto the result
 * @param doColumns    when true, profiled columns are copied onto the result
 * @param encoding     character encoding used to read the file
 * @return a populated ModelInfo for the inspected file
 * @throws Exception if the file cannot be read or profiled
 */
ModelInfo generateFields(String project, String fileLocation, String filename, int rowLimit, String delimiter, String enclosure, int headerRows, boolean doData, boolean doColumns, String encoding) throws Exception {
    ModelInfo result = new ModelInfo();
    CsvFileInfo fileInfo = new CsvFileInfo();
    result.setFileInfo(fileInfo);
    CsvInspector inspector = new CsvInspector();
    // The first line alone is enough for the inspector to classify the format.
    String sampleLine = getLines(fileLocation, 1, encoding);
    int fileType = inspector.determineFileFormat(sampleLine);
    String contents = getLines(fileLocation, rowLimit, encoding);
    fileInfo.setContents(getLinesList(fileLocation, rowLimit, encoding));
    // A null or empty delimiter means "auto-detect": guess the delimiter from
    // the sampled contents and fall back to default enclosure/header settings.
    // (The null check fixes an NPE the original delimiter.equals("") had.)
    if (delimiter == null || delimiter.isEmpty()) {
        delimiter = inspector.guessDelimiter(contents);
        enclosure = "\""; // $NON-NLS-1$
        headerRows = 0;
    }
    fileInfo.setDelimiter(delimiter);
    fileInfo.setEnclosure(enclosure);
    fileInfo.setHeaderRows(headerRows);
    // Record the caller-supplied encoding on the file info.
    fileInfo.setEncoding(encoding);
    fileInfo.setProject(project);
    fileInfo.setTmpFilename(filename);
    DataProfile data = getDataProfile(fileInfo, rowLimit, fileLocation, fileType, encoding);
    if (doData) {
        result.setData(data.getRows());
    }
    if (doColumns) {
        result.setColumns(data.getColumns());
    }
    return result;
}
Example use of org.pentaho.platform.dataaccess.datasource.wizard.models.CsvFileInfo in the project data-access by Pentaho:
class TestUtil, method createModel.
/**
 * Builds a fully populated {@link ModelInfo} fixture for unit tests:
 * a CSV file info for "unit_test.csv" plus nine columns covering numeric,
 * date, string, and boolean types with varying precision/index/format
 * settings, staged against table "UNIT_TESTS".
 *
 * @return a test ModelInfo with file info, nine columns, and a stage table name
 */
public static ModelInfo createModel() {
    CsvFileInfo fileInfo = new CsvFileInfo();
    fileInfo.setTmpFilename("unit_test.csv");
    fileInfo.setProject("testsolution");
    fileInfo.setHeaderRows(1);
    fileInfo.setDelimiter(",");
    fileInfo.setEnclosure("\"");

    ColumnInfo[] columns = new ColumnInfo[9];
    columns[0] = column(DataType.NUMERIC, "PC_0", "REGIONC", ColumnInfo.FIELD_TYPE_BOTH, AggregationType.SUM.toString());
    columns[0].setPrecision(0);
    columns[0].setIndex(true);
    columns[1] = column(DataType.NUMERIC, "PC_1", "NWEIGHT", ColumnInfo.FIELD_TYPE_BOTH, AggregationType.SUM.toString());
    columns[1].setPrecision(5);
    columns[1].setIndex(true);
    columns[2] = column(DataType.NUMERIC, "PC_2", "Int", ColumnInfo.FIELD_TYPE_BOTH, AggregationType.SUM.toString());
    columns[3] = column(DataType.DATE, "PC_3", "xdate", ColumnInfo.FIELD_TYPE_DIMENSION, AggregationType.NONE.toString());
    // NOTE(review): "mm" is minutes in java.text date patterns — presumably
    // this fixture value is intentional ("MM" would be month); confirm.
    columns[3].setFormat("mm/dd/yy");
    columns[3].setIndex(true);
    // Column 4 is deliberately untitled and ignored, to exercise skip logic.
    columns[4] = column(DataType.STRING, "PC_4", "", ColumnInfo.FIELD_TYPE_DIMENSION, AggregationType.NONE.toString());
    columns[4].setIgnore(true);
    columns[5] = column(DataType.STRING, "PC_5", "Location", ColumnInfo.FIELD_TYPE_DIMENSION, AggregationType.NONE.toString());
    columns[5].setIndex(true);
    columns[5].setLength(60);
    columns[6] = column(DataType.NUMERIC, "PC_6", "charlen", ColumnInfo.FIELD_TYPE_BOTH, AggregationType.SUM.toString());
    columns[7] = column(DataType.NUMERIC, "PC_7", "xfactor", ColumnInfo.FIELD_TYPE_BOTH, AggregationType.SUM.toString());
    columns[7].setPrecision(7);
    columns[8] = column(DataType.BOOLEAN, "PC_8", "Flag", ColumnInfo.FIELD_TYPE_BOTH, AggregationType.SUM.toString());
    columns[8].setIndex(true);

    ModelInfo info = new ModelInfo();
    info.setFileInfo(fileInfo);
    info.setColumns(columns);
    info.setStageTableName("UNIT_TESTS");
    return info;
}

/**
 * Creates a {@link ColumnInfo} with the properties every test column shares;
 * per-column extras (precision, index, format, length, ignore) are set by the caller.
 */
private static ColumnInfo column(DataType dataType, String id, String title, String fieldType, String aggregateType) {
    ColumnInfo col = new ColumnInfo();
    col.setDataType(dataType);
    col.setId(id);
    col.setTitle(title);
    col.setFieldType(fieldType);
    col.setAggregateType(aggregateType);
    return col;
}
Aggregations