Use of org.pentaho.platform.dataaccess.datasource.wizard.csv.CsvUtils in project data-access by pentaho.
The class CsvDatasourceServiceImpl, method getPreviewRows.
public List<String> getPreviewRows(String filename, boolean isFirstRowHeader, int rows, String encoding) throws Exception {
  checkPermissions();
  List<String> previewRows = null;
  if (!StringUtils.isEmpty(filename)) {
    CsvUtils service = new CsvUtils();
    ModelInfo mi = service.getFileContents("", filename, ",", "\"", rows, isFirstRowHeader, //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        encoding);
    previewRows = mi.getFileInfo().getContents();
  }
  return previewRows;
}
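For context, a minimal sketch of calling CsvUtils#getFileContents directly with the same arguments as the service method above. The wrapper class CsvPreviewSketch, the fixed sample size of 10 rows, and the ModelInfo import path are assumptions for illustration, not part of the project.
// Sketch only: previews the first rows of a CSV file already staged on the server,
// mirroring the getPreviewRows call above.
import java.util.List;

import org.pentaho.platform.dataaccess.datasource.wizard.csv.CsvUtils;
// The ModelInfo package is assumed here; adjust to where it lives in your tree.
import org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo;

public class CsvPreviewSketch {
  public static List<String> previewStagedCsv(String stagedFileName, String encoding) throws Exception {
    CsvUtils csvUtils = new CsvUtils();
    // Empty project path, comma delimiter, double-quote enclosure, 10 sample rows,
    // first row treated as a header -- same argument order as in getPreviewRows above.
    ModelInfo modelInfo = csvUtils.getFileContents("", stagedFileName, ",", "\"", 10, true, encoding);
    return modelInfo.getFileInfo().getContents();
  }
}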
Use of org.pentaho.platform.dataaccess.datasource.wizard.csv.CsvUtils in project data-access by pentaho.
The class CsvDatasourceServiceImpl, method stageFile.
public ModelInfo stageFile(String fileName, String delimiter, String enclosure, boolean isFirstRowHeader, String encoding) throws Exception {
  checkPermissions();
  ModelInfo modelInfo;
  fileName = FilenameUtils.getName(fileName);
  try {
    int headerRows = isFirstRowHeader ? 1 : 0;
    modelInfo = new CsvUtils().generateFields("", fileName, AgileHelper.getCsvSampleRowSize(), delimiter, enclosure, headerRows, true, true, //$NON-NLS-1$
        encoding);
  } catch (FileNotFoundException e) {
    logger.error(e);
    throw new Exception("File was not found: " + fileName);
  } catch (Exception e) {
    logger.error(e);
    throw e;
  }
  return modelInfo;
}
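stageFile delegates column detection to CsvUtils#generateFields. A hedged sketch of a direct call follows; the wrapper class, the fixed sample size of 100 rows (standing in for AgileHelper.getCsvSampleRowSize()), and the ModelInfo import path are assumptions.
// Sketch only: generates column metadata for a staged CSV file, using the same
// generateFields signature as stageFile above but with a fixed sample size.
import org.pentaho.platform.dataaccess.datasource.wizard.csv.CsvUtils;
// The ModelInfo package is assumed here.
import org.pentaho.platform.dataaccess.datasource.wizard.models.ModelInfo;

public class CsvStagingSketch {
  public static ModelInfo profileStagedCsv(String stagedFileName, String delimiter, String enclosure,
      boolean isFirstRowHeader, String encoding) throws Exception {
    int headerRows = isFirstRowHeader ? 1 : 0;
    // 100 sample rows stands in for AgileHelper.getCsvSampleRowSize(); the two
    // trailing booleans mirror the call in stageFile above.
    return new CsvUtils().generateFields("", stagedFileName, 100, delimiter, enclosure, headerRows, true, true, encoding);
  }
}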
Use of org.pentaho.platform.dataaccess.datasource.wizard.csv.CsvUtils in project data-access by pentaho.
The class MetadataDatasourceService, method uploadServletImportMetadataDatasource.
/**
 * @param localizeBundleEntries
 *          Semicolon-separated list of localization bundles in the form propertiesFileName=stagedFileName
 * @param domainId
 *          Unique identifier for the metadata datasource
 * @param metadataFile
 *          Name of the metadata.xmi file staged in the system tmp folder
 *
 * @return Response containing the success of the method
 *
 * @throws PentahoAccessControlException
 *           Thrown when validation of access fails
 */
@PUT
@Path("/uploadServletImport")
@Consumes({ TEXT_PLAIN })
@Produces("text/plain")
@Deprecated
@Facet(name = "Unsupported")
public Response uploadServletImportMetadataDatasource(String localizeBundleEntries, @QueryParam("domainId") String domainId,
    @QueryParam("metadataFile") String metadataFile) throws PentahoAccessControlException {
  try {
    DatasourceService.validateAccess();
  } catch (PentahoAccessControlException e) {
    return Response.serverError().entity(e.toString()).build();
  }
  IMetadataDomainRepository metadataDomainRepository = PentahoSystem.get(IMetadataDomainRepository.class, PentahoSessionHolder.getSession());
  PentahoMetadataDomainRepository metadataImporter = new PentahoMetadataDomainRepository(PentahoSystem.get(IUnifiedRepository.class));
  CsvUtils csvUtils = new CsvUtils();
  boolean validPropertyFiles = true;
  StringBuffer invalidFiles = new StringBuffer();
  try {
    String TMP_FILE_PATH = File.separatorChar + "system" + File.separatorChar + "tmp" + File.separatorChar;
    String sysTmpDir = PentahoSystem.getApplicationContext().getSolutionPath(TMP_FILE_PATH);
    FileInputStream metadataInputStream = new FileInputStream(sysTmpDir + File.separatorChar + metadataFile);
    metadataImporter.storeDomain(metadataInputStream, domainId, true);
    metadataDomainRepository.getDomain(domainId);
    StringTokenizer bundleEntriesParam = new StringTokenizer(localizeBundleEntries, ";");
    while (bundleEntriesParam.hasMoreTokens()) {
      String localizationBundleElement = bundleEntriesParam.nextToken();
      StringTokenizer localizationBundle = new StringTokenizer(localizationBundleElement, "=");
      String localizationFileName = localizationBundle.nextToken();
      String localizationFile = localizationBundle.nextToken();
      if (localizationFileName.endsWith(".properties")) {
        String encoding = csvUtils.getEncoding(localizationFile);
        if (ENCODINGS.contains(encoding)) {
          for (final Pattern propertyBundlePattern : patterns) {
            final Matcher propertyBundleMatcher = propertyBundlePattern.matcher(localizationFileName);
            if (propertyBundleMatcher.matches()) {
              FileInputStream bundleFileInputStream = new FileInputStream(sysTmpDir + File.separatorChar + localizationFile);
              metadataImporter.addLocalizationFile(domainId, propertyBundleMatcher.group(2), bundleFileInputStream, true);
              break;
            }
          }
        } else {
          validPropertyFiles = false;
          invalidFiles.append(localizationFileName);
        }
      } else {
        validPropertyFiles = false;
        invalidFiles.append(localizationFileName);
      }
    }
    if (!validPropertyFiles) {
      return Response.serverError().entity(Messages.getString("MetadataDatasourceService.ERROR_002_PROPERTY_FILES_ERROR") + invalidFiles.toString()).build();
    }
    return Response.ok("SUCCESS").type(MediaType.TEXT_PLAIN).build();
  } catch (Exception e) {
    metadataImporter.removeDomain(domainId);
    return Response.serverError().entity(Messages.getString("MetadataDatasourceService.ERROR_001_METADATA_DATASOURCE_ERROR")).build();
  }
}
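The endpoint parses localizeBundleEntries as semicolon-separated propertiesFileName=stagedFileName pairs before handing each .properties bundle to CsvUtils#getEncoding. A small sketch of how a caller might assemble that parameter; the helper class and the sample bundle names are illustrative only.
// Sketch only: assembles the localizeBundleEntries parameter expected by the
// endpoint above -- semicolon-separated "propertiesFileName=stagedFileName" pairs.
import java.util.LinkedHashMap;
import java.util.Map;

public class LocalizeBundleEntriesSketch {
  public static String buildLocalizeBundleEntries(Map<String, String> bundles) {
    StringBuilder entries = new StringBuilder();
    for (Map.Entry<String, String> bundle : bundles.entrySet()) {
      if (entries.length() > 0) {
        entries.append(";");
      }
      // key: properties file name (must end with ".properties" to pass the check above),
      // value: name of the staged copy in the server's tmp folder.
      entries.append(bundle.getKey()).append("=").append(bundle.getValue());
    }
    return entries.toString();
  }

  public static void main(String[] args) {
    Map<String, String> bundles = new LinkedHashMap<>();
    bundles.put("steel-wheels_en.properties", "steel-wheels_en.properties");
    System.out.println(buildLocalizeBundleEntries(bundles));
  }
}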
Use of org.pentaho.platform.dataaccess.datasource.wizard.csv.CsvUtils in project data-access by pentaho.
The class CsvDatasourceServiceImpl, method getEncoding.
public String getEncoding(String fileName) {
  checkPermissions();
  String encoding = null;
  try {
    CsvUtils csvModelService = new CsvUtils();
    encoding = csvModelService.getEncoding(fileName);
  } catch (Exception e) {
    logger.error(e);
  }
  return encoding;
}
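A minimal sketch of calling CsvUtils#getEncoding outside the service wrapper; the helper class and the UTF-8 fallback are assumptions for illustration, not part of the project.
// Sketch only: detects the character encoding of a staged file via CsvUtils,
// falling back to UTF-8 when detection fails (the fallback choice is illustrative).
import org.pentaho.platform.dataaccess.datasource.wizard.csv.CsvUtils;

public class EncodingDetectionSketch {
  public static String detectEncodingOrDefault(String fileName) {
    try {
      return new CsvUtils().getEncoding(fileName);
    } catch (Exception e) {
      return "UTF-8";
    }
  }
}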