Use of org.apache.commons.vfs2.UserAuthenticationData.Type in project pentaho-kettle by pentaho, in the class CsvInputDialog, method getCSV.
// Get the data layout: sample the first line of the CSV file to populate the fields
// table, then optionally scan a number of lines to guess each field's type.
private void getCSV() {
  InputStream inputStream = null;
  try {
    CsvInputMeta meta = new CsvInputMeta();
    getInfo(meta);
    String filename = transMeta.environmentSubstitute(meta.getFilename());
    String delimiter = transMeta.environmentSubstitute(meta.getDelimiter());
    String enclosure = transMeta.environmentSubstitute(meta.getEnclosure());
    FileObject fileObject = KettleVFS.getFileObject(filename);
    if (!(fileObject instanceof LocalFile)) {
      // Sampling only works on local files.
      throw new KettleException(BaseMessages.getString(PKG, "CsvInput.Log.OnlyLocalFilesAreSupported"));
    }
    wFields.table.removeAll();
    inputStream = KettleVFS.getInputStream(fileObject);
    // Honor the configured encoding if there is one; otherwise use the platform default.
    String realEncoding = transMeta.environmentSubstitute(meta.getEncoding());
    InputStreamReader reader;
    if (Utils.isEmpty(realEncoding)) {
      reader = new InputStreamReader(inputStream);
    } else {
      reader = new InputStreamReader(inputStream, realEncoding);
    }
    EncodingType encodingType = EncodingType.guessEncodingType(reader.getEncoding());
    // Read a single line of data to determine the field layout...
    String line = TextFileInput.getLine(log, reader, encodingType, TextFileInputMeta.FILE_FORMAT_UNIX, new StringBuilder(1000));
    // Split the line (header or data) into parts...
    String[] fieldNames = CsvInput.guessStringsFromLine(log, line, delimiter, enclosure, meta.getEscapeCharacter());
    if (!meta.isHeaderPresent()) {
      // No header row: generate field names Field_000 ... Field_NNN
      DecimalFormat df = new DecimalFormat("000");
      for (int i = 0; i < fieldNames.length; i++) {
        fieldNames[i] = "Field_" + df.format(i);
      }
    } else {
      // Header row present: strip surrounding enclosure characters from the names.
      if (!Utils.isEmpty(meta.getEnclosure())) {
        for (int i = 0; i < fieldNames.length; i++) {
          if (fieldNames[i].startsWith(meta.getEnclosure()) && fieldNames[i].endsWith(meta.getEnclosure()) && fieldNames[i].length() > 1) {
            fieldNames[i] = fieldNames[i].substring(1, fieldNames[i].length() - 1);
          }
        }
      }
    }
    // Trim whitespace around the field names.
    for (int i = 0; i < fieldNames.length; i++) {
      fieldNames[i] = Const.trim(fieldNames[i]);
    }
    // Populate the fields table, defaulting every field to String for now.
    for (int i = 0; i < fieldNames.length; i++) {
      TableItem item = new TableItem(wFields.table, SWT.NONE);
      item.setText(1, fieldNames[i]);
      item.setText(2, ValueMetaFactory.getValueMetaName(ValueMetaInterface.TYPE_STRING));
    }
    wFields.removeEmptyRows();
    wFields.setRowNums();
    wFields.optWidth(true);
    // Now we can continue reading the rows of data:
    // sample a few lines to determine the correct type of the fields...
    String shellText = BaseMessages.getString(PKG, "CsvInputDialog.LinesToSample.DialogTitle");
    String lineText = BaseMessages.getString(PKG, "CsvInputDialog.LinesToSample.DialogMessage");
    EnterNumberDialog end = new EnterNumberDialog(shell, 100, shellText, lineText);
    int samples = end.open();
    if (samples >= 0) {
      getInfo(meta);
      TextFileCSVImportProgressDialog pd = new TextFileCSVImportProgressDialog(shell, meta, transMeta, reader, samples, true);
      String message = pd.open();
      if (message != null) {
        wFields.removeAll();
        // Refresh the dialog from the updated meta and show the scan result.
        getData(meta, false);
        wFields.removeEmptyRows();
        wFields.setRowNums();
        wFields.optWidth(true);
        EnterTextDialog etd = new EnterTextDialog(shell, BaseMessages.getString(PKG, "CsvInputDialog.ScanResults.DialogTitle"), BaseMessages.getString(PKG, "CsvInputDialog.ScanResults.DialogMessage"), message, true);
        etd.setReadOnly();
        etd.open();
        // asyncUpdatePreview();
      }
    }
  } catch (IOException e) {
    new ErrorDialog(shell, BaseMessages.getString(PKG, "CsvInputDialog.IOError.DialogTitle"), BaseMessages.getString(PKG, "CsvInputDialog.IOError.DialogMessage"), e);
  } catch (KettleException e) {
    new ErrorDialog(shell, BaseMessages.getString(PKG, "System.Dialog.Error.Title"), BaseMessages.getString(PKG, "CsvInputDialog.ErrorGettingFileDesc.DialogMessage"), e);
  } finally {
    // Closing the underlying stream also releases the reader wrapping it.
    // Check for null instead of relying on a broad catch to swallow an NPE
    // when the stream was never opened.
    if (inputStream != null) {
      try {
        inputStream.close();
      } catch (IOException e) {
        // Ignore close errors
      }
    }
  }
}
Use of org.apache.commons.vfs2.UserAuthenticationData.Type in project jackrabbit by apache, in the class VFSBackend, method deleteOlderRecursive.
/**
 * Deletes any descendant record files under {@code folderObject} if the record files are older than {@code timestamp},
 * and pushes all the deleted record identifiers into {@code deleteIdSet}.
 * Empty folders left behind by the deletions are removed as well.
 * @param deleteIdSet set to store all the deleted record identifiers
 * @param folderObject folder object to start with
 * @param timestamp cut-off last-modified time; only files modified before this value are deleted
 * @throws FileSystemException if any file system exception occurs
 * @throws DataStoreException if any data store exception occurs
 */
private void deleteOlderRecursive(Set<DataIdentifier> deleteIdSet, FileObject folderObject, long timestamp) throws FileSystemException, DataStoreException {
FileType type;
DataIdentifier identifier;
for (FileObject fileObject : VFSUtils.getChildFileOrFolders(folderObject)) {
type = fileObject.getType();
if (type == FileType.FOLDER) {
// Depth-first: prune the subtree, then remove the folder itself if it became empty.
deleteOlderRecursive(deleteIdSet, fileObject, timestamp);
// Synchronize so the emptiness check and the folder delete happen atomically.
// NOTE(review): assumes concurrent writers also synchronize on this backend — confirm.
synchronized (this) {
if (!VFSUtils.hasAnyChildFileOrFolder(fileObject)) {
fileObject.delete();
}
}
} else if (type == FileType.FILE) {
long lastModified = getLastModifiedTime(fileObject);
if (lastModified < timestamp) {
identifier = new DataIdentifier(fileObject.getName().getBaseName());
// Only delete records the data store confirms are no longer referenced.
if (getDataStore().confirmDelete(identifier)) {
getDataStore().deleteFromCache(identifier);
if (LOG.isInfoEnabled()) {
LOG.info("Deleting old file " + fileObject.getName().getFriendlyURI() + " modified: " + new Timestamp(lastModified).toString() + " length: " + fileObject.getContent().getSize());
}
if (deleteRecordFileObject(fileObject)) {
deleteIdSet.add(identifier);
} else {
LOG.warn("Failed to delete old file " + fileObject.getName().getFriendlyURI());
}
}
}
}
}
}
Use of org.apache.commons.vfs2.UserAuthenticationData.Type in project jackrabbit by apache, in the class VFSUtils, method getChildrenOfTypes.
/**
 * Returns the children of {@code folderObject} whose file type is one of {@code fileTypes}.
 *
 * @param folderObject folder whose children are listed
 * @param fileTypes file types to include in the result
 * @return the matching child file objects
 * @throws DataStoreException if the children cannot be listed
 */
private static List<FileObject> getChildrenOfTypes(FileObject folderObject, Set<FileType> fileTypes) throws DataStoreException {
  try {
    String folderBaseName = folderObject.getName().getBaseName();
    FileObject[] children = folderObject.getChildren();
    List<FileObject> files = new ArrayList<FileObject>(children.length);
    for (FileObject child : children) {
      FileType fileType = null;
      try {
        fileType = child.getType();
      } catch (FileSystemException notDetermineTypeEx) {
        if (!folderBaseName.equals(child.getName().getBaseName())) {
          throw notDetermineTypeEx;
        }
        // Ignore this case: some WebDAV servers or VFS seem to include the folder
        // itself as a child with an imaginary file type, and throw FileSystemException
        // saying "Could not determine the type of file". The child is skipped below
        // because fileType stays null.
      }
      if (fileType != null && fileTypes.contains(fileType)) {
        files.add(child);
      }
    }
    return files;
  } catch (FileSystemException e) {
    throw new DataStoreException("Could not find children under " + folderObject.getName().getFriendlyURI(), e);
  }
}
Use of org.apache.commons.vfs2.UserAuthenticationData.Type in project pentaho-kettle by pentaho, in the class ParGzipCsvInputDialog, method getCSV.
// Get the data layout: sample the first line of the gzipped CSV file to populate the
// fields table, then optionally scan a number of lines to guess each field's type.
private void getCSV() {
  InputStream inputStream = null;
  try {
    ParGzipCsvInputMeta meta = new ParGzipCsvInputMeta();
    getInfo(meta);
    String filename = transMeta.environmentSubstitute(meta.getFilename());
    FileObject fileObject = KettleVFS.getFileObject(filename);
    if (!(fileObject instanceof LocalFile)) {
      // Sampling only works on local files.
      throw new KettleException(BaseMessages.getString(PKG, "ParGzipCsvInput.Log.OnlyLocalFilesAreSupported"));
    }
    wFields.table.removeAll();
    // Wrap the raw file stream in a gzip decompressor. If the gzip header is invalid
    // the GZIPInputStream constructor throws, so close the raw stream ourselves in
    // that case to avoid leaking the file handle.
    InputStream rawStream = KettleVFS.getInputStream(fileObject);
    try {
      inputStream = new GZIPInputStream(rawStream);
    } catch (IOException e) {
      try {
        rawStream.close();
      } catch (IOException ignored) {
        // Ignore the secondary close failure; the original error is more useful.
      }
      throw e;
    }
    InputStreamReader reader = new InputStreamReader(inputStream);
    EncodingType encodingType = EncodingType.guessEncodingType(reader.getEncoding());
    // Read a single line of data to determine the field layout...
    String line = TextFileInput.getLine(log, reader, encodingType, TextFileInputMeta.FILE_FORMAT_MIXED, new StringBuilder(1000));
    // Split the line (header or data) into parts...
    String[] fieldNames = Const.splitString(line, meta.getDelimiter());
    if (!meta.isHeaderPresent()) {
      // No header row: generate field names Field_000 ... Field_NNN
      DecimalFormat df = new DecimalFormat("000");
      for (int i = 0; i < fieldNames.length; i++) {
        fieldNames[i] = "Field_" + df.format(i);
      }
    } else {
      // Header row present: strip surrounding enclosure characters from the names.
      if (!Utils.isEmpty(meta.getEnclosure())) {
        for (int i = 0; i < fieldNames.length; i++) {
          if (fieldNames[i].startsWith(meta.getEnclosure()) && fieldNames[i].endsWith(meta.getEnclosure()) && fieldNames[i].length() > 1) {
            fieldNames[i] = fieldNames[i].substring(1, fieldNames[i].length() - 1);
          }
        }
      }
    }
    // Trim whitespace around the field names.
    for (int i = 0; i < fieldNames.length; i++) {
      fieldNames[i] = Const.trim(fieldNames[i]);
    }
    // Populate the fields table, defaulting every field to String for now.
    for (int i = 0; i < fieldNames.length; i++) {
      TableItem item = new TableItem(wFields.table, SWT.NONE);
      item.setText(1, fieldNames[i]);
      item.setText(2, ValueMetaFactory.getValueMetaName(ValueMetaInterface.TYPE_STRING));
    }
    wFields.removeEmptyRows();
    wFields.setRowNums();
    wFields.optWidth(true);
    // Now we can continue reading the rows of data:
    // sample a few lines to determine the correct type of the fields...
    String shellText = BaseMessages.getString(PKG, "ParGzipCsvInputDialog.LinesToSample.DialogTitle");
    String lineText = BaseMessages.getString(PKG, "ParGzipCsvInputDialog.LinesToSample.DialogMessage");
    EnterNumberDialog end = new EnterNumberDialog(shell, 100, shellText, lineText);
    int samples = end.open();
    if (samples >= 0) {
      getInfo(meta);
      TextFileCSVImportProgressDialog pd = new TextFileCSVImportProgressDialog(shell, meta, transMeta, reader, samples, true);
      String message = pd.open();
      if (message != null) {
        wFields.removeAll();
        // Refresh the dialog from the updated meta and show the scan result.
        getData(meta);
        wFields.removeEmptyRows();
        wFields.setRowNums();
        wFields.optWidth(true);
        EnterTextDialog etd = new EnterTextDialog(shell, BaseMessages.getString(PKG, "ParGzipCsvInputDialog.ScanResults.DialogTitle"), BaseMessages.getString(PKG, "ParGzipCsvInputDialog.ScanResults.DialogMessage"), message, true);
        etd.setReadOnly();
        etd.open();
      }
    }
  } catch (IOException e) {
    new ErrorDialog(shell, BaseMessages.getString(PKG, "ParGzipCsvInputDialog.IOError.DialogTitle"), BaseMessages.getString(PKG, "ParGzipCsvInputDialog.IOError.DialogMessage"), e);
  } catch (KettleException e) {
    new ErrorDialog(shell, BaseMessages.getString(PKG, "System.Dialog.Error.Title"), BaseMessages.getString(PKG, "ParGzipCsvInputDialog.ErrorGettingFileDesc.DialogMessage"), e);
  } finally {
    // Closing the gzip stream also releases the raw stream and the reader wrapping it.
    // Check for null instead of relying on a broad catch to swallow an NPE
    // when the stream was never opened.
    if (inputStream != null) {
      try {
        inputStream.close();
      } catch (IOException e) {
        // Ignore errors
      }
    }
  }
}
Use of org.apache.commons.vfs2.UserAuthenticationData.Type in project pentaho-kettle by pentaho, in the class AccessInputMeta, method getValueMetaAndData.
/**
 * Returns the Kettle value metadata for a Microsoft Access database column and
 * converts the supplied raw column value into the matching Kettle data object.
 *
 * @param c MS Access column whose type drives the mapping
 * @param name destination field name; when {@code null} the column name is used
 * @param data MS Access column value (may be {@code null})
 * @return the value metadata together with the converted data
 */
public static ValueMetaAndData getValueMetaAndData(Column c, String name, Object data) {
ValueMetaAndData valueMetaData = new ValueMetaAndData();
// Raw value to convert; left untouched when null.
Object o = data;
// Get column type
DataType type = c.getType();
int sourceValueType = ValueMetaInterface.TYPE_STRING;
// We have to take care of meta AND data: pick the Kettle type and convert the value.
switch(type) {
case BINARY:
sourceValueType = ValueMetaInterface.TYPE_BINARY;
break;
case BOOLEAN:
sourceValueType = ValueMetaInterface.TYPE_BOOLEAN;
if (o != null) {
o = Boolean.valueOf(o.toString());
}
break;
case DOUBLE:
sourceValueType = ValueMetaInterface.TYPE_NUMBER;
break;
case FLOAT:
sourceValueType = ValueMetaInterface.TYPE_BIGNUMBER;
if (o != null) {
// Go through Float.toString to avoid binary-float artifacts in the BigDecimal.
o = new BigDecimal(Float.toString((Float) o));
}
break;
case INT:
sourceValueType = ValueMetaInterface.TYPE_NUMBER;
if (o != null) {
o = Double.parseDouble(o.toString());
}
break;
case BYTE:
sourceValueType = ValueMetaInterface.TYPE_NUMBER;
if (o != null) {
o = Double.parseDouble(o.toString());
}
break;
case LONG:
sourceValueType = ValueMetaInterface.TYPE_INTEGER;
if (o != null) {
// NOTE(review): assumes the Access driver hands LONG columns back as Integer — confirm.
Integer i = (Integer) o;
o = i.longValue();
}
break;
case MEMO:
// Memo stays a string; no conversion needed.
break;
case MONEY:
sourceValueType = ValueMetaInterface.TYPE_BIGNUMBER;
break;
case NUMERIC:
sourceValueType = ValueMetaInterface.TYPE_BIGNUMBER;
break;
case SHORT_DATE_TIME:
sourceValueType = ValueMetaInterface.TYPE_DATE;
break;
default:
// Default: treat the value as a string.
if (o != null) {
o = o.toString();
}
break;
}
ValueMetaInterface sourceValueMeta;
try {
sourceValueMeta = ValueMetaFactory.createValueMeta(name == null ? c.getName() : name, sourceValueType);
} catch (KettlePluginException e) {
// Fall back to a typeless value meta when the plugin registry cannot create one.
sourceValueMeta = new ValueMetaNone(name == null ? c.getName() : name);
}
sourceValueMeta.setLength(c.getLength(), c.getPrecision());
// set value meta data and return it
valueMetaData.setValueMeta(sourceValueMeta);
if (o != null) {
valueMetaData.setValueData(o);
}
return valueMetaData;
}
Aggregations