Example usage of org.apache.commons.vfs2.provider.local.LocalFile in project javautils by jiadongpo,
from the class FtpVFS, method testsftp:
/**
 * Test not yet passing (original note: "测试未通过" — test did not pass).
 *
 * Connects to an SFTP server, lists a remote log directory, downloads every
 * child entry into the current working directory, and additionally copies each
 * entry to the target resource location through VFS.
 *
 * @throws Exception on any VFS resolution or I/O failure
 */
public void testsftp() throws Exception {
    FileSystemManager fsManager = VFS.getManager();
    FileSystemOptions opts = new FileSystemOptions();
    // Disable strict host-key checking so unknown hosts do not abort the test.
    SftpFileSystemConfigBuilder.getInstance().setStrictHostKeyChecking(opts, "no");
    // Resolve the remote directory.
    // NOTE(review): credentials are hard-coded in the URI — move to configuration.
    FileObject fo = fsManager.resolveFile("sftp://ci:Zj4xyBkgjd@10.151.30.10:22/apps/tomcat7-40-tomcat-air-ticket-merchant/logs/", opts);
    // Fetch the remote file listing.
    FileObject[] children = fo.getChildren();
    for (int i = 0; i < children.length; i++) {
        FileObject f = children[i];
        FileContent c = f.getContent();
        File localFile = new File(f.getName().getBaseName());
        // Write the remote content to a local file.
        // FIX: the original never closed either stream, leaking one input and
        // one output stream per remote file; try-with-resources closes both.
        try (java.io.InputStream in = c.getInputStream();
                FileOutputStream out = new FileOutputStream(localFile)) {
            org.apache.commons.io.IOUtils.copy(in, out);
        }
        // Alternatively, copy through VFS to the target resource URL.
        FileObject obj = fsManager.resolveFile(this.getTargetResourceURL() + f.getName().getBaseName());
        if (!obj.exists()) {
            obj.createFile();
            obj.copyFrom(f, Selectors.SELECT_SELF);
        }
        final long size = (f.getType() == FileType.FILE) ? c.getSize() : -1;
        final long date = (f.getType() == FileType.FILE) ? c.getLastModifiedTime() : -1;
        System.out.println(f.getName().getPath() + " date:" + date + " Size:" + size);
    }
}
Example usage of org.apache.commons.vfs2.provider.local.LocalFile in project Orthanc_Tools by salimkanoun,
from the class ExportFiles, method export:
/**
 * Exports the file at {@code this.filePath} to the configured remote server
 * using the transport selected by {@code chosenOption}: plain FTP, SFTP (via
 * Commons VFS) or WebDAV (via Sardine). On failure the exception message is
 * stored in {@code this.result} so the caller can retrieve it with getResult.
 */
public void export() {
    FileInputStream fis = null;
    try {
        File localFile = new File(this.filePath);
        switch (chosenOption) {
            case OPTION_FTP:
                FTPClient client = new FTPClient();
                client.connect(this.serverAdress, this.port);
                client.login(this.login, this.pwd);
                // Passive mode + binary type so the payload survives firewalls
                // and is transferred without newline translation.
                client.enterLocalPassiveMode();
                client.setFileType(FTPClient.BINARY_FILE_TYPE);
                String remoteFile = this.remotePath + this.remoteFileName;
                fis = new FileInputStream(localFile);
                client.storeFile(remoteFile, fis);
                client.logout();
                break;
            case OPTION_SFTP:
                StandardFileSystemManager manager = new StandardFileSystemManager();
                manager.init();
                FileObject localFileSFTP = manager.resolveFile(this.filePath);
                FileObject remoteFileSFTP = manager.resolveFile("sftp://" + this.login + ":" + this.pwd + "@" + this.serverAdress + ":" + this.port + this.remotePath + this.remoteFileName);
                // Copy local file to sftp server
                remoteFileSFTP.copyFrom(localFileSFTP, Selectors.SELECT_SELF);
                manager.close();
                break;
            case OPTION_WEBDAV:
                fis = new FileInputStream(localFile);
                Sardine sardine = SardineFactory.begin(this.login, this.pwd);
                sardine.put(this.serverAdress + this.remotePath + this.remoteFileName, fis);
                break;
            default:
                break;
        }
    } catch (IOException e) {
        // Getting the exception message, that we will give to VueAnon with getResult
        this.result = e.getMessage();
    } finally {
        // FIX: fis stays null on the SFTP path (and whenever opening the local
        // file throws), so the original unconditional fis.close() raised a
        // NullPointerException from this finally block.
        if (fis != null) {
            try {
                fis.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
Example usage of org.apache.commons.vfs2.provider.local.LocalFile in project pentaho-kettle by pentaho,
from the class KettleVFS, method getOutputStream:
/**
 * Opens an output stream on the given VFS file, creating the file first.
 * If the VFS layer fails for a local file, a second attempt is made through
 * plain java.io before re-throwing the original VFS exception.
 *
 * @param fileObject the VFS file to write to; its parent directory must exist
 * @param append true to append to existing content, false to overwrite
 * @return an open output stream positioned per the append flag
 * @throws IOException if the parent directory is missing or the stream cannot be opened
 */
public static OutputStream getOutputStream(FileObject fileObject, boolean append) throws IOException {
    FileObject parentDir = fileObject.getParent();
    // Report a missing parent directory up-front with a friendly URI.
    if (parentDir != null && !parentDir.exists()) {
        throw new IOException(BaseMessages.getString(PKG, "KettleVFS.Exception.ParentDirectoryDoesNotExist", getFriendlyURI(parentDir)));
    }
    try {
        fileObject.createFile();
        return fileObject.getContent().getOutputStream(append);
    } catch (FileSystemException fse) {
        // Only local files get a java.io retry; anything else fails as-is.
        if (!(fileObject instanceof LocalFile)) {
            throw fse;
        }
        try {
            return new FileOutputStream(new File(getFilename(fileObject)), append);
        } catch (Exception ignored) {
            // Hide the retry: surface the original VFS failure instead.
            throw fse;
        }
    }
}
Example usage of org.apache.commons.vfs2.provider.local.LocalFile in project pentaho-kettle by pentaho,
from the class JobEntryMssqlBulkLoad, method execute:
/**
 * Runs a SQL Server BULK INSERT of the configured file into the configured
 * table, building the T-SQL statement from the job entry's options
 * (field/row terminators, codepage, error file, first/last row, etc.).
 *
 * @param previousResult result carried over from the previous job entry
 * @param nr job entry number (unused here)
 * @return the Result with setResult(true) on success, or error count set on failure
 */
public Result execute(Result previousResult, int nr) {
    String TakeFirstNbrLines = "";
    String LineTerminatedby = "";
    String FieldTerminatedby = "";
    boolean useFieldSeparator = false;
    String UseCodepage = "";
    String ErrorfileName = "";
    Result result = previousResult;
    result.setResult(false);
    String vfsFilename = environmentSubstitute(filename);
    FileObject fileObject = null;
    // Let's check the filename ...
    if (!Utils.isEmpty(vfsFilename)) {
        try {
            // This runs over VFS but BULK INSERT needs a plain file path,
            // so verify that the VFS object resolves to a local file before
            // converting it to a java.io.File.
            fileObject = KettleVFS.getFileObject(vfsFilename, this);
            if (!(fileObject instanceof LocalFile)) {
                throw new KettleException(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.OnlyLocalFileSupported", vfsFilename));
            }
            // Convert it to a regular platform specific file name
            String realFilename = KettleVFS.getFilename(fileObject);
            File file = new File(realFilename);
            if (file.exists() && file.canRead()) {
                // User has specified an existing file, We can continue ...
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "JobMssqlBulkLoad.FileExists.Label", realFilename));
                }
                if (connection != null) {
                    // User has specified a connection, We can continue ...
                    Database db = new Database(this, connection);
                    // BULK INSERT is T-SQL only; refuse any other database type.
                    if (!(db.getDatabaseMeta().getDatabaseInterface() instanceof MSSQLServerDatabaseMeta)) {
                        logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.DbNotMSSQL", connection.getDatabaseName()));
                        return result;
                    }
                    db.shareVariablesWith(this);
                    try {
                        db.connect(parentJob.getTransactionId(), null);
                        String realSchemaname = environmentSubstitute(schemaname);
                        String realTablename = environmentSubstitute(tablename);
                        // Add schemaname (most of the time Schemaname.Tablename)
                        if (schemaname != null) {
                            realTablename = realSchemaname + "." + realTablename;
                        }
                        if (db.checkTableExists(realTablename)) {
                            // The table exists, we can continue ...
                            if (log.isDetailed()) {
                                logDetailed(BaseMessages.getString(PKG, "JobMssqlBulkLoad.TableExists.Label", realTablename));
                            }
                            // FIELDTERMINATOR — mandatory for char/widechar data files.
                            String Fieldterminator = getRealFieldTerminator();
                            if (Utils.isEmpty(Fieldterminator) && (datafiletype.equals("char") || datafiletype.equals("widechar"))) {
                                logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.FieldTerminatorMissing"));
                                return result;
                            } else {
                                if (datafiletype.equals("char") || datafiletype.equals("widechar")) {
                                    useFieldSeparator = true;
                                    FieldTerminatedby = "FIELDTERMINATOR='" + Fieldterminator + "'";
                                }
                            }
                            // Check specific code page
                            if (codepage.equals("Specific")) {
                                // FIX: the original substituted 'codepage' (always the literal
                                // "Specific") instead of 'specificcodepage', and tested
                                // length() < 0, which is never true, so the missing-codepage
                                // error could never fire.
                                String realCodePage = environmentSubstitute(specificcodepage);
                                if (Utils.isEmpty(realCodePage)) {
                                    logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.SpecificCodePageMissing"));
                                    return result;
                                } else {
                                    UseCodepage = "CODEPAGE = '" + realCodePage + "'";
                                }
                            } else {
                                UseCodepage = "CODEPAGE = '" + codepage + "'";
                            }
                            // Check error file
                            String realErrorFile = environmentSubstitute(errorfilename);
                            if (realErrorFile != null) {
                                File errorfile = new File(realErrorFile);
                                if (errorfile.exists() && !adddatetime) {
                                    // The error file is created when the command is executed.
                                    // An error occurs if the file already exists.
                                    logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.ErrorFileExists"));
                                    return result;
                                }
                                if (adddatetime) {
                                    // Add date time to the error file name...
                                    // FIX: pattern was "yyyMMdd_HHmmss" (three-letter year).
                                    SimpleDateFormat daf = new SimpleDateFormat();
                                    Date now = new Date();
                                    daf.applyPattern("yyyyMMdd_HHmmss");
                                    String d = daf.format(now);
                                    ErrorfileName = "ERRORFILE ='" + realErrorFile + "_" + d + "'";
                                } else {
                                    ErrorfileName = "ERRORFILE ='" + realErrorFile + "'";
                                }
                            }
                            // ROWTERMINATOR
                            String Rowterminator = getRealLineterminated();
                            if (!Utils.isEmpty(Rowterminator)) {
                                LineTerminatedby = "ROWTERMINATOR='" + Rowterminator + "'";
                            }
                            // Start file at
                            if (startfile > 0) {
                                TakeFirstNbrLines = "FIRSTROW=" + startfile;
                            }
                            // End file at
                            if (endfile > 0) {
                                // FIX: append instead of overwrite, so FIRSTROW survives
                                // when both startfile and endfile are set.
                                if (TakeFirstNbrLines.length() > 0) {
                                    TakeFirstNbrLines = TakeFirstNbrLines + ",";
                                }
                                TakeFirstNbrLines = TakeFirstNbrLines + "LASTROW=" + endfile;
                            }
                            // Truncate table?
                            String SQLBULKLOAD = "";
                            if (truncate) {
                                SQLBULKLOAD = "TRUNCATE TABLE " + realTablename + ";";
                            }
                            // Build the BULK INSERT command
                            SQLBULKLOAD = SQLBULKLOAD + "BULK INSERT " + realTablename + " FROM " + "'" + realFilename.replace('\\', '/') + "'";
                            SQLBULKLOAD = SQLBULKLOAD + " WITH (";
                            if (useFieldSeparator) {
                                SQLBULKLOAD = SQLBULKLOAD + FieldTerminatedby;
                            } else {
                                SQLBULKLOAD = SQLBULKLOAD + "DATAFILETYPE ='" + datafiletype + "'";
                            }
                            if (LineTerminatedby.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + "," + LineTerminatedby;
                            }
                            if (TakeFirstNbrLines.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + "," + TakeFirstNbrLines;
                            }
                            if (UseCodepage.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + "," + UseCodepage;
                            }
                            String realFormatFile = environmentSubstitute(formatfilename);
                            if (realFormatFile != null) {
                                SQLBULKLOAD = SQLBULKLOAD + ", FORMATFILE='" + realFormatFile + "'";
                            }
                            if (firetriggers) {
                                SQLBULKLOAD = SQLBULKLOAD + ",FIRE_TRIGGERS";
                            }
                            if (keepnulls) {
                                SQLBULKLOAD = SQLBULKLOAD + ",KEEPNULLS";
                            }
                            if (keepidentity) {
                                SQLBULKLOAD = SQLBULKLOAD + ",KEEPIDENTITY";
                            }
                            if (checkconstraints) {
                                SQLBULKLOAD = SQLBULKLOAD + ",CHECK_CONSTRAINTS";
                            }
                            if (tablock) {
                                SQLBULKLOAD = SQLBULKLOAD + ",TABLOCK";
                            }
                            if (orderby != null) {
                                SQLBULKLOAD = SQLBULKLOAD + ",ORDER ( " + orderby + " " + orderdirection + ")";
                            }
                            if (ErrorfileName.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", " + ErrorfileName;
                            }
                            if (maxerrors > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", MAXERRORS=" + maxerrors;
                            }
                            if (batchsize > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", BATCHSIZE=" + batchsize;
                            }
                            if (rowsperbatch > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", ROWS_PER_BATCH=" + rowsperbatch;
                            }
                            // End of Bulk command
                            SQLBULKLOAD = SQLBULKLOAD + ")";
                            try {
                                // Run the SQL
                                db.execStatement(SQLBULKLOAD);
                                // Everything is OK...we can disconnect now
                                db.disconnect();
                                if (isAddFileToResult()) {
                                    // Add filename to output files
                                    ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString());
                                    result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                                }
                                result.setResult(true);
                            } catch (KettleDatabaseException je) {
                                result.setNrErrors(1);
                                logError("An error occurred executing this job entry : " + je.getMessage(), je);
                            } catch (KettleFileException e) {
                                logError("An error occurred executing this job entry : " + e.getMessage(), e);
                                result.setNrErrors(1);
                            } finally {
                                if (db != null) {
                                    db.disconnect();
                                    db = null;
                                }
                            }
                        } else {
                            // Of course, the table should have been created already before the bulk load operation
                            db.disconnect();
                            result.setNrErrors(1);
                            logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.TableNotExists", realTablename));
                        }
                    } catch (KettleDatabaseException dbe) {
                        db.disconnect();
                        result.setNrErrors(1);
                        logError("An error occurred executing this entry: " + dbe.getMessage());
                    }
                } else {
                    // No database connection is defined
                    result.setNrErrors(1);
                    logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Nodatabase.Label"));
                }
            } else {
                // the file doesn't exist
                result.setNrErrors(1);
                logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.FileNotExists", realFilename));
            }
        } catch (Exception e) {
            // An unexpected error occurred
            result.setNrErrors(1);
            logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.UnexpectedError.Label"), e);
        } finally {
            try {
                if (fileObject != null) {
                    fileObject.close();
                }
            } catch (Exception e) {
                // Ignore errors
            }
        }
    } else {
        // No file was specified
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Nofilename.Label"));
    }
    return result;
}
Example usage of org.apache.commons.vfs2.provider.local.LocalFile in project pentaho-kettle by pentaho,
from the class JobEntryMysqlBulkLoad, method execute:
/**
 * Runs a MySQL "LOAD DATA INFILE" bulk load of the configured file into the
 * configured table, building the statement from the job entry's options
 * (replace/ignore, ignored lines, column list, local execution, priority,
 * field and line delimiters).
 *
 * @param previousResult result carried over from the previous job entry
 * @param nr job entry number (unused here)
 * @return the Result with setResult(true) on success, or error count set on failure
 */
public Result execute(Result previousResult, int nr) {
    String ReplaceIgnore;
    String IgnoreNbrLignes = "";
    String ListOfColumn = "";
    String LocalExec = "";
    String PriorityText = "";
    String LineTerminatedby = "";
    String FieldTerminatedby = "";
    Result result = previousResult;
    result.setResult(false);
    String vfsFilename = environmentSubstitute(filename);
    FileObject fileObject = null;
    // Let's check the filename ...
    if (!Utils.isEmpty(vfsFilename)) {
        try {
            // This runs over VFS but LOAD DATA needs a plain platform file,
            // so verify that the VFS object resolves to a local file before
            // converting it to a java.io.File.
            fileObject = KettleVFS.getFileObject(vfsFilename, this);
            if (!(fileObject instanceof LocalFile)) {
                throw new KettleException("Only local files are supported at this time, file [" + vfsFilename + "] is not a local file.");
            }
            // Convert it to a regular platform specific file name
            String realFilename = KettleVFS.getFilename(fileObject);
            File file = new File(realFilename);
            // With LOCAL INFILE the file is read by the server, so local
            // readability only matters when isLocalInfile() is true.
            if ((file.exists() && file.canRead()) || !isLocalInfile()) {
                // User has specified an existing file, We can continue ...
                if (log.isDetailed()) {
                    logDetailed("File [" + realFilename + "] exists.");
                }
                if (connection != null) {
                    // User has specified a connection, We can continue ...
                    Database db = new Database(this, connection);
                    db.shareVariablesWith(this);
                    try {
                        db.connect(parentJob.getTransactionId(), null);
                        String realSchemaname = environmentSubstitute(schemaname);
                        String realTablename = environmentSubstitute(tablename);
                        if (db.checkTableExists(realTablename)) {
                            // The table exists, we can continue ...
                            if (log.isDetailed()) {
                                logDetailed("Table [" + realTablename + "] exists.");
                            }
                            // Add schemaname (most of the time Schemaname.Tablename)
                            if (schemaname != null) {
                                realTablename = realSchemaname + "." + realTablename;
                            }
                            // Set the REPLACE or IGNORE duplicate-key handling
                            if (isReplacedata()) {
                                ReplaceIgnore = "REPLACE";
                            } else {
                                ReplaceIgnore = "IGNORE";
                            }
                            // Set the IGNORE LINES clause
                            if (Const.toInt(getRealIgnorelines(), 0) > 0) {
                                IgnoreNbrLignes = "IGNORE " + getRealIgnorelines() + " LINES";
                            }
                            // Set the explicit column list
                            if (getRealListattribut() != null) {
                                ListOfColumn = "(" + MysqlString(getRealListattribut()) + ")";
                            }
                            // Local file execution (client-side read)
                            if (isLocalInfile()) {
                                LocalExec = "LOCAL";
                            }
                            // Priority
                            if (prorityvalue == 1) {
                                // LOW
                                PriorityText = "LOW_PRIORITY";
                            } else if (prorityvalue == 2) {
                                // CONCURRENT
                                PriorityText = "CONCURRENT";
                            }
                            // FIELDS clause (separator / enclosure / escape), with
                            // single quotes doubled to keep the statement valid.
                            if (getRealSeparator() != null || getRealEnclosed() != null || getRealEscaped() != null) {
                                FieldTerminatedby = "FIELDS ";
                                if (getRealSeparator() != null) {
                                    FieldTerminatedby = FieldTerminatedby + "TERMINATED BY '" + Const.replace(getRealSeparator(), "'", "''") + "'";
                                }
                                if (getRealEnclosed() != null) {
                                    FieldTerminatedby = FieldTerminatedby + " ENCLOSED BY '" + Const.replace(getRealEnclosed(), "'", "''") + "'";
                                }
                                if (getRealEscaped() != null) {
                                    FieldTerminatedby = FieldTerminatedby + " ESCAPED BY '" + Const.replace(getRealEscaped(), "'", "''") + "'";
                                }
                            }
                            // LINES clause
                            if (getRealLinestarted() != null || getRealLineterminated() != null) {
                                LineTerminatedby = "LINES ";
                                // Line starting by
                                if (getRealLinestarted() != null) {
                                    LineTerminatedby = LineTerminatedby + "STARTING BY '" + Const.replace(getRealLinestarted(), "'", "''") + "'";
                                }
                                // Line terminated by
                                if (getRealLineterminated() != null) {
                                    LineTerminatedby = LineTerminatedby + " TERMINATED BY '" + Const.replace(getRealLineterminated(), "'", "''") + "'";
                                }
                            }
                            String SQLBULKLOAD = "LOAD DATA " + PriorityText + " " + LocalExec + " INFILE '" + realFilename.replace('\\', '/') + "' " + ReplaceIgnore + " INTO TABLE " + realTablename + " " + FieldTerminatedby + " " + LineTerminatedby + " " + IgnoreNbrLignes + " " + ListOfColumn + ";";
                            try {
                                // Run the SQL
                                db.execStatement(SQLBULKLOAD);
                                // Everything is OK...we can disconnect now
                                db.disconnect();
                                if (isAddFileToResult()) {
                                    // Add the loaded filename to the output files
                                    ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString());
                                    result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                                }
                                result.setResult(true);
                            } catch (KettleDatabaseException je) {
                                db.disconnect();
                                result.setNrErrors(1);
                                logError("An error occurred executing this job entry : " + je.getMessage());
                            } catch (KettleFileException e) {
                                // FIX: disconnect here too — the original leaked the
                                // connection on this path while the sibling catch above
                                // (and the MSSQL variant of this entry) disconnects.
                                db.disconnect();
                                logError("An error occurred executing this job entry : " + e.getMessage());
                                result.setNrErrors(1);
                            }
                        } else {
                            // Of course, the table should have been created already before the bulk load operation
                            db.disconnect();
                            result.setNrErrors(1);
                            if (log.isDetailed()) {
                                logDetailed("Table [" + realTablename + "] doesn't exist!");
                            }
                        }
                    } catch (KettleDatabaseException dbe) {
                        db.disconnect();
                        result.setNrErrors(1);
                        logError("An error occurred executing this entry: " + dbe.getMessage());
                    }
                } else {
                    // No database connection is defined
                    result.setNrErrors(1);
                    logError(BaseMessages.getString(PKG, "JobMysqlBulkLoad.Nodatabase.Label"));
                }
            } else {
                // the file doesn't exist
                result.setNrErrors(1);
                logError("File [" + realFilename + "] doesn't exist!");
            }
        } catch (Exception e) {
            // An unexpected error occurred
            result.setNrErrors(1);
            logError(BaseMessages.getString(PKG, "JobMysqlBulkLoad.UnexpectedError.Label"), e);
        } finally {
            // FIX: close the VFS file object (the original never closed it;
            // the MSSQL variant of this entry does so in a finally block).
            try {
                if (fileObject != null) {
                    fileObject.close();
                }
            } catch (Exception e) {
                // Ignore errors
            }
        }
    } else {
        // No file was specified
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "JobMysqlBulkLoad.Nofilename.Label"));
    }
    return result;
}
Aggregations