Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho:
class FileObjectContentLocation, method exists().
/**
 * Checks whether a content entity with the given name exists in this content location.
 * Invalid filenames (per the repository naming rules) are reported as non-existent.
 *
 * @param name the name of the entity to look up.
 * @return true if an entity with this name exists, false otherwise.
 */
public boolean exists(final String name) {
  // Names that violate repository rules can never exist by definition.
  if (RepositoryUtilities.isInvalidPathName(name)) {
    return false;
  }
  try {
    // Resolve the name relative to the backing VFS folder and probe it.
    final FileObject child = getBackend().resolveFile(name);
    return child.exists();
  } catch (FileSystemException e) {
    throw new RuntimeException(e);
  }
}
Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho:
class FileObjectContentLocation, method listContents().
/**
 * Lists all content entities stored in this content-location. This method filters out all files
 * that have an invalid name (according to the repository rules).
 *
 * @return the content entities for this location; never null and contains no null elements.
 * @throws ContentIOException if a repository error occurred.
 */
public ContentEntity[] listContents() throws ContentIOException {
  try {
    final FileObject file = getBackend();
    final FileObject[] files = file.getChildren();
    // Fill a buffer and track the count so children skipped for invalid names
    // do not leave null holes in the returned array (the old per-index
    // assignment returned an array with nulls whenever a child was filtered).
    final ContentEntity[] buffer = new ContentEntity[files.length];
    int count = 0;
    for (final FileObject child : files) {
      if (RepositoryUtilities.isInvalidPathName(child.getPublicURIString())) {
        continue;
      }
      if (child.isFolder()) {
        buffer[count++] = new FileObjectContentLocation(this, child);
      } else if (child.isFile()) {
        // Plain files are content items, not locations (consistent with getEntry()).
        buffer[count++] = new FileObjectContentItem(this, child);
      }
      // Anything that is neither file nor folder is silently skipped.
    }
    if (count == buffer.length) {
      return buffer;
    }
    final ContentEntity[] entities = new ContentEntity[count];
    System.arraycopy(buffer, 0, entities, 0, count);
    return entities;
  } catch (FileSystemException e) {
    throw new RuntimeException(e);
  }
}
Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho:
class FileObjectContentLocation, method getEntry().
/**
 * Returns the content entity with the given name. If the entity does not exist, an exception
 * will be raised.
 *
 * @param name the name of the entity to be retrieved.
 * @return the content entity for this name, never null.
 * @throws ContentIOException if a repository error occurred or no such entry exists.
 */
public ContentEntity getEntry(final String name) throws ContentIOException {
  // Reject rule-violating names up front; isInvalidPathName throws no checked
  // exception, so hoisting this guard out of the try block changes nothing.
  if (RepositoryUtilities.isInvalidPathName(name)) {
    throw new IllegalArgumentException("The name given is not valid.");
  }
  try {
    final FileObject child = getBackend().resolveFile(name);
    if (!child.exists()) {
      throw new ContentIOException("Not found:" + child);
    }
    // Folders become sub-locations; regular files become content items.
    if (child.isFolder()) {
      return new FileObjectContentLocation(this, child);
    }
    if (child.isFile()) {
      return new FileObjectContentItem(this, child);
    }
    throw new ContentIOException("Not File nor directory.");
  } catch (FileSystemException e) {
    throw new RuntimeException(e);
  }
}
Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho:
class JobEntryUnZipTest, method unzipPostProcessingTest().
@Test
public void unzipPostProcessingTest() throws Exception {
  JobEntryUnZip jobEntryUnZip = new JobEntryUnZip();
  // doUnzipPostProcessing is private, so it is reached via reflection.
  Method unzipPostprocessingMethod = jobEntryUnZip.getClass().getDeclaredMethod("doUnzipPostProcessing", FileObject.class, FileObject.class, String.class);
  unzipPostprocessingMethod.setAccessible(true);
  FileObject sourceFileObject = Mockito.mock(FileObject.class);
  Mockito.doReturn(Mockito.mock(FileName.class)).when(sourceFileObject).getName();

  // afterunzip == 1: the source archive must be deleted after unzipping.
  jobEntryUnZip.afterunzip = 1;
  unzipPostprocessingMethod.invoke(jobEntryUnZip, sourceFileObject, Mockito.mock(FileObject.class), "");
  Mockito.verify(sourceFileObject, Mockito.times(1)).delete();

  // afterunzip == 2: the source archive must be moved after unzipping.
  // Mockito.any() replaces Mockito.anyObject(), which is deprecated and
  // removed in Mockito 2+.
  jobEntryUnZip.afterunzip = 2;
  unzipPostprocessingMethod.invoke(jobEntryUnZip, sourceFileObject, Mockito.mock(FileObject.class), "");
  Mockito.verify(sourceFileObject, Mockito.times(1)).moveTo(Mockito.any());
}
Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho:
class PartitioningIT, method testPartitioningRepartitioningOnCluster().
/**
 * This test reads a CSV file in parallel on the cluster, one copy per slave.<br>
 * It then partitions the data on id in 12 partitions (4 per slave).<br>
 * After that it re-partitions the data in 9 partitions (3 per slave).<br>
 * As such we expect 9 result files on disk.<br>
 * File: "partitioning-repartitioning-on-cluster.ktr"<br>
 * <p>
 * NOTE(review): the javadoc above says 9 result files, but the loop below
 * checks 12 output files ("000".."011") — confirm which count is intended.
 */
public void testPartitioningRepartitioningOnCluster() throws Exception {
init();
// Spin up the slave servers the clustered transformation will run on.
ClusterGenerator clusterGenerator = new ClusterGenerator();
try {
clusterGenerator.launchSlaveServers();
// Load the test transformation and rewire it against the generated cluster.
TransMeta transMeta = loadAndModifyTestTransformation(clusterGenerator, "src/it/resources/org/pentaho/di/cluster/partitioning-repartitioning-on-cluster.ktr");
TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
// Block until the clustered run finishes; nrErrors aggregates errors from all slaves.
long nrErrors = Trans.monitorClusteredTransformation(new LogChannel("cluster unit test <testParallelFileReadOnMaster>"), transSplitter, null, 1);
assertEquals(0L, nrErrors);
// Expected row counts per output partition file, indexed in step with 'files'.
String[] results = new String[] { "8", "9", "9", "9", "9", "8", "8", "8", "8", "8", "8", "8" };
String[] files = new String[] { "000", "001", "002", "003", "004", "005", "006", "007", "008", "009", "010", "011" };
for (int i = 0; i < results.length; i++) {
String filename = "${java.io.tmpdir}/partitioning-repartitioning-on-cluster-" + files[i] + ".txt";
String result = loadFileContent(transMeta, filename);
assertEqualsIgnoreWhitespacesAndCase(results[i], result);
// Remove the output file : we don't want to leave too much clutter around
//
FileObject file = KettleVFS.getFileObject(transMeta.environmentSubstitute(filename));
file.delete();
}
} catch (Exception e) {
// Any failure in the clustered run fails the test with the exception text.
e.printStackTrace();
fail(e.toString());
} finally {
// Always tear the slave servers down, even when the test body failed.
try {
clusterGenerator.stopSlaveServers();
} catch (Exception e) {
e.printStackTrace();
fail(e.toString());
}
}
}
Aggregations