Use of org.apache.twill.filesystem.LocationFactory in project cdap by caskdata.
Example from class CLIMainTest, method createAppJarFile:
/**
 * Packages the given application class into a deployment jar and copies it into a fresh
 * temporary folder under the name {@code <SimpleName>-1.0.<timestamp>.jar}.
 *
 * @param cls application class to package
 * @return the jar file on the local filesystem
 * @throws IOException if the jar cannot be created or copied
 */
private static File createAppJarFile(Class<?> cls) throws IOException {
  File folder = TMP_FOLDER.newFolder();
  LocationFactory factory = new LocalLocationFactory(folder);
  Location jarLocation = AppJarHelper.createDeploymentJar(factory, cls);
  String jarName = String.format("%s-1.0.%d.jar", cls.getSimpleName(), System.currentTimeMillis());
  File target = new File(folder, jarName);
  Files.copy(Locations.newInputSupplier(jarLocation), target);
  return target;
}
Use of org.apache.twill.filesystem.LocationFactory in project cdap by caskdata.
Example from class ProgramGenerationStageTest, method testProgramGenerationForToyApp:
@Test
public void testProgramGenerationForToyApp() throws Exception {
  cConf.set(Constants.AppFabric.OUTPUT_DIR, "programs");
  LocationFactory namespaceLf = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  // have to do this since we are not going through the route of create namespace -> deploy application
  // in real scenarios, the namespace directory would already be created
  Location namespaceDir = namespaceLf.create(DefaultId.APPLICATION.getNamespace());
  Locations.mkdirsIfNotExists(namespaceDir);
  // Build the deployment jar for the toy application in a separate temp folder.
  LocationFactory jarFactory = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  Location toyAppArchive = AppJarHelper.createDeploymentJar(jarFactory, ToyApp.class);
  // Round-trip the spec through JSON to mimic what a real deployment pipeline sees.
  ApplicationSpecification originalSpec = Specifications.from(new ToyApp());
  ApplicationSpecificationAdapter specAdapter = ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator());
  ApplicationSpecification roundTrippedSpec = specAdapter.fromJson(specAdapter.toJson(originalSpec));
  ProgramGenerationStage stage = new ProgramGenerationStage();
  // Can do better here - fixed right now to run the test.
  stage.process(new StageContext(Object.class));
  stage.process(new ApplicationDeployable(NamespaceId.DEFAULT.artifact("ToyApp", "1.0"), toyAppArchive, DefaultId.APPLICATION, roundTrippedSpec, null, ApplicationDeployScope.USER));
  Assert.assertTrue(true);
}
Use of org.apache.twill.filesystem.LocationFactory in project cdap by caskdata.
Example from class Locations, method processLocations:
/**
 * Applies the given {@code processor} to the {@link LocationStatus} of {@code startLocation}.
 * If {@code startLocation} is a directory, every location directly under it is also processed.
 * When {@code recursive} is {@code true}, directories found below {@code startLocation} are
 * expanded as well; when it is {@code false}, only the starting directory's immediate children
 * are visited. If {@code startLocation} is a plain file, only that single location is processed.
 *
 * @param startLocation location to start the processing from
 * @param recursive {@code true} to also descend into sub-directories found below
 *                  {@code startLocation}. The starting directory's own children are always
 *                  processed regardless of this flag
 * @param processor used to process locations. If {@link Processor#process} returns {@code false}
 *                  for any {@link LocationStatus}, traversal stops and the processor's current
 *                  result is returned
 * @param <R> Type of the return value
 * @throws IOException if the locations could not be read
 */
public static <R> R processLocations(Location startLocation, boolean recursive, Processor<LocationStatus, R> processor) throws IOException {
  LocationFactory factory = startLocation.getLocationFactory();
  // Work list holding locations still to be visited; the head is processed next (depth-first).
  LinkedList<LocationStatus> pending = new LinkedList<>();
  pending.addFirst(getLocationStatus(startLocation));
  boolean topLevel = true;
  while (!pending.isEmpty()) {
    LocationStatus current = pending.removeFirst();
    if (!processor.process(current)) {
      // Processor asked to stop early; hand back whatever it has accumulated so far.
      return processor.getResult();
    }
    // Expand a directory only if it is the starting directory or recursion was requested.
    if (current.isDir() && (topLevel || recursive)) {
      topLevel = false;
      RemoteIterator<LocationStatus> children = listLocationStatus(factory.create(current.getUri()));
      while (children.hasNext()) {
        pending.addFirst(children.next());
      }
    }
  }
  return processor.getResult();
}
Use of org.apache.twill.filesystem.LocationFactory in project cdap by caskdata.
Example from class Locations, method newInputSupplier:
/**
 * Creates a new {@link InputSupplier} that provides a {@link SeekableInputStream} for the given
 * location. The underlying stream is opened lazily, on each call to
 * {@link InputSupplier#getInput()}.
 *
 * @param location Location for the input stream.
 * @return A {@link InputSupplier}.
 */
public static InputSupplier<? extends SeekableInputStream> newInputSupplier(final Location location) {
  return new InputSupplier<SeekableInputStream>() {
    @Override
    public SeekableInputStream getInput() throws IOException {
      InputStream input = location.getInputStream();
      try {
        // Local file: wrap the FileInputStream directly for seeking.
        if (input instanceof FileInputStream) {
          return new FileSeekableInputStream((FileInputStream) input);
        }
        if (input instanceof FSDataInputStream) {
          final FSDataInputStream dataInput = (FSDataInputStream) input;
          LocationFactory locationFactory = location.getLocationFactory();
          if (locationFactory instanceof FileContextLocationFactory) {
            final FileContextLocationFactory lf = (FileContextLocationFactory) locationFactory;
            // Run under the factory's UGI so the FileSystem is acquired with the same
            // credentials that produced this location (NOTE(review): presumably needed for
            // secure/kerberized clusters — confirm).
            return lf.getFileContext().getUgi().doAs(new PrivilegedExceptionAction<SeekableInputStream>() {
              @Override
              public SeekableInputStream run() throws IOException {
                // Disable the FileSystem cache. The FileSystem will be closed when the InputStream is closed
                String scheme = lf.getHomeLocation().toURI().getScheme();
                Configuration hConf = new Configuration(lf.getConfiguration());
                hConf.set(String.format("fs.%s.impl.disable.cache", scheme), "true");
                FileSystem fs = FileSystem.get(hConf);
                return new DFSSeekableInputStream(dataInput, createDFSStreamSizeProvider(fs, true, new Path(location.toURI()), dataInput));
              }
            });
          }
          // This shouldn't happen
          return new DFSSeekableInputStream(dataInput, new StreamSizeProvider() {
            @Override
            public long size() throws IOException {
              // Assumption is if the FS is not a HDFS fs, the location length tells the stream size
              return location.length();
            }
          });
        }
        // Stream type is neither local nor HDFS-backed; no seekable wrapper is available.
        throw new IOException("Failed to create SeekableInputStream from location " + location);
      } catch (Throwable t) {
        // Close the stream we opened above before propagating, so a failed wrap doesn't leak it.
        Closeables.closeQuietly(input);
        // Preserve the original IOException; wrap anything else so the signature stays IOException.
        Throwables.propagateIfInstanceOf(t, IOException.class);
        throw new IOException(t);
      }
    }
  };
}
Use of org.apache.twill.filesystem.LocationFactory in project cdap by caskdata.
Example from class SparkPackageUtils, method resolveURI:
/**
 * Resolves a {@link URI} representation from the given {@link Location}, mirroring the way
 * Spark resolves paths: for HDFS-backed locations the path is qualified against the
 * {@link FileSystem} and resolved through the {@link FileContext}; other locations resolve
 * to their own URI directly.
 */
private static URI resolveURI(Location location) throws IOException {
  // Unwrap any forwarding factories to reach the underlying implementation.
  LocationFactory factory = location.getLocationFactory();
  while (factory instanceof ForwardingLocationFactory) {
    factory = ((ForwardingLocationFactory) factory).getDelegate();
  }
  if (!(factory instanceof FileContextLocationFactory)) {
    // Not backed by a Hadoop FileContext; the location's own URI is already the answer.
    return location.toURI();
  }
  // Resolves the URI the way Spark does: qualify via FileSystem, then resolve via FileContext.
  FileContextLocationFactory fcFactory = (FileContextLocationFactory) factory;
  Configuration hConf = fcFactory.getConfiguration();
  org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(location.toURI().getPath());
  path = path.getFileSystem(hConf).makeQualified(path);
  return fcFactory.getFileContext().resolvePath(path).toUri();
}
Aggregations