use of co.cask.cdap.internal.app.runtime.distributed.LocalizeResource in project cdap by caskdata.
the class DistributedSparkProgramRunner method setupLaunchConfig.
@Override
protected void setupLaunchConfig(LaunchConfig launchConfig, Program program, ProgramOptions options,
                                 CConfiguration cConf, Configuration hConf, File tempDir) throws IOException {
  // Update the container hConf
  hConf.setBoolean(SparkRuntimeContextConfig.HCONF_ATTR_CLUSTER_MODE, true);
  hConf.set("hive.metastore.token.signature", HiveAuthFactory.HS2_CLIENT_TOKEN);

  if (SecurityUtil.isKerberosEnabled(cConf)) {
    // Divide the interval by 0.8 because Spark applies a 0.8 discount to it;
    // without this offset, Spark would look for new credentials too soon.
    // Also add 5 seconds to the interval to give the master time to push the
    // changes to the Spark client container.
    hConf.setLong(SparkRuntimeContextConfig.HCONF_ATTR_CREDENTIALS_UPDATE_INTERVAL_MS,
                  (long) ((secureStoreRenewer.getUpdateInterval() + 5000) / 0.8));
  }

  // Setup the launch config
  ApplicationSpecification appSpec = program.getApplicationSpecification();
  SparkSpecification spec = appSpec.getSpark().get(program.getName());
  Map<String, String> clientArgs = RuntimeArguments.extractScope("task", "client",
                                                                 options.getUserArguments().asMap());
  Resources resources = SystemArguments.getResources(clientArgs, spec.getClientResources());

  // Add runnable. Only one instance for the Spark client.
  launchConfig.addRunnable(spec.getName(), new SparkTwillRunnable(spec.getName()), resources, 1);

  // Add extra resources, classpath, dependencies, env and set up the ClassAcceptor
  Map<String, LocalizeResource> localizeResources = new HashMap<>();
  Map<String, String> extraEnv = new HashMap<>(SparkPackageUtils.getSparkClientEnv());
  SparkPackageUtils.prepareSparkResources(sparkCompat, locationFactory, tempDir, localizeResources, extraEnv);

  // Add the MapReduce resources and classpath as well, for the InputFormat/OutputFormat classes
  MapReduceContainerHelper.localizeFramework(hConf, localizeResources);

  extraEnv.put(Constants.SPARK_COMPAT_ENV, sparkCompat.getCompat());
  launchConfig.addExtraResources(localizeResources)
    .addExtraDependencies(SparkProgramRuntimeProvider.class)
    .addExtraEnv(extraEnv)
    .addExtraClasspath(MapReduceContainerHelper.addMapReduceClassPath(hConf, new ArrayList<String>()))
    .setClassAcceptor(createBundlerClassAcceptor());
}
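The interval arithmetic in the Kerberos branch is easier to follow with concrete numbers. A minimal illustration, assuming a hypothetical update interval of one hour (the value is made up, not a CDAP default):

// Hypothetical numbers illustrating the credentials-update interval math above.
long updateIntervalMs = 3_600_000L;                          // assumed renewer interval: 1 hour
long configured = (long) ((updateIntervalMs + 5000) / 0.8);  // 4,506,250 ms written to hConf
// Spark schedules its credential check at 0.8 * configured = 3,605,000 ms,
// i.e. the original interval plus the 5-second cushion for the master's push.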
use of co.cask.cdap.internal.app.runtime.distributed.LocalizeResource in project cdap by caskdata.
the class SparkRuntimeService method distributedUserResources.
/**
 * Appends the user's localize resources to the given {@link List} of {@link LocalizeResource},
 * rewriting each URI so that its fragment carries the name under which the resource is localized.
 */
private void distributedUserResources(Map<String, LocalizeResource> resources,
                                      List<LocalizeResource> result) throws URISyntaxException {
  for (Map.Entry<String, LocalizeResource> entry : resources.entrySet()) {
    URI uri = entry.getValue().getURI();
    URI actualURI = new URI(uri.getScheme(), uri.getAuthority(), uri.getPath(), uri.getQuery(), entry.getKey());
    result.add(new LocalizeResource(actualURI, entry.getValue().isArchive()));
  }
}
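To see what the URI rewrite does, here is a minimal sketch with a made-up map entry (the method is private, so calling it directly like this is illustrative only; the path and key are hypothetical):

// Hypothetical input: localize this jar under the name "app.jar"
Map<String, LocalizeResource> resources = new HashMap<>();
resources.put("app.jar",
              new LocalizeResource(URI.create("hdfs://nn/cdap/tmp/app-1234.jar"), false));
List<LocalizeResource> result = new ArrayList<>();
distributedUserResources(resources, result);
// result.get(0).getURI() is "hdfs://nn/cdap/tmp/app-1234.jar#app.jar";
// the URI fragment is how the localized name travels with the resource.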
use of co.cask.cdap.internal.app.runtime.distributed.LocalizeResource in project cdap by caskdata.
the class LocalizationUtilsTest method testTarFiles.
private void testTarFiles(TarFileType type) throws IOException {
  String tarFileName = "target";
  // Have to use short file/directory names because TarArchiveOutputStream does not like long paths.
  File directory;
  File localizationDir;
  switch (type) {
    case TAR:
      directory = TEMP_FOLDER.newFolder("t1");
      localizationDir = TEMP_FOLDER.newFolder("localTar");
      break;
    case TAR_GZ:
      directory = TEMP_FOLDER.newFolder("t2");
      localizationDir = TEMP_FOLDER.newFolder("localTarGz");
      break;
    case TGZ:
      directory = TEMP_FOLDER.newFolder("t3");
      localizationDir = TEMP_FOLDER.newFolder("localTgz");
      break;
    default:
      throw new IllegalArgumentException("Unexpected type: " + type);
  }
  File file1 = new File(Files.createFile(Paths.get(new File(directory, "f1").toURI())).toUri());
  File file2 = new File(Files.createFile(Paths.get(new File(directory, "f2").toURI())).toUri());
  File tarFile;
  switch (type) {
    case TAR:
      tarFile = createTarFile(tarFileName, file1, file2);
      break;
    case TAR_GZ:
      tarFile = createTarGzFile(tarFileName, file1, file2);
      break;
    case TGZ:
      tarFile = createTgzFile(tarFileName, file1, file2);
      break;
    default:
      throw new IllegalArgumentException("Unexpected type: " + type);
  }
  File localizedResource =
    LocalizationUtils.localizeResource(tarFileName, new LocalizeResource(tarFile, true), localizationDir);
  Assert.assertTrue(localizedResource.isDirectory());
  File[] files = localizedResource.listFiles();
  Assert.assertNotNull(files);
  Assert.assertEquals(2, files.length);
  for (File file : files) {
    String name = file.getName();
    Assert.assertTrue(file1.getName().equals(name) || file2.getName().equals(name));
  }
}
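The createTarFile/createTarGzFile/createTgzFile helpers are not shown in this excerpt. A minimal sketch of what createTarFile might look like, using Apache Commons Compress (the implementation details are assumptions, not the actual test code):

// Hypothetical sketch of the createTarFile helper used above; the real
// test's implementation may differ. Packs the given files into a .tar.
private File createTarFile(String name, File... files) throws IOException {
  File tarFile = new File(TEMP_FOLDER.newFolder(), name + ".tar");
  try (TarArchiveOutputStream tarOut =
         new TarArchiveOutputStream(new FileOutputStream(tarFile))) {
    for (File file : files) {
      // Add each file under its own name at the root of the archive
      tarOut.putArchiveEntry(new TarArchiveEntry(file, file.getName()));
      Files.copy(file.toPath(), tarOut);
      tarOut.closeArchiveEntry();
    }
  }
  return tarFile;
}

createTarGzFile and createTgzFile would presumably differ only in wrapping the FileOutputStream in a GzipCompressorOutputStream and in the file extension.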
use of co.cask.cdap.internal.app.runtime.distributed.LocalizeResource in project cdap by caskdata.
the class LocalizationUtilsTest method testJar.
@Test
public void testJar() throws IOException {
  String jarFileName = "target";
  File directory = TEMP_FOLDER.newFolder("jar");
  File libDir = new File(directory, "lib");
  Assert.assertTrue(libDir.mkdirs());
  File someClassFile = File.createTempFile("SomeClass", ".class", directory);
  File someOtherClassFile = File.createTempFile("SomeOtherClass", ".class", directory);
  File jarFile = createZipFile(jarFileName, directory, true);
  File localizationDir = TEMP_FOLDER.newFolder("localJar");
  File localizedResource =
    LocalizationUtils.localizeResource(jarFileName, new LocalizeResource(jarFile, true), localizationDir);
  Assert.assertTrue(localizedResource.isDirectory());
  File[] files = localizedResource.listFiles();
  Assert.assertNotNull(files);
  Assert.assertEquals(3, files.length);
  for (File file : files) {
    String name = file.getName();
    if (libDir.getName().equals(name)) {
      Assert.assertTrue(file.isDirectory());
    } else {
      Assert.assertTrue(someClassFile.getName().equals(name) || someOtherClassFile.getName().equals(name));
    }
  }
}
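The createZipFile helper is likewise not shown. A minimal sketch of what it might look like with java.util.zip; the meaning of the boolean parameter is an assumption (here taken to select a .jar extension), and the real test code may differ:

// Hypothetical sketch of the createZipFile helper used above. Zips the
// directory's immediate contents, adding a trailing-slash entry for
// sub-directories so they survive extraction.
private File createZipFile(String name, File dir, boolean asJar) throws IOException {
  File zipFile = new File(TEMP_FOLDER.newFolder(), name + (asJar ? ".jar" : ".zip"));
  try (ZipOutputStream zipOut = new ZipOutputStream(new FileOutputStream(zipFile))) {
    for (File file : dir.listFiles()) {
      if (file.isDirectory()) {
        // Directory entries end with '/' and carry no data
        zipOut.putNextEntry(new ZipEntry(file.getName() + "/"));
      } else {
        zipOut.putNextEntry(new ZipEntry(file.getName()));
        Files.copy(file.toPath(), zipOut);
      }
      zipOut.closeEntry();
    }
  }
  return zipFile;
}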
use of co.cask.cdap.internal.app.runtime.distributed.LocalizeResource in project cdap by caskdata.
the class SparkPackageUtils method prepareSpark1Framework.
/**
 * Prepares the Spark 1 framework jar on the given location.
 *
 * @param sparkConf the Spark configuration
 * @param locationFactory the {@link LocationFactory} for saving the Spark framework jar
 * @return a {@link SparkFramework} containing information about the Spark framework in the localization context
 * @throws IOException if the framework could not be prepared
 */
private static SparkFramework prepareSpark1Framework(Properties sparkConf,
                                                     LocationFactory locationFactory) throws IOException {
  String sparkYarnJar = sparkConf.getProperty(SPARK_YARN_JAR);
  if (sparkYarnJar != null) {
    URI sparkYarnJarURI = URI.create(sparkYarnJar);
    if (locationFactory.getHomeLocation().toURI().getScheme().equals(sparkYarnJarURI.getScheme())) {
      Location frameworkLocation = locationFactory.create(sparkYarnJarURI);
      if (frameworkLocation.exists()) {
        return new SparkFramework(new LocalizeResource(resolveURI(frameworkLocation), false), SPARK_YARN_JAR);
      }
      LOG.warn("The location {} set by '{}' does not exist.", frameworkLocation, SPARK_YARN_JAR);
    }
  }

  // If spark.yarn.jar is not defined or doesn't exist, get the spark-assembly jar from the local FS and upload it
  File sparkAssemblyJar = Iterables.getFirst(getLocalSparkLibrary(SparkCompat.SPARK1_2_10), null);
  Location frameworkDir = locationFactory.create("/framework/spark");
  Location frameworkLocation = frameworkDir.append(sparkAssemblyJar.getName());

  // Upload the assembly jar to the framework location if it does not exist yet
  if (!frameworkLocation.exists()) {
    frameworkDir.mkdirs("755");
    try (OutputStream os = frameworkLocation.getOutputStream("644")) {
      Files.copy(sparkAssemblyJar.toPath(), os);
    }
  }
  return new SparkFramework(new LocalizeResource(resolveURI(frameworkLocation), false), SPARK_YARN_JAR);
}