Use of java.io.FileOutputStream in the Apache Tomcat project:
class SignCode, method extractFilesFromApplicationString.
/**
 * Removes the base64 encoding, unzips the archive and writes the extracted
 * files over the top of the old ones.
 *
 * @param data  base64 encoded zip archive containing one entry per file, in
 *              the same order as {@code files}
 * @param files the files to overwrite with the extracted contents
 *
 * @throws IOException if the archive contains fewer entries than expected or
 *                     an I/O error occurs while reading or writing
 */
private static void extractFilesFromApplicationString(String data, List<File> files) throws IOException {
    ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(data));
    try (ZipInputStream zis = new ZipInputStream(bais)) {
        byte[] buf = new byte[32 * 1024];
        for (int i = 0; i < files.size(); i++) {
            // Bug fix: the return value of getNextEntry() was previously
            // ignored. If the archive holds fewer entries than files, read()
            // immediately returns -1 and the existing target file would be
            // silently truncated to zero bytes. Check before opening (and
            // thereby truncating) the output file.
            if (zis.getNextEntry() == null) {
                throw new IOException("Missing zip entry for file [" + files.get(i) + "]");
            }
            try (FileOutputStream fos = new FileOutputStream(files.get(i))) {
                int numRead;
                while ((numRead = zis.read(buf)) >= 0) {
                    fos.write(buf, 0, numRead);
                }
            }
        }
    }
}
Use of java.io.FileOutputStream in the Apache Zeppelin project:
class PySparkInterpreter, method createPythonScript.
/**
 * Copies the bundled {@code python/zeppelin_pyspark.py} classpath resource to
 * {@code scriptPath} so the interpreter can launch it.
 *
 * @throws InterpreterException if the target path is a directory, the
 *                              resource is missing, or the copy fails
 */
private void createPythonScript() {
    ClassLoader classLoader = getClass().getClassLoader();
    File out = new File(scriptPath);
    if (out.exists() && out.isDirectory()) {
        throw new InterpreterException("Can't create python script " + out.getAbsolutePath());
    }
    // Bug fix: the output stream was leaked if IOUtils.copy() threw, and the
    // resource stream was never closed at all. try-with-resources closes both
    // on every path. A missing classpath resource previously surfaced as a
    // NullPointerException; it is now reported explicitly. The resource is
    // checked before the output file is created so a failed lookup does not
    // leave an empty script behind.
    try (InputStream resource = classLoader.getResourceAsStream("python/zeppelin_pyspark.py")) {
        if (resource == null) {
            throw new InterpreterException("Can't find resource python/zeppelin_pyspark.py on classpath");
        }
        try (FileOutputStream outStream = new FileOutputStream(out)) {
            IOUtils.copy(resource, outStream);
        }
    } catch (IOException e) {
        throw new InterpreterException(e);
    }
    logger.info("File {} created", scriptPath);
}
Use of java.io.FileOutputStream in the Apache Storm project:
class Utils, method downloadResourcesAsSupervisorAttempt.
/**
 * Attempts to download the blob identified by {@code key} from the blob store
 * into {@code localFile}, verifying that the number of bytes written matches
 * the blob's advertised length. On any failure (exception or size mismatch)
 * the partially downloaded file is deleted.
 *
 * @param cb        the blob store client to fetch from
 * @param key       the key of the blob to download
 * @param localFile the local path to write the blob to
 * @return true if the download completed and the size check passed
 */
private static boolean downloadResourcesAsSupervisorAttempt(ClientBlobStore cb, String key, String localFile) {
    boolean isSuccess = false;
    try (FileOutputStream out = new FileOutputStream(localFile);
         InputStreamWithMeta in = cb.getBlob(key)) {
        long fileSize = in.getFileLength();
        byte[] buffer = new byte[8 * 1024];
        int len;
        // Bug fix: the running total was an int, which overflows for blobs
        // larger than 2 GiB and corrupts the comparison against the long
        // fileSize below (spurious failure or, worse, a spurious success).
        long downloadFileSize = 0;
        while ((len = in.read(buffer)) >= 0) {
            out.write(buffer, 0, len);
            downloadFileSize += len;
        }
        isSuccess = (fileSize == downloadFileSize);
    } catch (TException | IOException e) {
        LOG.error("An exception happened while downloading {} from blob store.", localFile, e);
    }
    if (!isSuccess) {
        // Best-effort cleanup: never leave a truncated/partial blob on disk.
        try {
            Files.deleteIfExists(Paths.get(localFile));
        } catch (IOException ex) {
            LOG.error("Failed trying to delete the partially downloaded {}", localFile, ex);
        }
    }
    return isSuccess;
}
Use of java.io.FileOutputStream in the Apache Storm project:
class Utils, method extractDirFromJarImpl.
/**
 * Extracts every non-directory entry under {@code dir} from the jar at
 * {@code jarpath} into {@code destdir}, preserving the entry paths.
 * Extraction is best-effort: I/O failures are logged rather than propagated.
 *
 * @param jarpath path to the jar file to read
 * @param dir     entry-name prefix selecting which entries to extract
 * @param destdir destination directory for the extracted files
 */
public void extractDirFromJarImpl(String jarpath, String dir, File destdir) {
    try (JarFile jarFile = new JarFile(jarpath)) {
        // Security fix (zip-slip): canonical destination root used to reject
        // entry names (e.g. containing "..") that resolve outside destdir.
        String destRoot = destdir.getCanonicalPath() + File.separator;
        Enumeration<JarEntry> jarEnums = jarFile.entries();
        while (jarEnums.hasMoreElements()) {
            JarEntry entry = jarEnums.nextElement();
            if (!entry.isDirectory() && entry.getName().startsWith(dir)) {
                File aFile = new File(destdir, entry.getName());
                if (!aFile.getCanonicalPath().startsWith(destRoot)) {
                    LOG.warn("Skipping jar entry {} that resolves outside {}", entry.getName(), destdir);
                    continue;
                }
                aFile.getParentFile().mkdirs();
                try (FileOutputStream out = new FileOutputStream(aFile);
                     InputStream in = jarFile.getInputStream(entry)) {
                    IOUtils.copy(in, out);
                }
            }
        }
    } catch (IOException e) {
        // Bug fix: the exception itself was previously dropped from the log
        // call, hiding the failure cause.
        LOG.info("Could not extract {} from {}", dir, jarpath, e);
    }
}
Use of java.io.FileOutputStream in the Apache Hive project:
class QTestUtil, method cliInit.
/**
 * Prepares a CLI session for running the query file {@code tname}: optionally
 * recreates the test sources, redirects the session's stdout/stderr into a
 * per-test output file (wrapped in a sorting and/or digesting stream for
 * tests whose raw output is not stable), and starts a fresh SessionState,
 * carrying over the Tez or Spark session from any previous SessionState.
 *
 * @param tname    name of the query file under test
 * @param recreate whether to drop and recreate the test source tables first
 * @return the absolute path of the file capturing the test's output
 * @throws Exception if cleanup, source creation, or session startup fails
 */
public String cliInit(String tname, boolean recreate) throws Exception {
// Optionally rebuild the test data from scratch before running this query.
if (recreate) {
cleanUp(tname);
createSources(tname);
}
// Use the dummy authenticator so the run does not depend on a real
// authentication setup.
HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
Utilities.clearWorkMap(conf);
CliSessionState ss = createSessionState();
assert ss != null;
ss.in = System.in;
// Compute the capture-file name: <tname><ext>, taking just the file name
// component when tname is resolved relative to outDir.
String outFileExtension = getOutFileExtension(tname);
String stdoutName = null;
if (outDir != null) {
// TODO: why is this needed?
File qf = new File(outDir, tname);
stdoutName = qf.getName().concat(outFileExtension);
} else {
stdoutName = tname + outFileExtension;
}
// The wrapper stream chosen below normalizes the captured output (sorting
// and/or digesting) depending on which query set the test belongs to.
File outf = new File(logDir, stdoutName);
OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
if (qSortQuerySet.contains(tname)) {
ss.out = new SortPrintStream(fo, "UTF-8");
} else if (qHashQuerySet.contains(tname)) {
ss.out = new DigestPrintStream(fo, "UTF-8");
} else if (qSortNHashQuerySet.contains(tname)) {
ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
} else {
ss.out = new PrintStream(fo, true, "UTF-8");
}
// stderr shares the same underlying capture file as stdout.
ss.err = new CachingPrintStream(fo, true, "UTF-8");
ss.setIsSilent(true);
SessionState oldSs = SessionState.get();
boolean canReuseSession = !qNoSessionReuseQuerySet.contains(tname);
// On Tez, hand the existing Tez session over to the new SessionState before
// closing the old one, so the session is reused across tests unless this
// test opts out of session reuse.
if (oldSs != null && canReuseSession && clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
// Copy the tezSessionState from the old CliSessionState.
tezSessionState = oldSs.getTezSession();
oldSs.setTezSession(null);
ss.setTezSession(tezSessionState);
oldSs.close();
}
// On Spark the session is always carried over. NOTE(review): unlike the Tez
// branch there is no canReuseSession check here — presumably intentional,
// but worth confirming.
if (oldSs != null && clusterType.getCoreClusterType() == CoreClusterType.SPARK) {
sparkSession = oldSs.getSparkSession();
ss.setSparkSession(sparkSession);
oldSs.setSparkSession(null);
oldSs.close();
}
// Close the previous test's capture stream (but never the real System.out).
if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
oldSs.out.close();
}
SessionState.start(ss);
cliDriver = new CliDriver();
// Special case: register the test init script with the session for the test
// that exercises init-file processing.
if (tname.equals("init_file.q")) {
ss.initFiles.add(AbstractCliConfig.HIVE_ROOT + "/data/scripts/test_init_file.sql");
}
cliDriver.processInitFiles(ss);
return outf.getAbsolutePath();
}
Aggregations