Use of java.io.BufferedOutputStream in project che by eclipse.
In the class DeltaProcessingState, the method saveExternalLibTimeStamps:
public void saveExternalLibTimeStamps() throws CoreException {
    if (this.externalTimeStamps == null)
        return;
    // cleanup to avoid any leak ( https://bugs.eclipse.org/bugs/show_bug.cgi?id=244849 )
    HashSet toRemove = new HashSet();
    if (this.roots != null) {
        Enumeration keys = this.externalTimeStamps.keys();
        while (keys.hasMoreElements()) {
            Object key = keys.nextElement();
            if (this.roots.get(key) == null) {
                toRemove.add(key);
            }
        }
    }
    File timestamps = getTimeStampsFile();
    DataOutputStream out = null;
    try {
        out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(timestamps)));
        out.writeInt(this.externalTimeStamps.size() - toRemove.size());
        Iterator entries = this.externalTimeStamps.entrySet().iterator();
        while (entries.hasNext()) {
            Map.Entry entry = (Map.Entry) entries.next();
            IPath key = (IPath) entry.getKey();
            if (!toRemove.contains(key)) {
                out.writeUTF(key.toPortableString());
                Long timestamp = (Long) entry.getValue();
                out.writeLong(timestamp.longValue());
            }
        }
    } catch (IOException e) {
        IStatus status = new Status(IStatus.ERROR, JavaCore.PLUGIN_ID, IStatus.ERROR, "Problems while saving timestamps", e); //$NON-NLS-1$
        throw new CoreException(status);
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                // nothing we can do: ignore
            }
        }
    }
}
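The on-disk format written above is simply an int count followed by one (portable path, long timestamp) pair per surviving root, so reading it back is a mirror image. A minimal sketch of the load side, assuming the same timestamps file; the helper name readExternalLibTimeStamps is hypothetical and this is not the actual Eclipse loader:

private Hashtable readExternalLibTimeStamps(File timestamps) throws IOException {
    Hashtable result = new Hashtable();
    // Mirror of the writer: an int count, then count (UTF path, long) pairs
    try (DataInputStream in = new DataInputStream(
            new BufferedInputStream(new FileInputStream(timestamps)))) {
        int size = in.readInt();
        for (int i = 0; i < size; i++) {
            IPath key = Path.fromPortableString(in.readUTF());
            result.put(key, Long.valueOf(in.readLong()));
        }
    }
    return result;
}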
Use of java.io.BufferedOutputStream in project hive by apache.
In the class QTestUtil, the method cliInit:
public String cliInit(String tname, boolean recreate) throws Exception {
    if (recreate) {
        cleanUp(tname);
        createSources(tname);
    }
    HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
    Utilities.clearWorkMap(conf);
    CliSessionState ss = createSessionState();
    assert ss != null;
    ss.in = System.in;
    String outFileExtension = getOutFileExtension(tname);
    String stdoutName = null;
    if (outDir != null) {
        // TODO: why is this needed?
        File qf = new File(outDir, tname);
        stdoutName = qf.getName().concat(outFileExtension);
    } else {
        stdoutName = tname + outFileExtension;
    }
    File outf = new File(logDir, stdoutName);
    OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
    if (qSortQuerySet.contains(tname)) {
        ss.out = new SortPrintStream(fo, "UTF-8");
    } else if (qHashQuerySet.contains(tname)) {
        ss.out = new DigestPrintStream(fo, "UTF-8");
    } else if (qSortNHashQuerySet.contains(tname)) {
        ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
    } else {
        ss.out = new PrintStream(fo, true, "UTF-8");
    }
    ss.err = new CachingPrintStream(fo, true, "UTF-8");
    ss.setIsSilent(true);
    SessionState oldSs = SessionState.get();
    boolean canReuseSession = !qNoSessionReuseQuerySet.contains(tname);
    if (oldSs != null && canReuseSession && clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
        // Copy the tezSessionState from the old CliSessionState.
        tezSessionState = oldSs.getTezSession();
        oldSs.setTezSession(null);
        ss.setTezSession(tezSessionState);
        oldSs.close();
    }
    if (oldSs != null && clusterType.getCoreClusterType() == CoreClusterType.SPARK) {
        sparkSession = oldSs.getSparkSession();
        ss.setSparkSession(sparkSession);
        oldSs.setSparkSession(null);
        oldSs.close();
    }
    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
        oldSs.out.close();
    }
    SessionState.start(ss);
    cliDriver = new CliDriver();
    if (tname.equals("init_file.q")) {
        ss.initFiles.add(AbstractCliConfig.HIVE_ROOT + "/data/scripts/test_init_file.sql");
    }
    cliDriver.processInitFiles(ss);
    return outf.getAbsolutePath();
}
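Every branch above layers its PrintStream variant over the same BufferedOutputStream, so the test session's stdout and stderr share one buffered file channel. A minimal standalone sketch of that layering (the file name is hypothetical):

import java.io.*;

public class BufferedPrintDemo {
    public static void main(String[] args) throws IOException {
        // One buffered file stream, shared by whatever PrintStream wraps it
        OutputStream fo = new BufferedOutputStream(new FileOutputStream(new File("query.out")));
        // autoFlush=true mirrors the plain-PrintStream branch in cliInit
        PrintStream out = new PrintStream(fo, true, "UTF-8");
        out.println("result row");
        out.close(); // closes the PrintStream, the buffer, and the underlying file
    }
}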
Use of java.io.BufferedOutputStream in project tomcat by apache.
In the class ManagerServlet, the method uploadWar:
/**
 * Upload the WAR file included in this request, and store it at the
 * specified file location.
 *
 * @param writer Writer to render to
 * @param request The servlet request we are processing
 * @param war The file into which we should store the uploaded WAR
 * @param smClient The StringManager used to construct i18n messages based
 *            on the Locale of the client
 *
 * @exception IOException if an I/O error occurs during processing
 */
protected void uploadWar(PrintWriter writer, HttpServletRequest request, File war, StringManager smClient) throws IOException {
    if (war.exists() && !war.delete()) {
        String msg = smClient.getString("managerServlet.deleteFail", war);
        throw new IOException(msg);
    }
    try (ServletInputStream istream = request.getInputStream();
            BufferedOutputStream ostream = new BufferedOutputStream(new FileOutputStream(war), 1024)) {
        byte[] buffer = new byte[1024];
        while (true) {
            int n = istream.read(buffer);
            if (n < 0) {
                break;
            }
            ostream.write(buffer, 0, n);
        }
    } catch (IOException e) {
        if (war.exists() && !war.delete()) {
            writer.println(smClient.getString("managerServlet.deleteFail", war));
        }
        throw e;
    }
}
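On Java 9 and later, the manual read/write loop can be expressed with InputStream.transferTo, which performs the same chunked copy; a sketch, not Tomcat's actual code:

try (ServletInputStream istream = request.getInputStream();
        BufferedOutputStream ostream = new BufferedOutputStream(new FileOutputStream(war), 1024)) {
    // transferTo (Java 9+) copies the stream in chunks, like the loop above
    istream.transferTo(ostream);
}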
Use of java.io.BufferedOutputStream in project zookeeper by apache.
In the class FileSnap, the method serialize:
/**
 * Serialize the datatree and sessions into the file snapshot
 * @param dt the datatree to be serialized
 * @param sessions the sessions to be serialized
 * @param snapShot the file to store snapshot into
 */
public synchronized void serialize(DataTree dt, Map<Long, Integer> sessions, File snapShot) throws IOException {
    if (!close) {
        OutputStream sessOS = new BufferedOutputStream(new FileOutputStream(snapShot));
        CheckedOutputStream crcOut = new CheckedOutputStream(sessOS, new Adler32());
        //CheckedOutputStream cout = new CheckedOutputStream()
        OutputArchive oa = BinaryOutputArchive.getArchive(crcOut);
        FileHeader header = new FileHeader(SNAP_MAGIC, VERSION, dbId);
        serialize(dt, sessions, oa, header);
        long val = crcOut.getChecksum().getValue();
        oa.writeLong(val, "val");
        oa.writeString("/", "path");
        sessOS.flush();
        crcOut.close();
        sessOS.close();
    }
}
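Because every byte passes through the CheckedOutputStream before the checksum is appended, a reader can verify integrity by running the same Adler32 over the stream and comparing it against the stored value. A sketch of that check on the deserialize side (not the actual FileSnap code):

InputStream snapIS = new BufferedInputStream(new FileInputStream(snapShot));
CheckedInputStream crcIn = new CheckedInputStream(snapIS, new Adler32());
InputArchive ia = BinaryInputArchive.getArchive(crcIn);
// ... deserialize the header, sessions, and data tree from ia ...
// Capture the running checksum before reading the stored value, so the
// stored long itself is not folded into the comparison
long checkSum = crcIn.getChecksum().getValue();
long val = ia.readLong("val");
if (val != checkSum) {
    throw new IOException("CRC corruption in snapshot: " + snapShot);
}
crcIn.close();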
Use of java.io.BufferedOutputStream in project cw-omnibus by commonsguy.
In the class ZipUtils, the method unzip:
public static void unzip(File zipFile, File destDir, String subtreeInZip) throws UnzipException, IOException {
    if (destDir.exists()) {
        deleteContents(destDir);
    } else {
        destDir.mkdirs();
    }
    try {
        final FileInputStream fis = new FileInputStream(zipFile);
        final ZipInputStream zis = new ZipInputStream(new BufferedInputStream(fis));
        ZipEntry entry;
        int entries = 0;
        long total = 0;
        try {
            while ((entry = zis.getNextEntry()) != null) {
                if (subtreeInZip == null || entry.getName().startsWith(subtreeInZip)) {
                    int bytesRead;
                    final byte[] data = new byte[BUFFER_SIZE];
                    // guard the substring against a null subtreeInZip (the condition
                    // above allows null, which would otherwise NPE here)
                    final String relativeName = (subtreeInZip == null) ? entry.getName() : entry.getName().substring(subtreeInZip.length());
                    // validateZipEntry() resolves the entry against destDir and rejects
                    // path-traversal ("zip slip") names
                    final String zipCanonicalPath = validateZipEntry(relativeName, destDir);
                    if (entry.isDirectory()) {
                        new File(zipCanonicalPath).mkdir();
                    } else {
                        final FileOutputStream fos = new FileOutputStream(zipCanonicalPath);
                        final BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER_SIZE);
                        // cap total decompressed output to guard against zip bombs
                        while (total + BUFFER_SIZE <= DEFAULT_MAX_SIZE && (bytesRead = zis.read(data, 0, BUFFER_SIZE)) != -1) {
                            dest.write(data, 0, bytesRead);
                            total += bytesRead;
                        }
                        dest.flush();
                        fos.getFD().sync();
                        dest.close();
                        if (total + BUFFER_SIZE > DEFAULT_MAX_SIZE) {
                            throw new IllegalStateException("Too much output from ZIP");
                        }
                    }
                    zis.closeEntry();
                    entries++;
                    // cap the entry count as a second zip-bomb guard
                    if (entries > DEFAULT_MAX_ENTRIES) {
                        throw new IllegalStateException("Too many entries in ZIP");
                    }
                }
            }
        } finally {
            zis.close();
        }
    } catch (Throwable t) {
        // roll back the partially extracted tree before rethrowing
        if (destDir.exists()) {
            delete(destDir);
        }
        throw new UnzipException("Problem in unzip operation, rolling back", t);
    }
}
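validateZipEntry is referenced but not shown in this listing; a typical implementation defends against "zip slip" path traversal by canonicalizing the joined path and confirming it stays under the destination directory. A hypothetical sketch, assuming the same signature:

private static String validateZipEntry(String zipEntryRelativePath, File destDir) throws IOException {
    File candidate = new File(destDir, zipEntryRelativePath);
    String canonicalPath = candidate.getCanonicalPath();
    // Reject names like "../../etc/passwd" that would escape destDir
    if (!canonicalPath.startsWith(destDir.getCanonicalPath() + File.separator)) {
        throw new IOException("Zip entry escapes destination: " + zipEntryRelativePath);
    }
    return canonicalPath;
}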