use of java.io.OutputStream in project flink by apache.
the class BlobClient method deleteInternal.
    /**
     * Deletes one or multiple BLOBs from the BLOB server.
     *
     * @param jobId The job ID to identify the BLOB(s) to be deleted.
     * @param key The key to identify the specific BLOB to delete or <code>null</code> to delete
     *            all BLOBs associated with the job ID.
     * @param bKey The blob key to identify a specific content-addressable BLOB. This parameter
     *             is mutually exclusive with jobId and key.
     * @throws IOException Thrown if an I/O error occurs while transferring the request to the BLOB server.
     */
    private void deleteInternal(JobID jobId, String key, BlobKey bKey) throws IOException {
        if ((jobId != null && bKey != null) || (jobId == null && bKey == null)) {
            throw new IllegalArgumentException();
        }
        try {
            final OutputStream outputStream = this.socket.getOutputStream();
            final InputStream inputStream = this.socket.getInputStream();
            // Signal the type of operation
            outputStream.write(DELETE_OPERATION);
            // Check whether the DELETE should be done in a content-addressable manner
            if (jobId == null) {
                // delete a content-addressable BLOB identified by its blob key
                outputStream.write(CONTENT_ADDRESSABLE);
                bKey.writeToOutputStream(outputStream);
            } else if (key != null) {
                // delete the BLOB stored under the given job ID and name key
                outputStream.write(NAME_ADDRESSABLE);
                // Send the job ID and the key
                byte[] idBytes = jobId.getBytes();
                byte[] keyBytes = key.getBytes(BlobUtils.DEFAULT_CHARSET);
                outputStream.write(idBytes);
                writeLength(keyBytes.length, outputStream);
                outputStream.write(keyBytes);
            } else {
                // delete all BLOBs for the given job ID
                outputStream.write(JOB_ID_SCOPE);
                byte[] idBytes = jobId.getBytes();
                outputStream.write(idBytes);
            }
            int response = inputStream.read();
            if (response < 0) {
                throw new EOFException("Premature end of response");
            }
            if (response == RETURN_ERROR) {
                Throwable cause = readExceptionFromStream(inputStream);
                throw new IOException("Server side error: " + cause.getMessage(), cause);
            } else if (response != RETURN_OKAY) {
                throw new IOException("Unrecognized response");
            }
        } catch (Throwable t) {
            BlobUtils.closeSilently(socket, LOG);
            throw new IOException("DELETE operation failed: " + t.getMessage(), t);
        }
    }
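Taken together, the three branches produce three wire-level request layouts: an operation byte, a scope byte, and then either a blob key, a job ID plus a length-prefixed name key, or a job ID alone. The fragment below is a minimal, self-contained sketch of the content-addressable case written into a ByteArrayOutputStream; the constant values and the 20-byte key length are illustrative assumptions, not the real values from the Flink protocol.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class DeleteRequestSketch {

    // Illustrative stand-ins; the real protocol constants are defined in Flink itself.
    private static final int DELETE_OPERATION = 2;
    private static final int CONTENT_ADDRESSABLE = 0;

    public static void main(String[] args) throws IOException {
        byte[] blobKeyBytes = new byte[20]; // placeholder for a BlobKey's hash bytes
        ByteArrayOutputStream request = new ByteArrayOutputStream();
        request.write(DELETE_OPERATION);    // 1 byte: which operation to perform
        request.write(CONTENT_ADDRESSABLE); // 1 byte: how the BLOB is addressed
        request.write(blobKeyBytes);        // the key identifying the BLOB to delete
        System.out.println("DELETE request is " + request.size() + " bytes");
    }
}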
use of java.io.OutputStream in project flink by apache.
the class BlobClient method putInputStream.
    /**
     * Uploads data from the given input stream to the BLOB server.
     *
     * @param jobId
     *        the ID of the job the BLOB belongs to or <code>null</code> to store the BLOB in a content-addressable
     *        manner
     * @param key
     *        the key to identify the BLOB on the server or <code>null</code> to store the BLOB in a content-addressable
     *        manner
     * @param inputStream
     *        the input stream to read the data from
     * @return the computed BLOB key if the BLOB has been stored in a content-addressable manner, <code>null</code>
     *         otherwise
     * @throws IOException
     *         thrown if an I/O error occurs while uploading the data to the BLOB server
     */
    private BlobKey putInputStream(JobID jobId, String key, InputStream inputStream) throws IOException {
        if (this.socket.isClosed()) {
            throw new IllegalStateException("BLOB Client is not connected. Client has been shut down or encountered an error before.");
        }
        if (LOG.isDebugEnabled()) {
            if (jobId == null) {
                LOG.debug(String.format("PUT content addressable BLOB stream to %s", socket.getLocalSocketAddress()));
            } else {
                LOG.debug(String.format("PUT BLOB stream under %s / \"%s\" to %s", jobId, key, socket.getLocalSocketAddress()));
            }
        }
        try {
            final OutputStream os = this.socket.getOutputStream();
            final MessageDigest md = jobId == null ? BlobUtils.createMessageDigest() : null;
            final byte[] xferBuf = new byte[BUFFER_SIZE];
            // Send the PUT header
            sendPutHeader(os, jobId, key);
            while (true) {
                final int read = inputStream.read(xferBuf);
                if (read < 0) {
                    // we are done: send a -1 to signal the end of the stream
                    writeLength(-1, os);
                    break;
                }
                if (read > 0) {
                    writeLength(read, os);
                    os.write(xferBuf, 0, read);
                    if (md != null) {
                        md.update(xferBuf, 0, read);
                    }
                }
            }
            // Receive the blob key and compare it against the locally computed digest
            final InputStream is = this.socket.getInputStream();
            return receivePutResponseAndCompare(is, md);
        } catch (Throwable t) {
            BlobUtils.closeSilently(socket, LOG);
            throw new IOException("PUT operation failed: " + t.getMessage(), t);
        }
    }
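The upload loop frames the payload as length-prefixed chunks and uses a length of -1 as the end-of-stream marker, updating the message digest only for content-addressable uploads (jobId == null). Below is a minimal, standalone sketch of that framing, writing into a ByteArrayOutputStream instead of the socket; the little-endian 4-byte length encoding shown here is an assumption for illustration, while the authoritative encoding is whatever BlobUtils.writeLength implements.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class ChunkedPutSketch {

    // Assumed 4-byte length encoding, for illustration only.
    static void writeLength(int length, OutputStream os) throws IOException {
        os.write(length & 0xff);
        os.write((length >>> 8) & 0xff);
        os.write((length >>> 16) & 0xff);
        os.write((length >>> 24) & 0xff);
    }

    public static void main(String[] args) throws IOException {
        InputStream in = new ByteArrayInputStream(new byte[10_000]); // fake payload
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buf = new byte[4096];
        int read;
        while ((read = in.read(buf)) >= 0) {
            if (read > 0) {
                writeLength(read, out);  // chunk header: number of payload bytes
                out.write(buf, 0, read); // chunk payload
            }
        }
        writeLength(-1, out);            // -1 terminates the stream
        System.out.println("framed size: " + out.size() + " bytes");
    }
}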
use of java.io.OutputStream in project flink by apache.
the class TestingFailingBlobServer method run.
    @Override
    public void run() {
        // handle the first operation (PUT) properly
        try {
            new BlobServerConnection(getServerSocket().accept(), this).start();
        } catch (Throwable t) {
            t.printStackTrace();
        }
        // then fail the next numFailures operations by aborting the connection
        for (int num = 0; num < numFailures && !isShutdown(); num++) {
            Socket socket = null;
            try {
                socket = getServerSocket().accept();
                InputStream is = socket.getInputStream();
                OutputStream os = socket.getOutputStream();
                // just abort everything
                is.close();
                os.close();
                socket.close();
            } catch (IOException e) {
                // ignored: aborting the connection is the intended behavior
            } finally {
                if (socket != null) {
                    try {
                        socket.close();
                    } catch (Throwable t) {
                        // ignore failures while closing
                    }
                }
            }
        }
        // afterwards, serve connections regularly
        super.run();
    }
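From the client's perspective, each aborted connection looks like a stream that ends before a response byte arrives, which is exactly what BlobClient's response < 0 check turns into an EOFException. The standalone sketch below (not Flink code) reproduces that situation with a throwaway ServerSocket that accepts a connection and closes it without answering.

import java.io.IOException;
import java.io.InputStream;
import java.net.ServerSocket;
import java.net.Socket;

public class AbortingServerSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        final ServerSocket server = new ServerSocket(0); // ephemeral port
        Thread aborting = new Thread(() -> {
            try (Socket s = server.accept()) {
                // close immediately without writing a response
            } catch (IOException ignored) {
            }
        });
        aborting.start();

        try (Socket client = new Socket("localhost", server.getLocalPort())) {
            InputStream is = client.getInputStream();
            int response = is.read(); // -1: premature end of response
            System.out.println("response byte: " + response);
        } finally {
            aborting.join();
            server.close();
        }
    }
}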
use of java.io.OutputStream in project flink by apache.
the class CliFrontendYarnAddressConfigurationTest method disableStdOutErr.
    @BeforeClass
    public static void disableStdOutErr() {
        class NullPrint extends OutputStream {

            @Override
            public void write(int b) {
            }
        }
        PrintStream nullPrinter = new PrintStream(new NullPrint());
        System.setOut(nullPrinter);
        System.setErr(nullPrinter);
        // Unset FLINK_CONF_DIR, as this is a precondition for this test to work properly
        Map<String, String> map = new HashMap<>(System.getenv());
        map.remove(ConfigConstants.ENV_FLINK_CONF_DIR);
        TestBaseUtils.setEnv(map);
    }
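The local NullPrint class is a do-nothing sink: every byte written to System.out or System.err during the test run is silently discarded. On Java 11 and later the same effect is available without a custom class via OutputStream.nullOutputStream(); a minimal sketch:

import java.io.OutputStream;
import java.io.PrintStream;

public class SilenceStdOutSketch {

    public static void main(String[] args) {
        PrintStream original = System.out;
        // OutputStream.nullOutputStream() (Java 11+) returns a stream that discards all writes.
        System.setOut(new PrintStream(OutputStream.nullOutputStream()));
        System.out.println("this line is discarded");
        System.setOut(original);
        System.out.println("stdout restored");
    }
}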
use of java.io.OutputStream in project groovy by apache.
the class fasta method main.
    public static void main(String[] args) throws IOException {
        makeCumulative(HomoSapiens);
        makeCumulative(IUB);
        int n = 2500000;
        if (args.length > 0) {
            n = Integer.parseInt(args[0]);
        }
        OutputStream out = System.out;
        makeRepeatFasta("ONE", "Homo sapiens alu", ALU, n * 2, out);
        makeRandomFasta("TWO", "IUB ambiguity codes", IUB, n * 3, out);
        makeRandomFasta("THREE", "Homo sapiens frequency", HomoSapiens, n * 5, out);
        out.close();
    }
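Here System.out is used directly as an OutputStream, so every write from the generator methods goes straight to the console stream. A common variant, sketched below under the assumption that many small per-line writes dominate, wraps it in a BufferedOutputStream to cut down on system calls; this is an illustration, not part of the benchmark source.

import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class BufferedFastaOut {

    public static void main(String[] args) throws IOException {
        OutputStream out = new BufferedOutputStream(System.out, 64 * 1024);
        out.write(">ONE Homo sapiens alu\n".getBytes(StandardCharsets.US_ASCII));
        out.write("GGCCGGGCGCGGTGGCTCAC\n".getBytes(StandardCharsets.US_ASCII)); // sample sequence line
        out.flush(); // flush instead of close so System.out stays usable
    }
}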