Example use of com.google.common.io.ByteSource in project druid (druid-io): class GoogleTaskLogsTest, method testStreamTaskLogWithoutOffset.
@Test
public void testStreamTaskLogWithoutOffset() throws Exception {
    final String testLog = "hello this is a log";
    final String logPath = prefix + "/" + taskid;

    // Mock the storage interactions: the log object exists, has the expected
    // size, and returns the log content as a stream.
    expect(storage.exists(bucket, logPath)).andReturn(true);
    expect(storage.size(bucket, logPath)).andReturn((long) testLog.length());
    expect(storage.get(bucket, logPath)).andReturn(new ByteArrayInputStream(testLog.getBytes(Charsets.UTF_8)));
    replayAll();

    // Offset 0 means "stream the whole log from the beginning".
    final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(taskid, 0);

    final StringWriter writer = new StringWriter();
    IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");

    // JUnit convention: expected value first, actual second. The original had
    // the arguments reversed, which produces misleading failure messages.
    Assert.assertEquals(testLog, writer.toString());

    verifyAll();
}
Example use of com.google.common.io.ByteSource in project druid (druid-io): class AzureDataSegmentPuller, method getSegmentFiles.
/**
 * Pulls the segment stored at {@code blobPath} in the given Azure container,
 * unzipping it into {@code outDir}. On I/O failure the partially written
 * output directory is removed (best effort) before the error is rethrown.
 *
 * @throws SegmentLoadingException wrapping any IOException from the transfer.
 */
public io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles(final String containerName, final String blobPath, final File outDir) throws SegmentLoadingException {
    try {
        prepareOutDir(outDir);
        final ByteSource segmentSource = new AzureByteSource(azureStorage, containerName, blobPath);
        // Unzip with the Azure-specific retry predicate; the final flag requests
        // the same behavior the original call passed as `true`.
        final io.druid.java.util.common.FileUtils.FileCopyResult copyResult =
            CompressionUtils.unzip(segmentSource, outDir, AzureUtils.AZURE_RETRY, true);
        log.info("Loaded %d bytes from [%s] to [%s]", copyResult.size(), blobPath, outDir.getAbsolutePath());
        return copyResult;
    } catch (IOException e) {
        // Best-effort cleanup of a partially populated output directory;
        // a cleanup failure is logged but does not mask the original error.
        try {
            FileUtils.deleteDirectory(outDir);
        } catch (IOException ioe) {
            log.warn(ioe, "Failed to remove output directory [%s] for segment pulled from [%s]", outDir.getAbsolutePath(), blobPath);
        }
        throw new SegmentLoadingException(e, e.getMessage());
    }
}
Example use of com.google.common.io.ByteSource in project druid (druid-io): class AzureTaskLogs, method streamTaskLog.
@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException {
    final String container = config.getContainer();
    final String taskKey = getTaskLogKey(taskid);
    try {
        if (!azureStorage.getBlobExists(container, taskKey)) {
            return Optional.absent();
        }
        return Optional.<ByteSource>of(new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                try {
                    // Interpret the offset like other TaskLogStreamer impls:
                    // positive => from that byte, negative => from the end,
                    // out-of-range => from the beginning.
                    final long start;
                    final long length = azureStorage.getBlobLength(container, taskKey);
                    if (offset > 0 && offset < length) {
                        start = offset;
                    } else if (offset < 0 && (-1 * offset) < length) {
                        start = length + offset;
                    } else {
                        start = 0;
                    }
                    InputStream stream = azureStorage.getBlobInputStream(container, taskKey);
                    // InputStream.skip may skip fewer bytes than requested, so
                    // loop until the full offset is consumed. The original code
                    // ignored skip's return value, which could silently start
                    // the stream at the wrong position.
                    long skipped = 0;
                    while (skipped < start) {
                        final long n = stream.skip(start - skipped);
                        if (n <= 0) {
                            // No forward progress (e.g. EOF reached); stop
                            // rather than spin forever.
                            break;
                        }
                        skipped += n;
                    }
                    return stream;
                } catch (Exception e) {
                    throw new IOException(e);
                }
            }
        });
    } catch (StorageException | URISyntaxException e) {
        throw new IOException(String.format("Failed to stream logs from: %s", taskKey), e);
    }
}
Example use of com.google.common.io.ByteSource in project karaf (apache): class SshKeyFormatTest, method usePemKey.
@Test
public void usePemKey() throws Exception {
    // Load the PEM-encoded test key from the classpath and parse it as PKCS#8.
    SshClient client = SshClient.setUpDefaultClient();
    URL pemUrl = Resources.getResource(SshKeyFormatTest.class, "test.pem");
    ByteSource pemSource = Resources.asByteSource(pemUrl);
    PKCS8Key pkcs8 = new PKCS8Key(pemSource.openStream(), null);

    // The server must present exactly the public key from the PEM file.
    String sshPort = getSshPort();
    client.setServerKeyVerifier(new RequiredServerKeyVerifier(pkcs8.getPublicKey()));
    client.start();

    ConnectFuture connectFuture = client.connect("karaf", "localhost", Integer.parseInt(sshPort));
    connectFuture.await();
    ClientSession session = connectFuture.getSession();

    // Drive password authentication until the session either authenticates
    // or closes; WAIT_AUTH means another auth attempt is expected.
    Set<ClientSessionEvent> events = EnumSet.of(ClientSessionEvent.WAIT_AUTH);
    while (events.contains(ClientSessionEvent.WAIT_AUTH)) {
        session.addPasswordIdentity("karaf");
        session.auth().verify();
        events = session.waitFor(EnumSet.of(ClientSessionEvent.WAIT_AUTH, ClientSessionEvent.CLOSED, ClientSessionEvent.AUTHED), 0);
    }
    if (events.contains(ClientSessionEvent.CLOSED)) {
        throw new Exception("Could not open SSH channel");
    }

    session.close(true);
}
Example use of com.google.common.io.ByteSource in project bazel (bazelbuild): class FileSystemUtils, method readContentWithLimit.
/**
 * Reads at most {@code limit} bytes from {@code inputFile} and returns it as a byte array.
 *
 * <p>If the file holds fewer than {@code limit} bytes, the returned array is
 * trimmed to the number of bytes actually read.
 *
 * @throws IOException if there was an error.
 */
public static byte[] readContentWithLimit(Path inputFile, int limit) throws IOException {
    Preconditions.checkArgument(limit >= 0, "limit needs to be >=0, but it is %s", limit);
    ByteSource byteSource = asByteSource(inputFile);
    byte[] result = new byte[limit];
    try (InputStream in = byteSource.openBufferedStream()) {
        // ByteStreams.read loops internally until either `limit` bytes are
        // read or EOF is reached, so a single call suffices.
        int bytesRead = ByteStreams.read(in, result, 0, limit);
        if (bytesRead == limit) {
            return result;
        }
        return Arrays.copyOf(result, bytesRead);
    }
}
Aggregations