
Example 11 with ByteSource

Use of com.google.common.io.ByteSource in project druid by druid-io.

In class GoogleTaskLogsTest, the method testStreamTaskLogWithoutOffset:

@Test
public void testStreamTaskLogWithoutOffset() throws Exception {
    final String testLog = "hello this is a log";
    final String logPath = prefix + "/" + taskid;
    expect(storage.exists(bucket, logPath)).andReturn(true);
    expect(storage.size(bucket, logPath)).andReturn((long) testLog.length());
    expect(storage.get(bucket, logPath)).andReturn(new ByteArrayInputStream(testLog.getBytes(Charsets.UTF_8)));
    replayAll();
    final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(taskid, 0);
    final StringWriter writer = new StringWriter();
    IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
    Assert.assertEquals(testLog, writer.toString());
    verifyAll();
}
Also used : StringWriter(java.io.StringWriter) ByteArrayInputStream(java.io.ByteArrayInputStream) ByteSource(com.google.common.io.ByteSource) Test(org.junit.Test)
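
The copy through a StringWriter above can also be expressed directly on the ByteSource API. A minimal sketch, not part of the Druid test, assuming the same googleTaskLogs, taskid and testLog fixtures: asCharSource turns the ByteSource into a CharSource that reads the whole log as a String (requires java.nio.charset.StandardCharsets).

    // Sketch only: read the ByteSource as a String instead of copying into a StringWriter.
    final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(taskid, 0);
    final String logContents = byteSource.get().asCharSource(StandardCharsets.UTF_8).read();
    Assert.assertEquals(testLog, logContents);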

Example 12 with ByteSource

Use of com.google.common.io.ByteSource in project druid by druid-io.

In class AzureDataSegmentPuller, the method getSegmentFiles:

public io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles(final String containerName, final String blobPath, final File outDir) throws SegmentLoadingException {
    try {
        prepareOutDir(outDir);
        final ByteSource byteSource = new AzureByteSource(azureStorage, containerName, blobPath);
        final io.druid.java.util.common.FileUtils.FileCopyResult result = CompressionUtils.unzip(byteSource, outDir, AzureUtils.AZURE_RETRY, true);
        log.info("Loaded %d bytes from [%s] to [%s]", result.size(), blobPath, outDir.getAbsolutePath());
        return result;
    } catch (IOException e) {
        try {
            FileUtils.deleteDirectory(outDir);
        } catch (IOException ioe) {
            log.warn(ioe, "Failed to remove output directory [%s] for segment pulled from [%s]", outDir.getAbsolutePath(), blobPath);
        }
        throw new SegmentLoadingException(e, e.getMessage());
    }
}
Also used : FileUtils(org.apache.commons.io.FileUtils) SegmentLoadingException(io.druid.segment.loading.SegmentLoadingException) ByteSource(com.google.common.io.ByteSource) IOException(java.io.IOException)
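
For reference, a ByteSource that does not need unzipping can be written straight to disk with Guava's ByteSink API. A minimal sketch, not taken from the Druid source; copyToFile and destFile are hypothetical names, and the AzureByteSource arguments are the same as in the example above.

// Hypothetical helper: copy any ByteSource to a local file using Guava only.
static long copyToFile(ByteSource byteSource, File destFile) throws IOException {
    com.google.common.io.Files.createParentDirs(destFile); // make sure the parent directory exists
    return byteSource.copyTo(com.google.common.io.Files.asByteSink(destFile)); // returns bytes copied
}

// Usage with the Azure source from the example:
// long copied = copyToFile(new AzureByteSource(azureStorage, containerName, blobPath), destFile);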

Example 13 with ByteSource

Use of com.google.common.io.ByteSource in project druid by druid-io.

In class AzureTaskLogs, the method streamTaskLog:

@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException {
    final String container = config.getContainer();
    final String taskKey = getTaskLogKey(taskid);
    try {
        if (!azureStorage.getBlobExists(container, taskKey)) {
            return Optional.absent();
        }
        return Optional.<ByteSource>of(new ByteSource() {

            @Override
            public InputStream openStream() throws IOException {
                try {
                    final long start;
                    final long length = azureStorage.getBlobLength(container, taskKey);
                    if (offset > 0 && offset < length) {
                        start = offset;
                    } else if (offset < 0 && (-1 * offset) < length) {
                        start = length + offset;
                    } else {
                        start = 0;
                    }
                    InputStream stream = azureStorage.getBlobInputStream(container, taskKey);
                    stream.skip(start);
                    return stream;
                } catch (Exception e) {
                    throw new IOException(e);
                }
            }
        });
    } catch (StorageException | URISyntaxException e) {
        throw new IOException(String.format("Failed to stream logs from: %s", taskKey), e);
    }
}
Also used : InputStream(java.io.InputStream) ByteSource(com.google.common.io.ByteSource) IOException(java.io.IOException) URISyntaxException(java.net.URISyntaxException) StorageException(com.microsoft.azure.storage.StorageException)
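
One detail worth noting in the example above: InputStream#skip may skip fewer bytes than requested, and its return value is ignored. A minimal sketch of the same openStream body, assuming the azureStorage, container, taskKey and offset variables from the example, using Guava's ByteStreams.skipFully to skip exactly start bytes:

// Sketch only: same offset handling as above, but with a guaranteed skip.
@Override
public InputStream openStream() throws IOException {
    try {
        final long length = azureStorage.getBlobLength(container, taskKey);
        final long start;
        if (offset > 0 && offset < length) {
            start = offset;              // positive offset: skip that many bytes from the head
        } else if (offset < 0 && -offset < length) {
            start = length + offset;     // negative offset: read only the tail of the log
        } else {
            start = 0;                   // out-of-range offsets fall back to the full log
        }
        final InputStream stream = azureStorage.getBlobInputStream(container, taskKey);
        ByteStreams.skipFully(stream, start); // com.google.common.io.ByteStreams; loops until `start` bytes are skipped
        return stream;
    } catch (Exception e) {
        throw new IOException(e);
    }
}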

Example 14 with ByteSource

Use of com.google.common.io.ByteSource in project karaf by apache.

In class SshKeyFormatTest, the method usePemKey:

@Test
public void usePemKey() throws Exception {
    SshClient client = SshClient.setUpDefaultClient();
    URL testPemURL = Resources.getResource(SshKeyFormatTest.class, "test.pem");
    ByteSource source = Resources.asByteSource(testPemURL);
    PKCS8Key pkcs8 = new PKCS8Key(source.openStream(), null);
    String sshPort = getSshPort();
    client.setServerKeyVerifier(new RequiredServerKeyVerifier(pkcs8.getPublicKey()));
    client.start();
    ConnectFuture future = client.connect("karaf", "localhost", Integer.parseInt(sshPort));
    future.await();
    ClientSession session = future.getSession();
    Set<ClientSessionEvent> ret = EnumSet.of(ClientSessionEvent.WAIT_AUTH);
    while (ret.contains(ClientSessionEvent.WAIT_AUTH)) {
        session.addPasswordIdentity("karaf");
        session.auth().verify();
        ret = session.waitFor(EnumSet.of(ClientSessionEvent.WAIT_AUTH, ClientSessionEvent.CLOSED, ClientSessionEvent.AUTHED), 0);
    }
    if (ret.contains(ClientSessionEvent.CLOSED)) {
        throw new Exception("Could not open SSH channel");
    }
    session.close(true);
}
Also used : PKCS8Key(org.apache.commons.ssl.PKCS8Key) ClientSessionEvent(org.apache.sshd.client.session.ClientSession.ClientSessionEvent) SshClient(org.apache.sshd.client.SshClient) RequiredServerKeyVerifier(org.apache.sshd.client.keyverifier.RequiredServerKeyVerifier) ClientSession(org.apache.sshd.client.session.ClientSession) ByteSource(com.google.common.io.ByteSource) ConnectFuture(org.apache.sshd.client.future.ConnectFuture) URL(java.net.URL) Test(org.junit.Test)
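
The ByteSource part of this test is small and easy to isolate. A minimal sketch, assuming the same test.pem classpath resource and leaving out the SSH handshake: Resources.asByteSource wraps the URL so the key bytes can be read, or hashed, without manual stream handling (HashCode and Hashing come from com.google.common.hash).

// Sketch only: the Resources/ByteSource usage on its own.
URL testPemURL = Resources.getResource(SshKeyFormatTest.class, "test.pem");
ByteSource source = Resources.asByteSource(testPemURL);
byte[] pemBytes = source.read();                        // full contents of the resource
HashCode pemDigest = source.hash(Hashing.sha256());     // digest of the key material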

Example 15 with ByteSource

Use of com.google.common.io.ByteSource in project bazel by bazelbuild.

In class FileSystemUtils, the method readContentWithLimit:

/**
   * Reads at most {@code limit} bytes from {@code inputFile} and returns it as a byte array.
   *
   * @throws IOException if there was an error.
   */
public static byte[] readContentWithLimit(Path inputFile, int limit) throws IOException {
    Preconditions.checkArgument(limit >= 0, "limit needs to be >=0, but it is %s", limit);
    ByteSource byteSource = asByteSource(inputFile);
    byte[] buffer = new byte[limit];
    try (InputStream inputStream = byteSource.openBufferedStream()) {
        int read = ByteStreams.read(inputStream, buffer, 0, limit);
        return read == limit ? buffer : Arrays.copyOf(buffer, read);
    }
}
Also used : InputStream(java.io.InputStream) ByteSource(com.google.common.io.ByteSource)
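
The same limit can also be enforced with ByteSource#slice, which returns a bounded view of the source. A sketch of an alternative, not the Bazel implementation, assuming the same asByteSource helper and Path type as the method above:

// Alternative sketch: let slice() cap the read instead of managing a fixed buffer.
public static byte[] readContentWithLimitSliced(Path inputFile, int limit) throws IOException {
    Preconditions.checkArgument(limit >= 0, "limit needs to be >=0, but it is %s", limit);
    // slice(offset, length) bounds how much read() can return; shorter files yield fewer bytes
    return asByteSource(inputFile).slice(0, limit).read();
}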

Aggregations

ByteSource (com.google.common.io.ByteSource): 138 usages
IOException (java.io.IOException): 58 usages
Test (org.junit.Test): 58 usages
InputStream (java.io.InputStream): 42 usages
ByteArrayInputStream (java.io.ByteArrayInputStream): 33 usages
File (java.io.File): 33 usages
ContentItemImpl (ddf.catalog.content.data.impl.ContentItemImpl): 18 usages
Metacard (ddf.catalog.data.Metacard): 17 usages
ContentItem (ddf.catalog.content.data.ContentItem): 16 usages
StringWriter (java.io.StringWriter): 14 usages
FileInputStream (java.io.FileInputStream): 13 usages
Test (org.junit.jupiter.api.Test): 12 usages
URI (java.net.URI): 11 usages
Path (java.nio.file.Path): 11 usages
ArrayList (java.util.ArrayList): 11 usages
URL (java.net.URL): 10 usages
CreateStorageRequestImpl (ddf.catalog.content.operation.impl.CreateStorageRequestImpl): 9 usages
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 9 usages
TemporaryFileBackedOutputStream (org.codice.ddf.platform.util.TemporaryFileBackedOutputStream): 9 usages
FilterInputStream (java.io.FilterInputStream): 8 usages