Use of com.google.common.io.ByteSource in project buck by facebook:
the class ThriftArtifactCache, method storeImpl.
@Override
protected void storeImpl(
    final ArtifactInfo info,
    final Path file,
    final HttpArtifactCacheEvent.Finished.Builder eventBuilder)
    throws IOException {
  // Stream the artifact lazily from disk so large artifacts are never fully buffered
  // in memory; each openStream() call re-opens the file.
  final ByteSource artifact =
      new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
          return projectFilesystem.newFileInputStream(file);
        }
      };

  BuckCacheStoreRequest storeRequest = new BuckCacheStoreRequest();
  ArtifactMetadata artifactMetadata =
      infoToMetadata(info, artifact, repository, scheduleType, distributedBuildModeEnabled);
  storeRequest.setMetadata(artifactMetadata);

  PayloadInfo payloadInfo = new PayloadInfo();
  long artifactSizeBytes = artifact.size();
  payloadInfo.setSizeBytes(artifactSizeBytes);

  BuckCacheRequest cacheRequest = new BuckCacheRequest();
  cacheRequest.addToPayloads(payloadInfo);
  cacheRequest.setType(BuckCacheRequestType.STORE);
  cacheRequest.setStoreRequest(storeRequest);

  if (LOG.isVerboseEnabled()) {
    LOG.verbose(
        String.format(
            "Storing artifact with metadata: [%s].",
            ThriftUtil.thriftToDebugJson(artifactMetadata)));
  }

  final ThriftArtifactCacheProtocol.Request request =
      ThriftArtifactCacheProtocol.createRequest(PROTOCOL, cacheRequest, artifact);
  Request.Builder builder = toOkHttpRequest(request);
  eventBuilder.getStoreBuilder().setRequestSizeBytes(request.getRequestLengthBytes());

  try (HttpResponse httpResponse = storeClient.makeRequest(hybridThriftEndpoint, builder)) {
    if (httpResponse.statusCode() != 200) {
      // Non-200 means the store never happened; surface full context for debugging.
      // NOTE: the original message concatenated "... bytes] " + " to url ...", producing a
      // double space; fixed here.
      throw new IOException(
          String.format(
              "Failed to store cache artifact with HTTP status code [%d:%s] "
                  + "to url [%s] for build target [%s] that has size [%d] bytes.",
              httpResponse.statusCode(),
              httpResponse.statusMessage(),
              httpResponse.requestUrl(),
              info.getBuildTarget().orElse(null),
              artifactSizeBytes));
    }

    try (ThriftArtifactCacheProtocol.Response response =
        ThriftArtifactCacheProtocol.parseResponse(PROTOCOL, httpResponse.getBody())) {
      BuckCacheResponse cacheResponse = response.getThriftData();
      if (!cacheResponse.isWasSuccessful()) {
        // Reuse cacheResponse instead of re-fetching the thrift payload via
        // response.getThriftData().
        reportFailure(
            "Failed to store artifact with thriftErrorMessage=[%s] "
                + "url=[%s] artifactSizeBytes=[%d]",
            cacheResponse.getErrorMessage(),
            httpResponse.requestUrl(),
            artifactSizeBytes);
      }
      eventBuilder
          .getStoreBuilder()
          .setArtifactContentHash(storeRequest.getMetadata().artifactPayloadMd5);
      eventBuilder.getStoreBuilder().setWasStoreSuccessful(cacheResponse.isWasSuccessful());
      if (LOG.isDebugEnabled()) {
        LOG.debug(
            "Debug info for cache store request: artifactMetadata=[%s] response=[%s]",
            ThriftUtil.thriftToDebugJson(artifactMetadata),
            ThriftUtil.thriftToDebugJson(cacheResponse));
      }
    }
  }
}
Use of com.google.common.io.ByteSource in project buck by facebook:
the class DefaultProjectFilesystemDelegate, method computeSha1.
@Override
public Sha1HashCode computeSha1(Path pathRelativeToProjectRootOrJustAbsolute) throws IOException {
  final Path resolved = getPathForRelativePath(pathRelativeToProjectRootOrJustAbsolute);
  // `Files.hash(resolved.toFile(), Hashing.sha1())` would be the obvious choice, but when the
  // path is backed by Jimfs its toFile() throws UnsupportedOperationException. Streaming the
  // bytes through java.nio.file.Files.newInputStream() sidesteps that.
  ByteSource bytes =
      new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
          return Files.newInputStream(resolved);
        }
      };
  return Sha1HashCode.fromHashCode(bytes.hash(Hashing.sha1()));
}
Use of com.google.common.io.ByteSource in project druid by druid-io:
the class GoogleTaskLogsTest, method testStreamTaskLogWithPositiveOffset.
@Test
public void testStreamTaskLogWithPositiveOffset() throws Exception {
  final String testLog = "hello this is a log";
  final String logPath = prefix + "/" + taskid;
  // Mock a stored log of known size so streamTaskLog can seek past the offset.
  expect(storage.exists(bucket, logPath)).andReturn(true);
  expect(storage.size(bucket, logPath)).andReturn((long) testLog.length());
  expect(storage.get(bucket, logPath)).andReturn(new ByteArrayInputStream(testLog.getBytes(Charsets.UTF_8)));
  replayAll();
  final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(taskid, 5);
  final StringWriter writer = new StringWriter();
  IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
  // JUnit convention: expected value first, actual second (the original had them
  // reversed, which produces misleading failure messages).
  Assert.assertEquals(testLog.substring(5), writer.toString());
  verifyAll();
}
Use of com.google.common.io.ByteSource in project druid by druid-io:
the class AzureTaskLogsTest, method testStreamTaskLogWithPositiveOffset.
@Test
public void testStreamTaskLogWithPositiveOffset() throws Exception {
  final String testLog = "hello this is a log";
  final String blobPath = prefix + "/" + taskid + "/log";
  // Mock a stored blob of known size so streamTaskLog can skip to the offset.
  expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
  expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
  expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(new ByteArrayInputStream(testLog.getBytes(Charsets.UTF_8)));
  replayAll();
  final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(taskid, 5);
  final StringWriter writer = new StringWriter();
  IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
  // JUnit convention: expected value first, actual second (the original had them
  // reversed, which produces misleading failure messages).
  Assert.assertEquals(testLog.substring(5), writer.toString());
  verifyAll();
}
Use of com.google.common.io.ByteSource in project druid by druid-io:
the class AzureTaskLogsTest, method testStreamTaskLogWithoutOffset.
@Test
public void testStreamTaskLogWithoutOffset() throws Exception {
  final String testLog = "hello this is a log";
  final String blobPath = prefix + "/" + taskid + "/log";
  // Offset 0: the full log should come back unchanged.
  expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
  expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
  expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(new ByteArrayInputStream(testLog.getBytes(Charsets.UTF_8)));
  replayAll();
  final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(taskid, 0);
  final StringWriter writer = new StringWriter();
  IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
  // JUnit convention: expected value first, actual second (the original had them
  // reversed, which produces misleading failure messages).
  Assert.assertEquals(testLog, writer.toString());
  verifyAll();
}
Aggregations