Use of com.google.cloud.ReadChannel in project nifi by apache: class FetchGCSObject, method onTrigger.
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final long startNanos = System.nanoTime();
    String bucketName = context.getProperty(BUCKET).evaluateAttributeExpressions(flowFile).getValue();
    String key = context.getProperty(KEY).evaluateAttributeExpressions(flowFile).getValue();
    Long generation = context.getProperty(GENERATION).evaluateAttributeExpressions(flowFile).asLong();
    String encryptionKey = context.getProperty(ENCRYPTION_KEY).evaluateAttributeExpressions(flowFile).getValue();
    final Storage storage = getCloudService();
    final Map<String, String> attributes = new HashMap<>();
    final BlobId blobId = BlobId.of(bucketName, key, generation);
    try {
        final List<Storage.BlobSourceOption> blobSourceOptions = new ArrayList<>(2);
        if (encryptionKey != null) {
            blobSourceOptions.add(Storage.BlobSourceOption.decryptionKey(encryptionKey));
        }
        if (generation != null) {
            blobSourceOptions.add(Storage.BlobSourceOption.generationMatch());
        }
        final Blob blob = storage.get(blobId);
        if (blob == null) {
            throw new StorageException(404, "Blob " + blobId + " not found");
        }
        final ReadChannel reader = storage.reader(blobId, blobSourceOptions.toArray(new Storage.BlobSourceOption[blobSourceOptions.size()]));
        flowFile = session.importFrom(Channels.newInputStream(reader), flowFile);
        attributes.put(BUCKET_ATTR, blob.getBucket());
        attributes.put(KEY_ATTR, blob.getName());
        if (blob.getSize() != null) {
            attributes.put(SIZE_ATTR, String.valueOf(blob.getSize()));
        }
        if (blob.getCacheControl() != null) {
            attributes.put(CACHE_CONTROL_ATTR, blob.getCacheControl());
        }
        if (blob.getComponentCount() != null) {
            attributes.put(COMPONENT_COUNT_ATTR, String.valueOf(blob.getComponentCount()));
        }
        if (blob.getContentEncoding() != null) {
            attributes.put(CONTENT_ENCODING_ATTR, blob.getContentEncoding());
        }
        if (blob.getContentLanguage() != null) {
            attributes.put(CONTENT_LANGUAGE_ATTR, blob.getContentLanguage());
        }
        if (blob.getContentType() != null) {
            attributes.put(CoreAttributes.MIME_TYPE.key(), blob.getContentType());
        }
        if (blob.getCrc32c() != null) {
            attributes.put(CRC32C_ATTR, blob.getCrc32c());
        }
        if (blob.getCustomerEncryption() != null) {
            final BlobInfo.CustomerEncryption encryption = blob.getCustomerEncryption();
            attributes.put(ENCRYPTION_ALGORITHM_ATTR, encryption.getEncryptionAlgorithm());
            attributes.put(ENCRYPTION_SHA256_ATTR, encryption.getKeySha256());
        }
        if (blob.getEtag() != null) {
            attributes.put(ETAG_ATTR, blob.getEtag());
        }
        if (blob.getGeneratedId() != null) {
            attributes.put(GENERATED_ID_ATTR, blob.getGeneratedId());
        }
        if (blob.getGeneration() != null) {
            attributes.put(GENERATION_ATTR, String.valueOf(blob.getGeneration()));
        }
        if (blob.getMd5() != null) {
            attributes.put(MD5_ATTR, blob.getMd5());
        }
        if (blob.getMediaLink() != null) {
            attributes.put(MEDIA_LINK_ATTR, blob.getMediaLink());
        }
        if (blob.getMetageneration() != null) {
            attributes.put(METAGENERATION_ATTR, String.valueOf(blob.getMetageneration()));
        }
        if (blob.getOwner() != null) {
            final Acl.Entity entity = blob.getOwner();
            if (entity instanceof Acl.User) {
                attributes.put(OWNER_ATTR, ((Acl.User) entity).getEmail());
                attributes.put(OWNER_TYPE_ATTR, "user");
            } else if (entity instanceof Acl.Group) {
                attributes.put(OWNER_ATTR, ((Acl.Group) entity).getEmail());
                attributes.put(OWNER_TYPE_ATTR, "group");
            } else if (entity instanceof Acl.Domain) {
                attributes.put(OWNER_ATTR, ((Acl.Domain) entity).getDomain());
                attributes.put(OWNER_TYPE_ATTR, "domain");
            } else if (entity instanceof Acl.Project) {
                attributes.put(OWNER_ATTR, ((Acl.Project) entity).getProjectId());
                attributes.put(OWNER_TYPE_ATTR, "project");
            }
        }
        if (blob.getSelfLink() != null) {
            attributes.put(URI_ATTR, blob.getSelfLink());
        }
        if (blob.getContentDisposition() != null) {
            attributes.put(CONTENT_DISPOSITION_ATTR, blob.getContentDisposition());
            final Util.ParsedContentDisposition parsedContentDisposition = Util.parseContentDisposition(blob.getContentDisposition());
            if (parsedContentDisposition != null) {
                attributes.put(CoreAttributes.FILENAME.key(), parsedContentDisposition.getFileName());
            }
        }
        if (blob.getCreateTime() != null) {
            attributes.put(CREATE_TIME_ATTR, String.valueOf(blob.getCreateTime()));
        }
        if (blob.getUpdateTime() != null) {
            attributes.put(UPDATE_TIME_ATTR, String.valueOf(blob.getUpdateTime()));
        }
    } catch (StorageException e) {
        getLogger().error(e.getMessage(), e);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    if (!attributes.isEmpty()) {
        flowFile = session.putAllAttributes(flowFile, attributes);
    }
    session.transfer(flowFile, REL_SUCCESS);
    final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    getLogger().info("Successfully retrieved GCS Object for {} in {} millis; routing to success", new Object[] { flowFile, millis });
    session.getProvenanceReporter().fetch(flowFile, "https://" + bucketName + ".storage.googleapis.com/" + key, millis);
}
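The same read pattern works outside NiFi. A minimal hedged sketch that streams an object to a local file through Channels.newInputStream; the bucket name, object name, and target path are illustrative placeholders, not from the source:

// Hedged sketch; "my-bucket", "my-object", and the target path are placeholders.
Storage storage = StorageOptions.getDefaultInstance().getService();
BlobId blobId = BlobId.of("my-bucket", "my-object");
try (ReadChannel reader = storage.reader(blobId);
        InputStream in = Channels.newInputStream(reader)) {
    // Stream the object to a local file instead of a FlowFile.
    Files.copy(in, Paths.get("/tmp/my-object"), StandardCopyOption.REPLACE_EXISTING);
}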
Use of com.google.cloud.ReadChannel in project google-cloud-java by GoogleCloudPlatform: class BlobReadChannelTest, method testSaveAndRestore.
@Test
public void testSaveAndRestore() throws IOException {
    byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE);
    byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE);
    ByteBuffer firstReadBuffer = ByteBuffer.allocate(42);
    ByteBuffer secondReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE);
    expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)).andReturn(Tuple.of("etag", firstResult));
    expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE)).andReturn(Tuple.of("etag", secondResult));
    replay(storageRpcMock);
    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    reader.read(firstReadBuffer);
    RestorableState<ReadChannel> readerState = reader.capture();
    ReadChannel restoredReader = readerState.restore();
    restoredReader.read(secondReadBuffer);
    assertArrayEquals(Arrays.copyOf(firstResult, firstReadBuffer.capacity()), firstReadBuffer.array());
    assertArrayEquals(secondResult, secondReadBuffer.array());
}
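In application code, the same capture/restore mechanism can checkpoint a long download. A hedged sketch, assuming reader is an open ReadChannel; the buffer size is an arbitrary choice:

ByteBuffer buffer = ByteBuffer.allocate(64 * 1024);
reader.read(buffer);
// Snapshot the current read position; the state can be persisted and restored later.
RestorableState<ReadChannel> state = reader.capture();
try (ReadChannel resumed = state.restore()) {
    buffer.clear();
    resumed.read(buffer); // continues where the captured channel left off
}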
Use of com.google.cloud.ReadChannel in project google-cloud-java by GoogleCloudPlatform: class SerializationTest, method restorableObjects.
@Override
protected Restorable<?>[] restorableObjects() {
    StorageOptions options = StorageOptions.newBuilder().setProjectId("p2").build();
    ReadChannel reader = new BlobReadChannel(options, BlobId.of("b", "n"), EMPTY_RPC_OPTIONS);
    // avoid closing when you don't want partial writes to GCS upon failure
    @SuppressWarnings("resource")
    BlobWriteChannel writer = new BlobWriteChannel(options, BlobInfo.newBuilder(BlobId.of("b", "n")).build(), "upload-id");
    return new Restorable<?>[] { reader, writer };
}
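The captured states are what this test round-trips. A hedged sketch of persisting one with plain Java serialization; serializeState is a hypothetical helper, not part of the library:

// Hedged sketch; the helper name is hypothetical.
static byte[] serializeState(ReadChannel reader) throws IOException {
    RestorableState<ReadChannel> state = reader.capture();
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
        // The concrete state classes are Serializable; round-tripping them is
        // what this SerializationTest exercises.
        out.writeObject(state);
    }
    return bytes.toByteArray();
}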
Use of com.google.cloud.ReadChannel in project google-cloud-java by GoogleCloudPlatform: class BlobTest, method testReader.
@Test
public void testReader() throws Exception {
    initializeExpectedBlob(2);
    ReadChannel channel = createMock(ReadChannel.class);
    expect(storage.getOptions()).andReturn(mockOptions);
    expect(storage.reader(BLOB_INFO.getBlobId())).andReturn(channel);
    replay(storage);
    initializeBlob();
    assertSame(channel, blob.reader());
}
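The mock expectation above encodes that blob.reader() delegates to storage.reader(blob.getBlobId()). A hedged usage sketch, assuming blob was fetched with storage.get(...):

try (ReadChannel channel = blob.reader()) {
    ByteBuffer buf = ByteBuffer.allocate(8192);
    channel.read(buf); // reads up to 8192 bytes from the start of the object
}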
Use of com.google.cloud.ReadChannel in project google-cloud-java by GoogleCloudPlatform: class StorageImplTest, method testReaderWithOptionsFromBlobId.
@Test
public void testReaderWithOptionsFromBlobId() throws IOException {
    byte[] result = new byte[DEFAULT_CHUNK_SIZE];
    EasyMock.expect(storageRpcMock.read(BLOB_INFO1.getBlobId().toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE)).andReturn(Tuple.of("etag", result));
    EasyMock.replay(storageRpcMock);
    initializeService();
    ReadChannel channel = storage.reader(BLOB_INFO1.getBlobId(), BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION);
    assertNotNull(channel);
    assertTrue(channel.isOpen());
    channel.read(ByteBuffer.allocate(42));
}
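Outside a mocked test, a ReadChannel is typically drained in a loop. A minimal hedged sketch, assuming storage is a real Storage client and blobId names an existing object; the 64 KiB buffer is an arbitrary choice:

ByteBuffer buffer = ByteBuffer.allocate(64 * 1024);
try (ReadChannel channel = storage.reader(blobId)) {
    channel.seek(0); // optional: byte offset to start reading from
    while (channel.read(buffer) > 0) {
        buffer.flip();
        // consume buffer contents here
        buffer.clear();
    }
}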