Use of com.google.cloud.firestore.WriteBatch in project java-docs-samples by GoogleCloudPlatform.
From the class ManageDataSnippets, method writeBatch.
/**
 * Write documents in a batch.
 */
void writeBatch() throws Exception {
  db.collection("cities").document("SF").set(new City()).get();
  db.collection("cities").document("LA").set(new City()).get();

  // [START fs_write_batch]
  // Get a new write batch
  WriteBatch batch = db.batch();

  // Set the value of 'NYC'
  DocumentReference nycRef = db.collection("cities").document("NYC");
  batch.set(nycRef, new City());

  // Update the population of 'SF'
  DocumentReference sfRef = db.collection("cities").document("SF");
  batch.update(sfRef, "population", 1000000L);

  // Delete the city 'LA'
  DocumentReference laRef = db.collection("cities").document("LA");
  batch.delete(laRef);

  // asynchronously commit the batch
  ApiFuture<List<WriteResult>> future = batch.commit();
  // future.get() blocks on batch commit operation
  for (WriteResult result : future.get()) {
    System.out.println("Update time : " + result.getUpdateTime());
  }
  // [END fs_write_batch]
}
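The sample above blocks on future.get() to print each WriteResult. Where blocking is not acceptable, the same commit can be observed with a callback. The following is a minimal sketch, not part of the original sample: the method name commitBatchWithCallback and the 'TOK' document are hypothetical, it assumes a Firestore instance is passed in, and ApiFutures/MoreExecutors are the same api-common and Guava utilities used in the Beam example below.

import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutureCallback;
import com.google.api.core.ApiFutures;
import com.google.cloud.firestore.DocumentReference;
import com.google.cloud.firestore.Firestore;
import com.google.cloud.firestore.WriteBatch;
import com.google.cloud.firestore.WriteResult;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.MoreExecutors;
import java.util.List;

// Sketch: commit a batch and react to the result via a callback instead of blocking.
void commitBatchWithCallback(Firestore db) {
  WriteBatch batch = db.batch();
  // 'TOK' is a hypothetical document used only for illustration
  DocumentReference tokyoRef = db.collection("cities").document("TOK");
  batch.set(tokyoRef, ImmutableMap.of("name", "Tokyo"));

  // Commit asynchronously and handle success/failure in a callback
  ApiFuture<List<WriteResult>> future = batch.commit();
  ApiFutures.addCallback(
      future,
      new ApiFutureCallback<List<WriteResult>>() {
        @Override
        public void onSuccess(List<WriteResult> results) {
          results.forEach(r -> System.out.println("Update time : " + r.getUpdateTime()));
        }

        @Override
        public void onFailure(Throwable t) {
          System.err.println("Batch commit failed: " + t);
        }
      },
      MoreExecutors.directExecutor());
}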
Use of com.google.cloud.firestore.WriteBatch in project beam by apache.
From the class BaseFirestoreIT, method listCollections.
@Test
@TestDataLayoutHint(DataLayout.Deep)
public final void listCollections() throws Exception {
  // Verification and cleanup of nested collections is much slower because each document
  // requires an RPC to find its collections; instead of the usual size, use 20
  // to keep the test quick.
  List<String> collectionIds =
      IntStream.rangeClosed(1, 20).mapToObj(i -> helper.colId()).collect(Collectors.toList());

  ApiFutures.transform(
          ApiFutures.allAsList(
              chunkUpDocIds(collectionIds)
                  .map(
                      chunk -> {
                        WriteBatch batch = helper.getFs().batch();
                        chunk.stream()
                            .map(col -> helper.getBaseDocument().collection(col).document())
                            .forEach(ref -> batch.set(ref, ImmutableMap.of("foo", "bar")));
                        return batch.commit();
                      })
                  .collect(Collectors.toList())),
          FirestoreTestingHelper.flattenListList(),
          MoreExecutors.directExecutor())
      .get(10, TimeUnit.SECONDS);

  PCollection<String> actualCollectionIds =
      testPipeline
          .apply(Create.of(""))
          .apply(getListCollectionIdsPTransform(testName.getMethodName()))
          .apply(
              FirestoreIO.v1()
                  .read()
                  .listCollectionIds()
                  .withRpcQosOptions(rpcQosOptions)
                  .build());
  PAssert.that(actualCollectionIds).containsInAnyOrder(collectionIds);
  testPipeline.run(options);
}
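The test above relies on a project helper, chunkUpDocIds, to split the generated ids into chunks before building one WriteBatch per chunk, since Firestore documents a limit on the number of writes allowed in a single batched commit (500 at the time of writing). A minimal sketch of that chunk-then-batch pattern follows; the method name seedCollections and the chunk size are illustrative assumptions, not the Beam project's actual helper, which also uses FirestoreTestingHelper internals not shown here.

import com.google.api.core.ApiFuture;
import com.google.cloud.firestore.DocumentReference;
import com.google.cloud.firestore.Firestore;
import com.google.cloud.firestore.WriteBatch;
import com.google.cloud.firestore.WriteResult;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch: write one placeholder document per collection id, chunked so
// that no single WriteBatch exceeds Firestore's per-commit write limit.
static List<ApiFuture<List<WriteResult>>> seedCollections(
    Firestore fs, DocumentReference base, List<String> collectionIds) {
  List<ApiFuture<List<WriteResult>>> commits = new ArrayList<>();
  for (List<String> chunk : Lists.partition(collectionIds, 500)) {
    WriteBatch batch = fs.batch();
    // One auto-id document per collection id, all queued into the same batch
    chunk.forEach(
        col -> batch.set(base.collection(col).document(), ImmutableMap.of("foo", "bar")));
    commits.add(batch.commit());
  }
  return commits;
}

The returned futures can then be combined with ApiFutures.allAsList(...) and awaited, as the test does before running the pipeline.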