Use of com.google.cloud.spanner.Mutation in the java-docs-samples project by GoogleCloudPlatform:
the main method of the SpannerGroupWrite class.
public static void main(String[] args) {
    // Parse and validate pipeline options from the command line.
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline pipeline = Pipeline.create(options);
    String instance = options.getInstanceId();
    String database = options.getDatabaseId();
    // Each line of the input file is one suspicious user id.
    PCollection<String> suspiciousUserIds =
        pipeline.apply(TextIO.read().from(options.getSuspiciousUsersFile()));
    // Captured by the anonymous function below, so it must be (effectively) final.
    final Timestamp timestamp = Timestamp.now();
    // [START spanner_dataflow_writegroup]
    PCollection<MutationGroup> mutations =
        suspiciousUserIds.apply(
            MapElements.via(
                new SimpleFunction<String, MutationGroup>() {

                  @Override
                  public MutationGroup apply(String userId) {
                    // Immediately block the user.
                    Mutation userMutation =
                        Mutation.newUpdateBuilder("Users")
                            .set("id").to(userId)
                            .set("state").to("BLOCKED")
                            .build();
                    long generatedId =
                        Hashing.sha1()
                            .newHasher()
                            .putString(userId, Charsets.UTF_8)
                            .putLong(timestamp.getSeconds())
                            .putLong(timestamp.getNanos())
                            .hash()
                            .asLong();
                    // Add an entry to pending review requests.
                    Mutation pendingReview =
                        Mutation.newInsertOrUpdateBuilder("PendingReviews")
                            .set("id").to(generatedId) // Must be deterministically generated.
                            .set("userId").to(userId)
                            .set("action").to("REVIEW ACCOUNT")
                            .set("note").to("Suspicious activity detected.")
                            .build();
                    return MutationGroup.create(userMutation, pendingReview);
                  }
                }));
    mutations.apply(
        SpannerIO.write().withInstanceId(instance).withDatabaseId(database).grouped());
    // [END spanner_dataflow_writegroup]
    pipeline.run().waitUntilFinish();
}
Use of com.google.cloud.spanner.Mutation in the java-docs-samples project by GoogleCloudPlatform:
the main method of the SpannerWrite class.
public static void main(String[] args) {
    // Parse and validate pipeline options from the command line.
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline pipeline = Pipeline.create(options);
    String instance = options.getInstanceId();
    String database = options.getDatabaseId();

    // Read singers from a tab-delimited file
    pipeline
        .apply("ReadSingers", TextIO.read().from(options.getSingersFilename()))
        .apply("ParseSingers", ParDo.of(new ParseSinger()))
        .apply(
            "CreateSingerMutation",
            ParDo.of(
                new DoFn<Singer, Mutation>() {
                  @ProcessElement
                  public void processElement(ProcessContext c) {
                    Singer singer = c.element();
                    // One insert-or-update row per parsed singer.
                    c.output(
                        Mutation.newInsertOrUpdateBuilder("singers")
                            .set("singerId").to(singer.singerId)
                            .set("firstName").to(singer.firstName)
                            .set("lastName").to(singer.lastName)
                            .build());
                  }
                }))
        .apply(
            "WriteSingers",
            SpannerIO.write().withInstanceId(instance).withDatabaseId(database));

    // Read albums from a tab-delimited file
    PCollection<Album> albums =
        pipeline
            .apply("ReadAlbums", TextIO.read().from(options.getAlbumsFilename()))
            .apply("ParseAlbums", ParDo.of(new ParseAlbum()));

    // [START spanner_dataflow_write]
    albums
        .apply(
            "CreateAlbumMutation",
            ParDo.of(
                new DoFn<Album, Mutation>() {
                  @ProcessElement
                  public void processElement(ProcessContext c) {
                    Album album = c.element();
                    // One insert-or-update row per parsed album.
                    c.output(
                        Mutation.newInsertOrUpdateBuilder("albums")
                            .set("singerId").to(album.singerId)
                            .set("albumId").to(album.albumId)
                            .set("albumTitle").to(album.albumTitle)
                            .build());
                  }
                }))
        .apply(
            "WriteAlbums",
            SpannerIO.write().withInstanceId(instance).withDatabaseId(database));
    // [END spanner_dataflow_write]

    pipeline.run().waitUntilFinish();
}
Use of com.google.cloud.spanner.Mutation in the java-docs-samples project by GoogleCloudPlatform:
the setUp method of the SpannerGroupWriteIT class.
@Before
public void setUp() throws Exception {
    // Test fixture: (re)creates a scratch database and seeds it with 20 active
    // users plus a temp file of 10 "suspicious" user ids for the pipeline to read.
    instanceId = System.getProperty("spanner.test.instance");
    databaseId = "df-spanner-groupwrite-it";
    spannerOptions = SpannerOptions.getDefaultInstance();
    spanner = spannerOptions.getService();
    DatabaseAdminClient adminClient = spanner.getDatabaseAdminClient();
    // Drop any database left over from a previous run so the test starts clean.
    try {
        adminClient.dropDatabase(instanceId, databaseId);
    } catch (SpannerException ignored) {
        // Database does not exist yet; nothing to drop.
    }
    Operation<Database, CreateDatabaseMetadata> op =
        adminClient.createDatabase(
            instanceId,
            databaseId,
            Arrays.asList(
                "CREATE TABLE users ("
                    + "id STRING(MAX) NOT NULL, state STRING(MAX) NOT NULL) PRIMARY KEY (id)",
                "CREATE TABLE PendingReviews (id INT64, action STRING(MAX), "
                    + "note STRING(MAX), userId STRING(MAX),) PRIMARY KEY (id)"));
    op.waitFor();
    DatabaseClient dbClient = getDbClient();
    // Seed 20 active users; the pipeline under test is expected to block a subset.
    List<Mutation> mutations = new ArrayList<>();
    for (int i = 0; i < 20; i++) {
        mutations.add(
            Mutation.newInsertBuilder("users")
                .set("id").to(Integer.toString(i))
                .set("state").to("ACTIVE")
                .build());
    }
    TransactionRunner runner = dbClient.readWriteTransaction();
    // TransactionCallable is a functional interface, so a lambda suffices here.
    runner.run(
        tx -> {
            tx.buffer(mutations);
            return null;
        });
    // Write ids 0..9, one per line, to a temp file consumed by the pipeline.
    String content =
        IntStream.range(0, 10).mapToObj(Integer::toString).collect(Collectors.joining("\n"));
    tempPath = Files.createTempFile("suspicious-ids", "txt");
    // Specify the charset explicitly instead of relying on the platform default.
    Files.write(tempPath, content.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
Use of com.google.cloud.spanner.Mutation in the beam project by apache:
the countOf method of the MutationCellCounter class.
/**
 * Count the number of cells modified by {@link MutationGroup}.
 */
public static long countOf(SpannerSchema spannerSchema, MutationGroup mutationGroup) {
    long totalCells = 0L;
    for (Mutation mutation : mutationGroup) {
        String table = mutation.getTable();
        if (mutation.getOperation() != Op.DELETE) {
            // Non-delete mutations: sum the per-column cell counts of the
            // columns named in the mutation.
            for (String column : mutation.getColumns()) {
                totalCells += spannerSchema.getCellsMutatedPerColumn(table, column);
            }
            continue;
        }
        // Deletes: only point deletes are counted; range deletes are skipped
        // because there is no clear way to estimate them.
        if (isPointDelete(mutation)) {
            long keyCount = Iterables.size(mutation.getKeySet().getKeys());
            totalCells += keyCount * spannerSchema.getCellsMutatedPerRow(table);
        }
    }
    return totalCells;
}
Use of com.google.cloud.spanner.Mutation in the beam project by apache:
the singleMutationPipelineNoProjectId method of the SpannerIOWriteTest class.
@Test
public void singleMutationPipelineNoProjectId() throws Exception {
    // A single mutation should be batched and written even when no project id
    // is configured on the write transform.
    Mutation single = m(2L);
    pipeline
        .apply(Create.of(single))
        .apply(
            SpannerIO.write()
                .withInstanceId("test-instance")
                .withDatabaseId("test-database")
                .withServiceFactory(serviceFactory));
    pipeline.run();
    verifyBatches(batch(m(2L)));
}
Aggregations