Use of org.dataportabilityproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class JobPollingServiceTest, method pollingLifeCycle.
// TODO(data-portability/issues/43): Make this an integration test which uses both the API and
// worker, rather than simulating API calls, in case this test ever diverges from what the API
// actually does.
@Test
public void pollingLifeCycle() throws Exception {
when(asymmetricKeyGenerator.generate()).thenReturn(TEST_KEY_PAIR);
// Initial state
assertThat(JobMetadata.isInitialized()).isFalse();
// Run once with no data in the database
jobPollingService.runOneIteration();
assertThat(JobMetadata.isInitialized()).isFalse();
PortabilityJob job = store.findJob(TEST_ID);
// No existing ready job
assertThat(job).isNull();
// API inserts a job in the initial authorization state
job =
    PortabilityJob.builder()
        .setTransferDataType("photo")
        .setExportService("DummyExportService")
        .setImportService("DummyImportService")
        .setAndValidateJobAuthorization(
            JobAuthorization.builder()
                .setState(State.INITIAL)
                .setSessionSecretKey("fooBar")
                .build())
        .build();
store.createJob(TEST_ID, job);
// Verify initial authorization state
job = store.findJob(TEST_ID);
assertThat(job.jobAuthorization().state()).isEqualTo(State.INITIAL);
// no auth data should exist yet
assertThat(job.jobAuthorization().encryptedExportAuthData()).isNull();
assertThat(job.jobAuthorization().encryptedImportAuthData()).isNull();
// API atomically updates the job from 'initial' to 'creds available'
job =
    job.toBuilder()
        .setAndValidateJobAuthorization(
            job.jobAuthorization().toBuilder().setState(State.CREDS_AVAILABLE).build())
        .build();
store.updateJob(TEST_ID, job);
// Verify 'creds available' state
job = store.findJob(TEST_ID);
assertThat(job.jobAuthorization().state()).isEqualTo(State.CREDS_AVAILABLE);
// no auth data should exist yet
assertThat(job.jobAuthorization().encryptedExportAuthData()).isNull();
assertThat(job.jobAuthorization().encryptedImportAuthData()).isNull();
// Worker initiates the JobPollingService
jobPollingService.runOneIteration();
assertThat(JobMetadata.isInitialized()).isTrue();
assertThat(JobMetadata.getJobId()).isEqualTo(TEST_ID);
// Verify the assigned state: encryption key generated, but no auth data yet
job = store.findJob(TEST_ID);
assertThat(job.jobAuthorization().state())
    .isEqualTo(JobAuthorization.State.CREDS_ENCRYPTION_KEY_GENERATED);
assertThat(job.jobAuthorization().authPublicKey()).isNotEmpty();
// Client encrypts data and updates the job
job =
    job.toBuilder()
        .setAndValidateJobAuthorization(
            job.jobAuthorization()
                .toBuilder()
                .setEncryptedExportAuthData("dummy export data")
                .setEncryptedImportAuthData("dummy import data")
                .setState(State.CREDS_ENCRYPTED)
                .build())
        .build();
store.updateJob(TEST_ID, job);
// Run another iteration of the polling service
// Worker should pick up encrypted data and update job
jobPollingService.runOneIteration();
job = store.findJob(TEST_ID);
JobAuthorization jobAuthorization = job.jobAuthorization();
assertThat(jobAuthorization.state()).isEqualTo(JobAuthorization.State.CREDS_ENCRYPTED);
assertThat(jobAuthorization.encryptedExportAuthData()).isNotEmpty();
assertThat(jobAuthorization.encryptedImportAuthData()).isNotEmpty();
store.remove(TEST_ID);
}
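For quick reference, the JobAuthorization.State progression this test walks through (a summary of the snippet above only, not an exhaustive list of states):
// INITIAL                        -> set by the API when the job is created
// CREDS_AVAILABLE                -> set by the API once auth data is ready to be encrypted
// CREDS_ENCRYPTION_KEY_GENERATED -> set by the worker's JobPollingService, which also publishes authPublicKey
// CREDS_ENCRYPTED                -> set by the client after encrypting the export and import auth data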
Use of org.dataportabilityproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class StartJobAction, method handle.
/**
* Starts a job using the following flow:
* <li>Validate that auth data is present in the cookies
* <li>Set the job to state CREDS_AVAILABLE
* <li>Wait for a worker to be assigned
* <li>Once a worker is assigned, use its public key to encrypt the auth data from the cookies
* <li>Update the job with the encrypted auth data
*/
@Override
public StartJobActionResponse handle(StartJobActionRequest request) {
UUID jobId = request.getJobId();
// Update the job to indicate to worker processes that creds are available for encryption
updateStateToCredsAvailable(jobId);
// Poll and block until a public key is assigned to this job, e.g. from a specific worker
// instance
PortabilityJob job = pollForPublicKey(jobId);
// Update this job with credentials encrypted with a public key, e.g. for a specific worker
// instance
encryptAndUpdateJobWithCredentials(
    jobId,
    job,
    request.getEncryptedExportAuthCredential(),
    request.getEncryptedImportAuthCredential());
return StartJobActionResponse.create(jobId);
}
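The pollForPublicKey step referenced above is not shown in this snippet. A minimal sketch of what such a helper could look like, reusing only the JobStore and JobAuthorization accessors seen in the other snippets; the retry count and sleep interval are illustrative assumptions, not the project's values:
// Hypothetical sketch, not the project's implementation.
private PortabilityJob pollForPublicKey(UUID jobId) {
  for (int attempt = 0; attempt < 60; attempt++) {
    PortabilityJob job = store.findJob(jobId);
    if (job != null
        && job.jobAuthorization().state() == JobAuthorization.State.CREDS_ENCRYPTION_KEY_GENERATED
        && !Strings.isNullOrEmpty(job.jobAuthorization().authPublicKey())) {
      // A worker has claimed the job and published its public key.
      return job;
    }
    try {
      Thread.sleep(1000); // Illustrative polling interval.
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new RuntimeException("Interrupted while polling for a worker key", e);
    }
  }
  throw new IllegalStateException("No worker public key assigned for jobId: " + jobId);
}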
Use of org.dataportabilityproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class CosmosStoreTest, method verifyCreateAndFind.
@Test
@Ignore
public void verifyCreateAndFind() throws Exception {
PrimingRequest.Then.ThenBuilder thenInsert = PrimingRequest.then();
thenInsert.withVariableTypes(UUID, VARCHAR);
PrimingRequest createRequest =
    PrimingRequest.preparedStatementBuilder().withQuery(JOB_INSERT).withThen(thenInsert).build();
cassandra.primingClient().prime(createRequest);
PortabilityJob primeJob = PortabilityJob.builder().build();
java.util.UUID jobId = java.util.UUID.randomUUID();
Map row = Collections.singletonMap("job_data", new ObjectMapper().writeValueAsString(primeJob));
PrimingRequest.Then.ThenBuilder thenQuery = PrimingRequest.then();
PrimingRequest findRequest =
    PrimingRequest.preparedStatementBuilder()
        .withQuery(JOB_QUERY)
        .withThen(
            thenQuery
                .withVariableTypes(UUID)
                .withColumnTypes(ColumnMetadata.column("job_id", UUID))
                .withRows(row))
        .build();
cassandra.primingClient().prime(findRequest);
PrimingRequest.Then.ThenBuilder thenUpdate = PrimingRequest.then();
thenUpdate
    .withVariableTypes(VARCHAR, UUID)
    .withColumnTypes(
        ColumnMetadata.column("job_data", VARCHAR), ColumnMetadata.column("job_id", UUID));
PrimingRequest updateRequest =
    PrimingRequest.preparedStatementBuilder().withQuery(JOB_UPDATE).withThen(thenUpdate).build();
cassandra.primingClient().prime(updateRequest);
PrimingRequest.Then.ThenBuilder thenRemove = PrimingRequest.then();
thenRemove.withVariableTypes(UUID);
PrimingRequest removeRequest =
    PrimingRequest.preparedStatementBuilder().withQuery(JOB_DELETE).withThen(thenRemove).build();
cassandra.primingClient().prime(removeRequest);
PortabilityJob createJob = PortabilityJob.builder().build();
cosmosStore.createJob(jobId, createJob);
PortabilityJob copy =
    cosmosStore.findJob(jobId).toBuilder().setState(PortabilityJob.State.COMPLETE).build();
cosmosStore.updateJob(jobId, copy);
cosmosStore.remove(jobId);
PreparedStatementExecution expectedStatement =
    PreparedStatementExecution.builder()
        .withPreparedStatementText(JOB_DELETE)
        .withConsistency("LOCAL_ONE")
        .build();
Assert.assertThat(
    cassandra.activityClient().retrievePreparedStatementExecutions(),
    preparedStatementRecorded(expectedStatement));
}
Use of org.dataportabilityproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class GoogleJobStore, method updateJob.
/**
* Verifies a {@code PortabilityJob} already exists for {@code jobId}, and updates the entry to
* {@code job}, within a {@code Transaction}. If {@code validator} is non-null,
* validator.validate() is called first in the transaction.
*
* @throws IOException if a job didn't already exist for {@code jobId} or there was a problem
* updating it
* @throws IllegalStateException if validator.validate() failed
*/
@Override
public void updateJob(UUID jobId, PortabilityJob job, JobUpdateValidator validator)
    throws IOException {
Preconditions.checkNotNull(jobId);
Transaction transaction = datastore.newTransaction();
Key key = getKey(jobId);
try {
Entity previousEntity = transaction.get(key);
if (previousEntity == null) {
throw new IOException("Could not find record for jobId: " + jobId);
}
if (validator != null) {
PortabilityJob previousJob = PortabilityJob.fromMap(getProperties(previousEntity));
validator.validate(previousJob, job);
}
Entity newEntity = createEntity(key, job.toMap());
transaction.put(newEntity);
transaction.commit();
} catch (Throwable t) {
transaction.rollback();
throw new IOException("Failed atomic update of jobId: " + jobId, t);
}
}
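As a usage illustration, a caller could pass a validator that rejects illegal transitions. A minimal sketch, assuming JobUpdateValidator is a single-method interface matching the validator.validate(previousJob, job) call above and that PortabilityJob exposes the state() accessor implied by setState in the Cosmos snippet; the COMPLETE check itself is only an example:
// Hypothetical validator, written against the validate(previous, updated) call shown above.
JobUpdateValidator refuseUpdatesToCompletedJobs =
    (previous, updated) -> {
      if (previous.state() == PortabilityJob.State.COMPLETE) {
        throw new IllegalStateException("Job is already COMPLETE and cannot be updated");
      }
    };
// store, jobId, and updatedJob as in the surrounding snippets.
store.updateJob(jobId, updatedJob, refuseUpdatesToCompletedJobs);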
Use of org.dataportabilityproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class Oauth2CallbackHandler, method handleExchange.
private String handleExchange(HttpExchange exchange) throws IOException {
String redirect = "/error";
try {
Headers requestHeaders = exchange.getRequestHeaders();
String requestURL =
    ReferenceApiUtils.createURL(
        requestHeaders.getFirst(HttpHeaders.HOST), exchange.getRequestURI().toString(), IS_LOCAL);
AuthorizationCodeResponseUrl authResponse = new AuthorizationCodeResponseUrl(requestURL);
// check for user-denied error
if (authResponse.getError() != null) {
logger.warn("Authorization DENIED: {} Redirecting to /error", authResponse.getError());
return redirect;
}
// retrieve cookie from exchange
Map<String, HttpCookie> httpCookies = ReferenceApiUtils.getCookies(requestHeaders);
HttpCookie encodedIdCookie = httpCookies.get(JsonKeys.ID_COOKIE_KEY);
Preconditions.checkArgument(
    encodedIdCookie != null && !Strings.isNullOrEmpty(encodedIdCookie.getValue()),
    "Encoded Id cookie required");
UUID jobId = ReferenceApiUtils.decodeJobId(encodedIdCookie.getValue());
logger.debug("State token: {}", authResponse.getState());
// TODO(#258): Check job ID in state token, was broken during local demo
// UUID jobIdFromState = ReferenceApiUtils.decodeJobId(authResponse.getState());
// // TODO: Remove sanity check
// Preconditions.checkState(
// jobIdFromState.equals(jobId),
// "Job id in cookie [%s] and request [%s] should match",
// jobId,
// jobIdFromState);
PortabilityJob job = store.findJob(jobId);
Preconditions.checkNotNull(job, "existing job not found for jobId: %s", jobId);
// TODO: Determine service from job or from authUrl path?
AuthMode authMode = ReferenceApiUtils.getAuthMode(exchange.getRequestHeaders());
String service = (authMode == AuthMode.EXPORT) ? job.exportService() : job.importService();
Preconditions.checkState(
    !Strings.isNullOrEmpty(service),
    "service not found, service: %s authMode: %s, jobId: %s",
    service, authMode, jobId.toString());
AuthDataGenerator generator =
    registry.getAuthDataGenerator(service, job.transferDataType(), authMode);
Preconditions.checkNotNull(
    generator, "Generator not found for type: %s, service: %s", job.transferDataType(), service);
// Obtain the session key for this job
String encodedSessionKey = job.jobAuthorization().sessionSecretKey();
SecretKey key = symmetricKeyGenerator.parse(BaseEncoding.base64Url().decode(encodedSessionKey));
// Retrieve initial auth data, if it existed
AuthData initialAuthData = null;
String encryptedInitialAuthData =
    (authMode == AuthMode.EXPORT)
        ? job.jobAuthorization().encryptedInitialExportAuthData()
        : job.jobAuthorization().encryptedInitialImportAuthData();
if (encryptedInitialAuthData != null) {
// Retrieve and parse the session key from the job
// Decrypt and deserialize the object
String serialized = DecrypterFactory.create(key).decrypt(encryptedInitialAuthData);
initialAuthData = objectMapper.readValue(serialized, AuthData.class);
}
// TODO: Use UUID instead of UUID.toString()
// Generate auth data
AuthData authData =
    generator.generateAuthData(
        baseApiUrl, authResponse.getCode(), jobId.toString(), initialAuthData, null);
Preconditions.checkNotNull(authData, "Auth data should not be null");
// Serialize and encrypt the auth data
String serialized = objectMapper.writeValueAsString(authData);
String encryptedAuthData = EncrypterFactory.create(key).encrypt(serialized);
// Set new cookie
ReferenceApiUtils.setCookie(exchange.getResponseHeaders(), encryptedAuthData, authMode);
redirect =
    baseUrl
        + ((authMode == AuthMode.EXPORT)
            ? FrontendConstantUrls.URL_NEXT_PAGE
            : FrontendConstantUrls.URL_COPY_PAGE);
} catch (Exception e) {
logger.error("Error handling request", e);
throw e;
}
return redirect;
}
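The decrypt-then-deserialize and serialize-then-encrypt steps above form a symmetric pair around the job's session key. A minimal sketch factoring them into helpers, using only the DecrypterFactory, EncrypterFactory, and ObjectMapper calls already shown; the helper names are illustrative and do not exist in the project:
// Illustrative helpers only.
private AuthData decryptAuthData(SecretKey key, String encryptedAuthData) throws IOException {
  String serialized = DecrypterFactory.create(key).decrypt(encryptedAuthData);
  return objectMapper.readValue(serialized, AuthData.class);
}

private String encryptAuthData(SecretKey key, AuthData authData) throws IOException {
  String serialized = objectMapper.writeValueAsString(authData);
  return EncrypterFactory.create(key).encrypt(serialized);
}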