Use of org.datatransferproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class StartTransferJobAction, method handle:
@Override
public TransferJob handle(StartTransferJob startTransferJob) {
  String id = startTransferJob.getId();
  Preconditions.checkNotNull(id, "transfer job ID required for StartTransferJobAction");
  UUID jobId = decodeJobId(id);
  PortabilityJob job = jobStore.findJob(jobId);
  String authData = startTransferJob.getEncryptedAuthData();
  job = updateJobWithCredentials(jobId, job, authData);
  return new TransferJob(
      id,
      job.exportService(),
      job.importService(),
      job.transferDataType(),
      null, null, null, null, null, null);
}
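handle receives the job ID in its transport form and converts it back to a UUID via decodeJobId before looking the job up in the job store. The project's actual encoding is not shown in this snippet; the helper below is only a sketch that assumes a base64-of-UUID wire format, and the class name JobIdCodec is hypothetical.

import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.UUID;

// Hypothetical illustration only; the real decodeJobId helper in the project may differ.
final class JobIdCodec {

  // Assumes the wire format is the UUID's string form, base64-encoded.
  static UUID decodeJobId(String encodedJobId) {
    return UUID.fromString(
        new String(Base64.getDecoder().decode(encodedJobId), StandardCharsets.UTF_8));
  }

  static String encodeJobId(UUID jobId) {
    return Base64.getEncoder().encodeToString(jobId.toString().getBytes(StandardCharsets.UTF_8));
  }
}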
Use of org.datatransferproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class JobPollingServiceTest, method pollingLifeCycle:
// TODO(data-transfer-project/issues/43): Make this an integration test which uses both the API
// and transfer worker, rather than simulating API calls, in case this test ever diverges from
// what the API actually does.
@Test
public void pollingLifeCycle() throws Exception {
  when(asymmetricKeyGenerator.generate()).thenReturn(TEST_KEY_PAIR);
  // Initial state
  assertThat(JobMetadata.isInitialized()).isFalse();
  // Run once with no data in the database
  jobPollingService.runOneIteration();
  assertThat(JobMetadata.isInitialized()).isFalse();
  PortabilityJob job = store.findJob(TEST_ID);
  // No existing ready job
  assertThat(job).isNull();
  // API inserts a job in the initial authorization state
  job =
      PortabilityJob.builder()
          .setTransferDataType("photo")
          .setExportService("DummyExportService")
          .setImportService("DummyImportService")
          .setAndValidateJobAuthorization(
              JobAuthorization.builder()
                  .setEncryptionScheme("cleartext")
                  .setState(State.INITIAL)
                  .setSessionSecretKey("fooBar")
                  .build())
          .build();
  store.createJob(TEST_ID, job);
  // Verify initial authorization state
  job = store.findJob(TEST_ID);
  assertThat(job.jobAuthorization().state()).isEqualTo(State.INITIAL);
  // No auth data should exist yet
  assertThat(job.jobAuthorization().encryptedAuthData()).isNull();
  // API atomically updates the job from 'initial' to 'creds available'
  job =
      job.toBuilder()
          .setAndValidateJobAuthorization(
              job.jobAuthorization().toBuilder().setState(State.CREDS_AVAILABLE).build())
          .build();
  store.updateJobAuthStateToCredsAvailable(TEST_ID);
  // Verify 'creds available' state
  job = store.findJob(TEST_ID);
  assertThat(job.jobAuthorization().state()).isEqualTo(State.CREDS_AVAILABLE);
  // No auth data should exist yet
  assertThat(job.jobAuthorization().encryptedAuthData()).isNull();
  // Worker initiates the JobPollingService
  jobPollingService.runOneIteration();
  assertThat(JobMetadata.isInitialized()).isTrue();
  assertThat(JobMetadata.getJobId()).isEqualTo(TEST_ID);
  // Verify the assigned-without-auth-data state
  job = store.findJob(TEST_ID);
  assertThat(job.jobAuthorization().state())
      .isEqualTo(JobAuthorization.State.CREDS_ENCRYPTION_KEY_GENERATED);
  assertThat(job.jobAuthorization().authPublicKey()).isNotEmpty();
  // Client encrypts data and updates the job
  job =
      job.toBuilder()
          .setAndValidateJobAuthorization(
              job.jobAuthorization().toBuilder()
                  .setEncryptedAuthData("dummy export data")
                  .setState(State.CREDS_STORED)
                  .build())
          .build();
  store.updateJobWithCredentials(TEST_ID, job);
  // Run another iteration of the polling service.
  // The worker should pick up the encrypted data and update the job.
  jobPollingService.runOneIteration();
  job = store.findJob(TEST_ID);
  JobAuthorization jobAuthorization = job.jobAuthorization();
  assertThat(jobAuthorization.state()).isEqualTo(JobAuthorization.State.CREDS_STORED);
  assertThat(jobAuthorization.encryptedAuthData()).isNotEmpty();
  store.remove(TEST_ID);
}
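The test drives the JobAuthorization.State hand-off between the API and the transfer worker: INITIAL when the API creates the job, CREDS_AVAILABLE once the API signals that credentials can be collected, CREDS_ENCRYPTION_KEY_GENERATED after the worker claims the job and publishes its public key, and CREDS_STORED once the client has stored the encrypted auth data. In a real integration test the two sides would run asynchronously, so a polling helper along the following lines can be useful; it is a sketch, not project code, and relies only on the findJob and jobAuthorization accessors used above.

// Hypothetical test helper: polls the store until the job's authorization reaches
// the expected state, or fails after a timeout.
// Requires java.time.Duration, java.time.Instant, and java.util.UUID.
static void awaitAuthState(
    JobStore store, UUID jobId, JobAuthorization.State expected, Duration timeout)
    throws InterruptedException {
  Instant deadline = Instant.now().plus(timeout);
  while (Instant.now().isBefore(deadline)) {
    PortabilityJob job = store.findJob(jobId);
    if (job != null && job.jobAuthorization().state() == expected) {
      return;
    }
    Thread.sleep(100);
  }
  throw new AssertionError("Job " + jobId + " never reached state " + expected);
}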
Use of org.datatransferproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class JobCancelWatchingService, method runOneIteration:
@Override
protected void runOneIteration() {
  if (!JobMetadata.isInitialized()) {
    return;
  }
  monitor.debug(() -> "polling for job to check cancellation");
  PortabilityJob currentJob = store.findJob(JobMetadata.getJobId());
  switch (currentJob.state()) {
    case CANCELED:
      monitor.info(
          () -> String.format("Job %s is canceled", JobMetadata.getJobId()),
          EventCode.WORKER_JOB_CANCELED);
      dtpInternalMetricRecorder.cancelledJob(
          JobMetadata.getDataType(),
          JobMetadata.getExportService(),
          JobMetadata.getImportService(),
          JobMetadata.getStopWatch().elapsed());
      monitor.flushLogs();
      System.exit(0);
      break;
    case ERROR:
      monitor.severe(
          () -> String.format("Job %s is errored", JobMetadata.getJobId()),
          EventCode.WATCHING_SERVICE_JOB_ERRORED);
      recordGeneralMetric(PortabilityJob.State.ERROR.toString());
      monitor.flushLogs();
      System.exit(0);
      break;
    case PREEMPTED:
      monitor.info(
          () -> String.format("Job %s is preempted", JobMetadata.getJobId()),
          EventCode.WATCHING_SERVICE_JOB_PREEMPTED);
      recordGeneralMetric(PortabilityJob.State.PREEMPTED.toString());
      monitor.flushLogs();
      System.exit(0);
      break;
    default:
      monitor.debug(
          () -> String.format("Job %s is not canceled or errored or preempted", JobMetadata.getJobId()));
  }
}
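runOneIteration is the callback of Guava's AbstractScheduledService, which this watching service appears to extend, so how often the cancellation check runs is determined by the Scheduler the concrete class supplies. The snippet below is only a plausible shape for that wiring; the one-minute interval is an arbitrary example, not the project's actual configuration.

// Sketch only; assumes com.google.common.util.concurrent.AbstractScheduledService.Scheduler
// and java.util.concurrent.TimeUnit are imported. The interval is illustrative.
@Override
protected Scheduler scheduler() {
  return Scheduler.newFixedDelaySchedule(/* initialDelay= */ 1, /* delay= */ 1, TimeUnit.MINUTES);
}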
Use of org.datatransferproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class JobProcessor, method processJob:
/** Process our job, whose metadata is available via {@link JobMetadata}. */
void processJob() {
  boolean success = false;
  UUID jobId = JobMetadata.getJobId();
  monitor.debug(() -> format("Begin processing jobId: %s", jobId), EventCode.WORKER_JOB_STARTED);
  Collection<ErrorDetail> errors = null;
  try {
    markJobStarted(jobId);
    hooks.jobStarted(jobId);
    PortabilityJob job = store.findJob(jobId);
    JobAuthorization jobAuthorization = job.jobAuthorization();
    monitor.debug(
        () ->
            format(
                "Starting copy job, id: %s, source: %s, destination: %s",
                jobId, job.exportService(), job.importService()));
    String scheme = jobAuthorization.encryptionScheme();
    AuthDataDecryptService decryptService = getAuthDecryptService(scheme);
    if (decryptService == null) {
      monitor.severe(
          () ->
              format(
                  "No auth decrypter found for scheme %s while processing job: %s",
                  scheme, jobId));
      return;
    }
    String encrypted = jobAuthorization.encryptedAuthData();
    byte[] encodedPrivateKey = JobMetadata.getPrivateKey();
    AuthDataPair pair = decryptService.decrypt(encrypted, encodedPrivateKey);
    AuthData exportAuthData = objectMapper.readValue(pair.getExportAuthData(), AuthData.class);
    AuthData importAuthData = objectMapper.readValue(pair.getImportAuthData(), AuthData.class);
    String exportInfoStr = job.exportInformation();
    Optional<ExportInformation> exportInfo = Optional.empty();
    if (!Strings.isNullOrEmpty(exportInfoStr)) {
      exportInfo = Optional.of(objectMapper.readValue(exportInfoStr, ExportInformation.class));
    }
    // Copy the data
    dtpInternalMetricRecorder.startedJob(
        JobMetadata.getDataType(), JobMetadata.getExportService(), JobMetadata.getImportService());
    JobMetadata.getStopWatch().start();
    errors = copier.copy(exportAuthData, importAuthData, jobId, exportInfo);
    final int numErrors = errors.size();
    monitor.debug(() -> format("Finished copy for jobId: %s with %d error(s).", jobId, numErrors));
    success = errors.isEmpty();
  } catch (CopyExceptionWithFailureReason e) {
    String failureReason = e.getFailureReason();
    if (failureReason.contains(FailureReasons.DESTINATION_FULL.toString())) {
      monitor.info(
          () -> "The remaining storage in the user's account is not enough to perform this operation.",
          e);
    } else if (failureReason.contains(FailureReasons.INVALID_TOKEN.toString())
        || failureReason.contains(FailureReasons.SESSION_INVALIDATED.toString())
        || failureReason.contains(FailureReasons.UNCONFIRMED_USER.toString())
        || failureReason.contains(FailureReasons.USER_CHECKPOINTED.toString())) {
      monitor.info(() -> "Got token error", e);
    } else {
      monitor.severe(
          () -> format("Error with failure code '%s' while processing jobId: %s", failureReason, jobId),
          e,
          EventCode.WORKER_JOB_ERRORED);
    }
    addFailureReasonToJob(jobId, failureReason);
  } catch (IOException | CopyException | RuntimeException e) {
    monitor.severe(() -> "Error processing jobId: " + jobId, e, EventCode.WORKER_JOB_ERRORED);
  } finally {
    monitor.debug(() -> "Finished processing jobId: " + jobId, EventCode.WORKER_JOB_FINISHED);
    addErrorsAndMarkJobFinished(jobId, success, errors);
    hooks.jobFinished(jobId, success);
    dtpInternalMetricRecorder.finishedJob(
        JobMetadata.getDataType(),
        JobMetadata.getExportService(),
        JobMetadata.getImportService(),
        success,
        JobMetadata.getStopWatch().elapsed());
    monitor.flushLogs();
    JobMetadata.reset();
  }
}
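processJob resolves an AuthDataDecryptService by the job's encryption scheme and bails out if none matches. The lookup itself is not shown in this snippet; a minimal sketch is below, assuming a collection of decrypt services is injected and that the interface offers a canHandle(scheme) style check, both of which are assumptions rather than confirmed project APIs.

// Hypothetical sketch of the scheme lookup; decryptServices and canHandle are assumptions.
private AuthDataDecryptService getAuthDecryptService(String scheme) {
  for (AuthDataDecryptService service : decryptServices) {
    if (service.canHandle(scheme)) {
      return service;
    }
  }
  // The caller treats null as "no decrypter found for this scheme".
  return null;
}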
Use of org.datatransferproject.spi.cloud.types.PortabilityJob in project data-transfer-project by Google.
The class JobStoreWithValidator, method updateJobAuthStateToCredsAvailable:
@Override
public void updateJobAuthStateToCredsAvailable(UUID jobId) throws IOException {
  PortabilityJob job = findJob(jobId);
  // Set the updated job auth data
  JobAuthorization jobAuthorization =
      job.jobAuthorization().toBuilder().setState(CREDS_AVAILABLE).build();
  job = job.toBuilder().setAndValidateJobAuthorization(jobAuthorization).build();
  updateJob(jobId, job, (previous, updated) -> validateForUpdateStateToCredsAvailable(previous));
}
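The interesting part is the validator passed to updateJob, which guards the state transition against the previously stored job. The project's actual check is not shown here; a plausible sketch, assuming the transition to CREDS_AVAILABLE is only legal from the INITIAL authorization state, is:

// Hypothetical validation sketch, not the project's actual implementation.
private static void validateForUpdateStateToCredsAvailable(PortabilityJob previous) {
  JobAuthorization.State previousState = previous.jobAuthorization().state();
  if (previousState != JobAuthorization.State.INITIAL) {
    throw new IllegalStateException(
        "Job auth state must be INITIAL before moving to CREDS_AVAILABLE, was: " + previousState);
  }
}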