Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSource in the haikudepotserver project by haiku.
The class JobDataWriteListener, method onWritePossible:
@Override
public void onWritePossible() throws IOException {
    Optional<JobDataWithByteSource> jobDataWithByteSourceOptional = jobService.tryObtainData(jobDataGuid);

    if (!jobDataWithByteSourceOptional.isPresent()) {
        LOGGER.error("unable to find the job data for guid; {}", jobDataGuid);
        async.complete();
    } else {
        ByteSource byteSource = jobDataWithByteSourceOptional.get().getByteSource();

        while (payloadOffset >= 0 && outputStream.isReady()) {
            ByteSource subPayloadByteSource = byteSource.slice(payloadOffset, BUFFER_SIZE);
            int subPayloadBufferFillLength = readToBuffer(subPayloadByteSource);

            if (0 == subPayloadBufferFillLength) {
                async.complete();
                LOGGER.info("did complete async stream job data; {}", jobDataGuid);
                payloadOffset = -1;
            } else {
                outputStream.write(subPayloadBuffer, 0, subPayloadBufferFillLength);
                payloadOffset += subPayloadBufferFillLength;
            }
        }
    }
}
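The snippet depends on a subPayloadBuffer field and a readToBuffer helper that are not shown in this listing. A minimal sketch of what they might look like, assuming Guava's ByteStreams and a fixed-size buffer field; this is an illustration, not the project's actual implementation:

// hypothetical field and helper; the names mirror the snippet above.
private static final int BUFFER_SIZE = 8 * 1024;
private final byte[] subPayloadBuffer = new byte[BUFFER_SIZE];

// fills subPayloadBuffer from the sliced ByteSource and returns the number of
// bytes read; 0 signals that the payload is exhausted.
private int readToBuffer(ByteSource byteSource) throws IOException {
    try (InputStream inputStream = byteSource.openStream()) {
        return ByteStreams.read(inputStream, subPayloadBuffer, 0, subPayloadBuffer.length);
    }
}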
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSource in the haikudepotserver project by haiku.
The class PkgScreenshotImportArchiveJobRunner, method run:
@Override
public void run(JobService jobService, PkgScreenshotImportArchiveJobSpecification specification)
        throws IOException, JobRunnerException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);
    Preconditions.checkArgument(null != specification.getInputDataGuid(), "missing input data guid on specification");
    Preconditions.checkArgument(null != specification.getImportStrategy(), "missing import strategy on specification");

    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());

    Optional<JobDataWithByteSource> jobDataWithByteSourceOptional =
            jobService.tryObtainData(specification.getInputDataGuid());

    if (!jobDataWithByteSourceOptional.isPresent()) {
        throw new IllegalStateException(
                "the job data was not able to be found for guid; " + specification.getInputDataGuid());
    }

    if (!serverRuntime.performInTransaction(() -> {
        try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
                OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
                CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) {

            Map<String, ScreenshotImportMetadatas> metadatas = new HashMap<>();
            writer.writeNext(new String[] { "path", "pkg-name", "action", "message", "code" });

            // sweep through and collect meta-data about the packages in the tar file.
            LOGGER.info("will collect data about packages' screenshots from the archive");
            consumeScreenshotArchiveEntries(
                    jobDataWithByteSourceOptional.get().getByteSource(),
                    (ae) -> collectScreenshotMetadataFromArchive(
                            metadatas, ae.getArchiveInputStream(), ae.getArchiveEntry(),
                            ae.getPkgName(), ae.getOrder()));
            LOGGER.info("did collect data about {} packages' screenshots from the archive", metadatas.size());

            LOGGER.info("will collect data about persisted packages' screenshots");
            collectPersistedScreenshotMetadata(metadatas);
            LOGGER.info("did collect data about persisted packages' screenshots");

            if (specification.getImportStrategy() == PkgScreenshotImportArchiveJobSpecification.ImportStrategy.REPLACE) {
                LOGGER.info("will delete persisted screenshots that are absent from the archive");
                int deleted = deletePersistedScreenshotsThatAreNotPresentInArchiveAndReport(writer, metadatas.values());
                LOGGER.info("did delete {} persisted screenshots that are absent from the archive", deleted);
            }

            blendInArtificialOrderings(metadatas.values());

            // sweep through the archive again and load in those screenshots that are not
            // already present. The ordering of the inbound data should be preserved.
            LOGGER.info("will load screenshots for {} packages from the archive", metadatas.size());
            consumeScreenshotArchiveEntries(
                    jobDataWithByteSourceOptional.get().getByteSource(),
                    (ae) -> importScreenshotsFromArchiveAndReport(
                            writer, metadatas.get(ae.getPkgName()), ae.getArchiveInputStream(),
                            ae.getArchiveEntry(), ae.getPkgName(), ae.getOrder()));
            LOGGER.info("did load screenshots for {} packages from the archive", metadatas.size());

            return true;
        } catch (IOException e) {
            LOGGER.error("unable to complete the job", e);
        }

        return false;
    })) {
        throw new JobRunnerException("unable to complete job");
    }
}
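The archive traversal is delegated to consumeScreenshotArchiveEntries, which is not shown in this listing. A rough sketch of how such a method could walk a gzipped tar archive with Apache Commons Compress; the ArchiveEntryWithPkgNameAndOrder constructor and the extractPkgName helper are assumptions made for illustration only:

// hypothetical sketch assuming the input is a gzipped tar archive.
private void consumeScreenshotArchiveEntries(
        ByteSource byteSource,
        Consumer<ArchiveEntryWithPkgNameAndOrder> consumer) throws IOException {
    try (InputStream inputStream = byteSource.openBufferedStream();
            GZIPInputStream gzipInputStream = new GZIPInputStream(inputStream);
            TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(gzipInputStream)) {
        TarArchiveEntry archiveEntry;
        int order = 0;

        while (null != (archiveEntry = tarArchiveInputStream.getNextTarEntry())) {
            if (archiveEntry.isFile()) {
                // the package name and the per-package ordering would be parsed from
                // the entry's path; extractPkgName is hypothetical.
                consumer.accept(new ArchiveEntryWithPkgNameAndOrder(
                        tarArchiveInputStream,
                        archiveEntry,
                        extractPkgName(archiveEntry.getName()),
                        order++));
            }
        }
    }
}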
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSource in the haikudepotserver project by haiku.
The class LocalJobServiceIT, method testHappyDays:
/**
 * <p>This test is somewhat unstable (non-repeatable) because it drives a number of
 * jobs into the job service and checks that they all run correctly; it introduces
 * some random delays.</p>
 */
@Test
public void testHappyDays() {

    // -------------------------
    List<String> guids = IntStream.of(1, 2, 3, 4)
            .mapToObj((i) -> new TestNumberedLinesJobSpecification(3, 500L))
            .map((spec) -> jobService.submit(spec, JobSnapshot.COALESCE_STATUSES_NONE))
            .collect(Collectors.toList());
    String immediateGuid = jobService.immediate(new TestNumberedLinesJobSpecification(3, 500L), false);
    // -------------------------

    Stream.concat(guids.stream(), Stream.of(immediateGuid)).forEach((guid) -> {
        jobService.awaitJobFinishedUninterruptibly(guid, TimeUnit.SECONDS.toMillis(15));

        try {
            Optional<? extends JobSnapshot> jobSnapshotOptional = jobService.tryGetJob(guid);
            Assertions.assertThat(jobSnapshotOptional.isPresent()).isTrue();
            Assertions.assertThat(jobSnapshotOptional.get().getStatus()).isEqualTo(JobSnapshot.Status.FINISHED);

            Set<String> dataGuids = jobSnapshotOptional.get().getGeneratedDataGuids();
            Assertions.assertThat(dataGuids.size()).isEqualTo(1);

            String dataGuid = dataGuids.iterator().next();
            Optional<JobDataWithByteSource> jobDataOptional = jobService.tryObtainData(dataGuid);
            Assertions.assertThat(jobDataOptional.isPresent()).isTrue();

            try (InputStream inputStream = jobDataOptional.get().getByteSource().openStream();
                    Reader reader = new InputStreamReader(inputStream, Charsets.UTF_8)) {
                Assertions.assertThat(CharStreams.toString(reader)).isEqualTo("0\n1\n2\n");
            }
        } catch (IOException ioe) {
            throw new UncheckedIOException(ioe);
        }
    });
}
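The expected payload "0\n1\n2\n" implies that the test job writes one numbered line per iteration. A plausible sketch of the runner behind TestNumberedLinesJobSpecification, assuming getLineCount() and getDelayMillis() accessors that do not appear in this listing; the storeGeneratedData call mirrors the usage in the runner shown earlier:

// hypothetical runner behind TestNumberedLinesJobSpecification.
public void run(JobService jobService, TestNumberedLinesJobSpecification specification) throws IOException {
    // register the generated data against the job; the assertions above expect
    // exactly one generated data guid per job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.PLAIN_TEXT_UTF_8.toString());

    try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
            Writer writer = new OutputStreamWriter(outputStream, Charsets.UTF_8)) {
        for (int i = 0; i < specification.getLineCount(); i++) { // assumed accessor
            writer.write(Integer.toString(i));
            writer.write('\n');
            // the delays mentioned in the javadoc would be introduced here.
            Uninterruptibles.sleepUninterruptibly(specification.getDelayMillis(), TimeUnit.MILLISECONDS); // assumed accessor
        }
    }
}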
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSource in the haikudepotserver project by haiku.
The class PkgCategoryCoverageImportSpreadsheetJobRunnerIT, method testRun:
@Test
public void testRun() throws IOException {
    integrationTestSupportService.createStandardTestData();

    PkgCategoryCoverageImportSpreadsheetJobSpecification spec =
            new PkgCategoryCoverageImportSpreadsheetJobSpecification();
    spec.setInputDataGuid(jobService.storeSuppliedData(
            "input",
            MediaType.CSV_UTF_8.toString(),
            getResourceByteSource("sample-pkgcategorycoverageimportspreadsheet-supplied.csv")).getGuid());

    // ------------------------------------
    String guid = jobService.submit(spec, JobSnapshot.COALESCE_STATUSES_NONE);
    // ------------------------------------

    jobService.awaitJobFinishedUninterruptibly(guid, 10000);
    Optional<? extends JobSnapshot> snapshotOptional = jobService.tryGetJob(guid);
    Assertions.assertThat(snapshotOptional.get().getStatus()).isEqualTo(JobSnapshot.Status.FINISHED);

    String dataGuid = snapshotOptional.get().getGeneratedDataGuids().stream().collect(SingleCollector.single());
    JobDataWithByteSource jobSource = jobService.tryObtainData(dataGuid).get();
    ByteSource expectedByteSource = getResourceByteSource("sample-pkgcategorycoverageimportspreadsheet-generated.csv");

    try (BufferedReader jobReader = jobSource.getByteSource().asCharSource(Charsets.UTF_8).openBufferedStream();
            BufferedReader sampleReader = expectedByteSource.asCharSource(Charsets.UTF_8).openBufferedStream()) {
        assertEqualsLineByLine(sampleReader, jobReader);
    }

    // one of the packages was changed; check that the change is in the database successfully.
    {
        ObjectContext context = serverRuntime.newContext();
        Pkg pkg = Pkg.getByName(context, "pkg1");
        Set<String> pkg1PkgCategoryCodes = pkg.getPkgSupplement().getPkgPkgCategories().stream()
                .map(c -> c.getPkgCategory().getCode())
                .collect(Collectors.toSet());
        Assertions.assertThat(pkg1PkgCategoryCodes).isEqualTo(new HashSet<>(Arrays.asList("audio", "graphics")));
    }
}
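assertEqualsLineByLine is a local test helper that is not shown in this listing. A minimal sketch of what such a helper might do, comparing the two readers line by line with AssertJ:

// hypothetical helper comparing expected and actual content line by line.
private void assertEqualsLineByLine(BufferedReader expectedReader, BufferedReader actualReader) throws IOException {
    int lineNumber = 1;
    String expectedLine;
    String actualLine;

    do {
        expectedLine = expectedReader.readLine();
        actualLine = actualReader.readLine();
        Assertions.assertThat(actualLine)
                .as("line %d of the generated data", lineNumber)
                .isEqualTo(expectedLine);
        lineNumber++;
    } while (null != expectedLine && null != actualLine);
}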
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSource in the haikudepotserver project by haiku.
The class PkgDumpExportJobRunnerIT, method testRun:
/**
 * <p>Uses the sample data and checks that the output from the report matches a captured,
 * sensible-looking previous run.</p>
 */
@Test
public void testRun() throws IOException {
    long now = DateTimeHelper.secondAccuracyDate(new Date()).getTime();
    integrationTestSupportService.createStandardTestData();

    PkgDumpExportJobSpecification specification = new PkgDumpExportJobSpecification();
    specification.setRepositorySourceCode("testreposrc_xyz");
    specification.setNaturalLanguageCode("es");

    // ------------------------------------
    String guid = jobService.submit(specification, JobSnapshot.COALESCE_STATUSES_NONE);
    // ------------------------------------

    jobService.awaitJobFinishedUninterruptibly(guid, 10000);
    Optional<? extends JobSnapshot> snapshotOptional = jobService.tryGetJob(guid);
    Assertions.assertThat(snapshotOptional.get().getStatus()).isEqualTo(JobSnapshot.Status.FINISHED);

    // pull in the GZIP-compressed JSON now and extract the data.
    String dataGuid = snapshotOptional.get().getGeneratedDataGuids().iterator().next();
    JobDataWithByteSource jobSource = jobService.tryObtainData(dataGuid).get();

    try (final InputStream inputStream = jobSource.getByteSource().openBufferedStream();
            final GZIPInputStream gzipInputStream = new GZIPInputStream(inputStream)) {
        JsonNode rootNode = objectMapper.readTree(gzipInputStream);

        JsonNode dataModifiedTimestampNode = rootNode.at("/info/dataModifiedTimestamp");
        Assertions.assertThat(dataModifiedTimestampNode.asLong()).isGreaterThanOrEqualTo(now);

        JsonNode pkgNameNode = rootNode.at("/items/0/name");
        Assertions.assertThat(pkgNameNode.asText()).isEqualTo("pkg1");

        JsonNode derivedRatingNode = rootNode.at("/items/0/derivedRating");
        Assertions.assertThat(derivedRatingNode.asText()).isEqualTo("3.5");

        JsonNode screenshotLengthNode = rootNode.at("/items/0/pkgScreenshots/0/length");
        Assertions.assertThat(screenshotLengthNode.asLong()).isEqualTo(41296L);

        JsonNode categoryCodeNode = rootNode.at("/items/0/pkgCategories/0/code");
        Assertions.assertThat(categoryCodeNode.asText()).isEqualTo("graphics");

        JsonNode pv0SummaryNode = rootNode.at("/items/0/pkgVersions/0/summary");
        Assertions.assertThat(pv0SummaryNode.asText()).isEqualTo("pkg1Version2SummarySpanish_feijoa");
    }
}
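The assertions rely on Jackson's JSON Pointer support: JsonNode.at(String) resolves a /-separated path and returns a missing node rather than null when the path cannot be resolved. A small self-contained example against a payload shaped like the dump export; the JSON literal here is invented for illustration:

// illustrative only; the document shape is inferred from the assertions above.
ObjectMapper objectMapper = new ObjectMapper();
JsonNode rootNode = objectMapper.readTree(
        "{\"info\":{\"dataModifiedTimestamp\":123},"
                + "\"items\":[{\"name\":\"pkg1\",\"derivedRating\":\"3.5\"}]}");
System.out.println(rootNode.at("/items/0/name").asText()); // pkg1
// a path that does not resolve yields a MissingNode, never null.
System.out.println(rootNode.at("/items/1/name").isMissingNode()); // true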