Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
Class PkgIconSpreadsheetJobRunner, method run.
@Override
public void run(JobService jobService, PkgIconSpreadsheetJobSpecification specification) throws IOException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);

    final ObjectContext context = serverRuntime.newContext();

    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());

    try (
            OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
            CSVWriter writer = new CSVWriter(outputStreamWriter, ',')
    ) {
        final List<PkgIconConfiguration> pkgIconConfigurations =
                pkgIconService.getInUsePkgIconConfigurations(context);

        {
            List<String> headings = new ArrayList<>();
            headings.add("pkg-name");
            headings.add("repository-codes");
            headings.add("no-icons");

            for (PkgIconConfiguration pkgIconConfiguration : pkgIconConfigurations) {
                StringBuilder heading = new StringBuilder();
                heading.append(pkgIconConfiguration.getMediaType().getCode());
                if (null != pkgIconConfiguration.getSize()) {
                    heading.append("@");
                    heading.append(pkgIconConfiguration.getSize().toString());
                }
                headings.add(heading.toString());
            }

            writer.writeNext(headings.toArray(new String[0]));
        }

        // stream out the packages.
        long startMs = System.currentTimeMillis();
        LOGGER.info("will produce icon spreadsheet report");

        long count = pkgService.eachPkg(context, false, pkg -> {
            PkgSupplement pkgSupplement = pkg.getPkgSupplement();
            List<String> cells = new ArrayList<>();
            cells.add(pkg.getName());
            cells.add(repositoryService.getRepositoriesForPkg(context, pkg)
                    .stream()
                    .map(Repository::getCode)
                    .collect(Collectors.joining(";")));
            cells.add(pkgSupplement.getPkgIcons().isEmpty() ? MARKER : "");

            for (PkgIconConfiguration pkgIconConfiguration : pkgIconConfigurations) {
                cells.add(pkgSupplement
                        .getPkgIcon(pkgIconConfiguration.getMediaType(), pkgIconConfiguration.getSize())
                        .map(pi -> MARKER)
                        .orElse(""));
            }

            writer.writeNext(cells.toArray(new String[0]));
            return true;
        });

        LOGGER.info("did produce icon report for {} packages in {}ms",
                count, System.currentTimeMillis() - startMs);
    }
}
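All of the runners in this collection share the same outline: register the outbound data against the job with storeGeneratedData, open a buffered stream on the returned byte sink, and wrap it in a CSVWriter. A minimal sketch of that skeleton follows; ExampleSpreadsheetJobSpecification and the column values are hypothetical placeholders, not part of the project.

// Minimal sketch of the shared pattern, assuming the JobService/JobDataWithByteSink
// behavior shown in the runners on this page. ExampleSpreadsheetJobSpecification is invented.
@Override
public void run(JobService jobService, ExampleSpreadsheetJobSpecification specification) throws IOException {
    // register the outbound data against the job; the returned sink receives the CSV payload.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(),
            "download",                       // use-code identifying the role of the data
            MediaType.CSV_UTF_8.toString());  // content type advertised for the download

    try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
         OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
         CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) {
        writer.writeNext(new String[] { "column-a", "column-b" }); // header row
        writer.writeNext(new String[] { "value-a", "value-b" });   // one data row
    } // closing the writer flushes the buffered stream and completes the generated data
}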
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
Class PkgLocalizationCoverageExportSpreadsheetJobRunner, method run.
@Override
public void run(JobService jobService, PkgLocalizationCoverageExportSpreadsheetJobSpecification specification)
        throws IOException, JobRunnerException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);

    final ObjectContext context = serverRuntime.newContext();
    final List<NaturalLanguage> naturalLanguages = getNaturalLanguages(context);

    if (naturalLanguages.isEmpty()) {
        throw new RuntimeException("there appear to be no natural languages in the system");
    }

    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());

    try (
            OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
            CSVWriter writer = new CSVWriter(outputStreamWriter, ',')
    ) {
        final String[] cells = new String[1 + naturalLanguages.size()];

        // headers
        {
            int c = 0;
            cells[c++] = "pkg-name";
            for (NaturalLanguage naturalLanguage : naturalLanguages) {
                cells[c++] = naturalLanguage.getCode();
            }
        }

        long startMs = System.currentTimeMillis();
        writer.writeNext(cells);

        // stream out the packages.
        final long expectedTotal = pkgService.totalPkg(context, false);
        final AtomicLong counter = new AtomicLong(0);

        LOGGER.info("will produce package localization report for {} packages", expectedTotal);

        long count = pkgService.eachPkg(
                context,
                false, // allow source only.
                pkg -> {
                    PkgSupplement pkgSupplement = pkg.getPkgSupplement();
                    int c = 0;
                    cells[c++] = pkg.getName();
                    for (NaturalLanguage naturalLanguage : naturalLanguages) {
                        cells[c++] = pkgSupplement
                                .getPkgLocalization(naturalLanguage)
                                .map(pl -> MARKER)
                                .orElse("");
                    }
                    writer.writeNext(cells);
                    jobService.setJobProgressPercent(
                            specification.getGuid(),
                            (int) ((100 * counter.incrementAndGet()) / expectedTotal));
                    // keep going!
                    return true;
                });

        LOGGER.info("did produce pkg localization coverage spreadsheet report for {} packages in {}ms",
                count, System.currentTimeMillis() - startMs);
    }
}
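The progress update in this runner uses integer arithmetic, so the reported percentage always rounds down and reaches exactly 100 only on the final package; it would also divide by zero if expectedTotal were 0, which the runner implicitly rules out. A small self-contained illustration of the same computation (the values are made up):

import java.util.concurrent.atomic.AtomicLong;

public class ProgressPercentExample {
    public static void main(String[] args) {
        long expectedTotal = 7; // e.g. the result of pkgService.totalPkg(context, false)
        AtomicLong counter = new AtomicLong(0);
        for (int i = 0; i < expectedTotal; i++) {
            // same integer arithmetic as the runner above: truncates, never exceeds 100.
            int percent = (int) ((100 * counter.incrementAndGet()) / expectedTotal);
            System.out.println(percent); // prints 14, 28, 42, 57, 71, 85, 100
        }
    }
}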
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
Class PkgProminenceAndUserRatingSpreadsheetJobRunner, method run.
@Override
public void run(JobService jobService, PkgProminenceAndUserRatingSpreadsheetJobSpecification specification) throws IOException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);

    final ObjectContext context = serverRuntime.newContext();

    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());

    try (
            OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
            CSVWriter writer = new CSVWriter(outputStreamWriter, ',')
    ) {
        writer.writeNext(new String[] {
                "pkg-name", "repository-code",
                "prominence-name", "prominence-ordering",
                "derived-rating", "derived-rating-sample-size" });

        // stream out the packages.
        long startMs = System.currentTimeMillis();
        LOGGER.info("will produce prominence spreadsheet report");

        long count = pkgService.eachPkg(context, false, pkg -> {
            List<PkgProminence> pkgProminences = PkgProminence.findByPkg(context, pkg);
            List<PkgUserRatingAggregate> pkgUserRatingAggregates = PkgUserRatingAggregate.findByPkg(context, pkg);
            List<Repository> repositories = Stream.concat(
                            pkgProminences.stream().map(PkgProminence::getRepository),
                            pkgUserRatingAggregates.stream().map(PkgUserRatingAggregate::getRepository))
                    .distinct()
                    .sorted()
                    .collect(Collectors.toList());

            if (repositories.isEmpty()) {
                writer.writeNext(new String[] { pkg.getName(), "", "", "", "", "" });
            } else {
                for (Repository repository : repositories) {
                    Optional<PkgProminence> pkgProminenceOptional = pkgProminences.stream()
                            .filter(pp -> pp.getRepository().equals(repository))
                            .collect(SingleCollector.optional());
                    Optional<PkgUserRatingAggregate> pkgUserRatingAggregateOptional = pkgUserRatingAggregates.stream()
                            .filter(pura -> pura.getRepository().equals(repository))
                            .collect(SingleCollector.optional());

                    writer.writeNext(new String[] {
                            pkg.getName(),
                            repository.getCode(),
                            pkgProminenceOptional.map(p -> p.getProminence().getName()).orElse(""),
                            pkgProminenceOptional.map(p -> p.getProminence().getOrdering().toString()).orElse(""),
                            pkgUserRatingAggregateOptional.map(p -> p.getDerivedRating().toString()).orElse(""),
                            pkgUserRatingAggregateOptional.map(p -> p.getDerivedRatingSampleSize().toString()).orElse("") });
                }
            }
            return true;
        });

        LOGGER.info("did produce prominence spreadsheet report for {} packages in {}ms",
                count, System.currentTimeMillis() - startMs);
    }
}
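SingleCollector.optional() is a project-local collector whose source is not shown here; from its use above it evidently reduces a stream expected to contain at most one element into an Optional. A hypothetical stand-in with that behavior can be written with Stream.reduce, failing fast if two elements ever appear:

import java.util.Optional;
import java.util.stream.Stream;

public class SingleElementExample {

    // hypothetical stand-in for SingleCollector.optional(): at most one element, else fail fast.
    static <T> Optional<T> toOptionalSingle(Stream<T> stream) {
        return stream.reduce((a, b) -> {
            throw new IllegalStateException("expected at most one element");
        });
    }

    public static void main(String[] args) {
        System.out.println(toOptionalSingle(Stream.of("only"))); // Optional[only]
        System.out.println(toOptionalSingle(Stream.empty()));    // Optional.empty
    }
}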
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
Class PkgScreenshotImportArchiveJobRunner, method run.
@Override
public void run(JobService jobService, PkgScreenshotImportArchiveJobSpecification specification)
        throws IOException, JobRunnerException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);
    Preconditions.checkArgument(null != specification.getInputDataGuid(), "missing input data guid on specification");
    Preconditions.checkArgument(null != specification.getImportStrategy(), "missing import strategy on specification");

    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());

    Optional<JobDataWithByteSource> jobDataWithByteSourceOptional =
            jobService.tryObtainData(specification.getInputDataGuid());

    if (!jobDataWithByteSourceOptional.isPresent()) {
        throw new IllegalStateException(
                "the job data was not able to be found for guid: " + specification.getInputDataGuid());
    }

    if (!serverRuntime.performInTransaction(() -> {
        try (
                OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
                OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
                CSVWriter writer = new CSVWriter(outputStreamWriter, ',')
        ) {
            Map<String, ScreenshotImportMetadatas> metadatas = new HashMap<>();

            writer.writeNext(new String[] { "path", "pkg-name", "action", "message", "code" });

            // sweep through and collect meta-data about the packages in the tar file.
            LOGGER.info("will collect data about packages' screenshots from the archive");
            consumeScreenshotArchiveEntries(
                    jobDataWithByteSourceOptional.get().getByteSource(),
                    (ae) -> collectScreenshotMetadataFromArchive(
                            metadatas, ae.getArchiveInputStream(), ae.getArchiveEntry(), ae.getPkgName(), ae.getOrder()));
            LOGGER.info("did collect data about {} packages' screenshots from the archive", metadatas.size());

            LOGGER.info("will collect data about persisted packages' screenshots");
            collectPersistedScreenshotMetadata(metadatas);
            LOGGER.info("did collect data about persisted packages' screenshots");

            if (specification.getImportStrategy() == PkgScreenshotImportArchiveJobSpecification.ImportStrategy.REPLACE) {
                LOGGER.info("will delete persisted screenshots that are absent from the archive");
                int deleted = deletePersistedScreenshotsThatAreNotPresentInArchiveAndReport(writer, metadatas.values());
                LOGGER.info("did delete {} persisted screenshots that are absent from the archive", deleted);
            }

            blendInArtificialOrderings(metadatas.values());

            // sweep through the archive again and load in those screenshots that are not already present.
            // The ordering of the inbound data should be preserved.
            LOGGER.info("will load screenshots from archive");
            consumeScreenshotArchiveEntries(
                    jobDataWithByteSourceOptional.get().getByteSource(),
                    (ae) -> importScreenshotsFromArchiveAndReport(
                            writer, metadatas.get(ae.getPkgName()), ae.getArchiveInputStream(),
                            ae.getArchiveEntry(), ae.getPkgName(), ae.getOrder()));
            LOGGER.info("did load screenshots for {} packages from archive", metadatas.size());

            return true;
        } catch (IOException e) {
            LOGGER.error("unable to complete the job", e);
        }

        return false;
    })) {
        throw new JobRunnerException("unable to complete job");
    }
}
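consumeScreenshotArchiveEntries is not shown on this page, but the comments refer to a tar file, so it presumably walks the archive entries and hands each one to the callback. A rough sketch of that kind of traversal using Apache Commons Compress follows; the consumer shape is invented for illustration, and the real method also derives the package name and ordering from the entry path:

import java.io.IOException;
import java.io.InputStream;
import java.util.function.Consumer;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;

public class TarWalkSketch {

    // rough sketch: hand each regular file entry of a tar stream to a consumer.
    // Wrap `in` in a GZIPInputStream first if the archive is gzip-compressed.
    static void consumeEntries(InputStream in, Consumer<ArchiveEntry> consumer) throws IOException {
        try (TarArchiveInputStream tarIn = new TarArchiveInputStream(in)) {
            ArchiveEntry entry;
            while (null != (entry = tarIn.getNextEntry())) {
                if (!entry.isDirectory()) {
                    consumer.accept(entry); // real code would also expose tarIn so the payload can be read
                }
            }
        }
    }
}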
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
Class PkgScreenshotSpreadsheetJobRunner, method run.
@Override
public void run(JobService jobService, PkgScreenshotSpreadsheetJobSpecification specification) throws IOException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);

    final ObjectContext context = serverRuntime.newContext();

    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());

    try (
            OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
            CSVWriter writer = new CSVWriter(outputStreamWriter, ',')
    ) {
        String[] headings = new String[] { "pkg-name", "repository-codes", "screenshot-count", "screenshot-bytes" };
        writer.writeNext(headings);

        String[] cells = new String[4];

        // stream out the packages.
        long startMs = System.currentTimeMillis();
        LOGGER.info("will produce screenshot spreadsheet report");

        long count = pkgService.eachPkg(context, false, pkg -> {
            PkgSupplement pkgSupplement = pkg.getPkgSupplement();
            cells[0] = pkg.getName();
            cells[1] = repositoryService.getRepositoriesForPkg(context, pkg)
                    .stream()
                    .map(Repository::getCode)
                    .collect(Collectors.joining(";"));
            cells[2] = Integer.toString(pkgSupplement.getPkgScreenshots().size());
            cells[3] = Integer.toString(pkgSupplement.getPkgScreenshots().stream()
                    .mapToInt(_PkgScreenshot::getLength)
                    .sum());
            writer.writeNext(cells);
            return true;
        });

        LOGGER.info("did produce screenshot spreadsheet report for {} packages in {}ms",
                count, System.currentTimeMillis() - startMs);
    }
}
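On the consuming side, the same JobService appears to expose stored data again via tryObtainData, as seen in the screenshot-import runner above. A sketch of reading a generated CSV back out, assuming JobDataWithByteSource wraps a Guava ByteSource symmetrically to the ByteSink shown in these runners (jobService and jobGuid come from the surrounding context; error handling elided):

// sketch: retrieving the generated CSV once the job has produced it.
Optional<JobDataWithByteSource> dataOptional = jobService.tryObtainData(jobGuid);

if (dataOptional.isPresent()) {
    try (InputStream inputStream = dataOptional.get().getByteSource().openBufferedStream()) {
        // stream the CSV to wherever it is needed, e.g. an HTTP response.
        inputStream.transferTo(System.out);
    }
}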