Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
The class PkgVersionLocalizationCoverageExportSpreadsheetJobRunner, method run:
@Override
public void run(
        final JobService jobService,
        final PkgVersionLocalizationCoverageExportSpreadsheetJobSpecification specification)
        throws IOException, JobRunnerException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);

    final ObjectContext context = serverRuntime.newContext();
    final List<NaturalLanguage> naturalLanguages = getNaturalLanguages(context);
    final List<Architecture> architectures = Architecture.getAllExceptByCode(
            context, Collections.singleton(Architecture.CODE_SOURCE));

    if (naturalLanguages.isEmpty()) {
        throw new RuntimeException("there appear to be no natural languages in the system");
    }

    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());

    try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
         OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
         CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) {

        final String[] cells = new String[4 + naturalLanguages.size()];

        // headers
        {
            int c = 0;
            cells[c++] = "pkg-name";
            cells[c++] = "repository";
            cells[c++] = "architecture";
            cells[c++] = "latest-version-coordinates";

            for (NaturalLanguage naturalLanguage : naturalLanguages) {
                cells[c++] = naturalLanguage.getCode();
            }
        }

        long startMs = System.currentTimeMillis();
        writer.writeNext(cells);

        // stream out the packages.
        final long expectedTotal = pkgService.totalPkg(context, false);
        final AtomicLong counter = new AtomicLong(0);

        LOGGER.info("will produce package version localization report for {} packages", expectedTotal);

        long count = pkgService.eachPkg(
                context,
                false, // allow source only.
                pkg -> {
                    for (Repository repository : repositoryService.getRepositoriesForPkg(context, pkg)) {
                        for (Architecture architecture : architectures) {
                            Optional<PkgVersion> pkgVersionOptional = pkgService.getLatestPkgVersionForPkg(
                                    context, pkg, repository, Collections.singletonList(architecture));

                            if (pkgVersionOptional.isPresent()) {
                                int c = 0;
                                cells[c++] = pkg.getName();
                                cells[c++] = pkgVersionOptional.get().getRepositorySource().getRepository().getCode();
                                cells[c++] = architecture.getCode();
                                cells[c++] = pkgVersionOptional.get().toVersionCoordinates().toString();

                                for (NaturalLanguage naturalLanguage : naturalLanguages) {
                                    Optional<PkgVersionLocalization> pkgVersionLocalizationOptional =
                                            pkgVersionOptional.get().getPkgVersionLocalization(naturalLanguage);
                                    cells[c++] = pkgVersionLocalizationOptional.isPresent() ? MARKER : "";
                                }

                                writer.writeNext(cells);
                            }
                        }
                    }

                    jobService.setJobProgressPercent(
                            specification.getGuid(),
                            (int) ((100 * counter.incrementAndGet()) / expectedTotal));

                    // keep going!
                    return true;
                });

        LOGGER.info(
                "did produce pkg version localization coverage spreadsheet report for {} packages in {}ms",
                count, System.currentTimeMillis() - startMs);
    }
}
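Every runner in this listing follows the same outline: register the outbound data against the job with JobService.storeGeneratedData(..), then stream the payload through the ByteSink carried by the returned JobDataWithByteSink. Below is a minimal sketch of that outline, assuming a hypothetical ExampleJobSpecification that carries only a GUID, and using only the calls that appear in the snippets on this page:

@Override
public void run(JobService jobService, ExampleJobSpecification specification)
        throws IOException, JobRunnerException {
    // register the outbound data against the job; the runner then writes its
    // payload through the ByteSink that comes back with the JobDataWithByteSink.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.PLAIN_TEXT_UTF_8.toString());

    try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
         Writer writer = new OutputStreamWriter(outputStream, Charsets.UTF_8)) {
        // hypothetical payload; the real runners write CSV, JSON or tar data here.
        writer.append("example payload\n");
    }
}

The concrete runners differ mainly in the media type passed to storeGeneratedData(..) (CSV, JSON, plain text, tar) and in what they write into the stream.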
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
The class ReferenceDumpExportJobRunner, method run:
@Override
public void run(JobService jobService, ReferenceDumpExportJobSpecification specification) throws IOException {
    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.JSON_UTF_8.toString());

    try (final OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
         final GZIPOutputStream gzipOutputStream = new GZIPOutputStream(outputStream);
         final JsonGenerator jsonGenerator = objectMapper.getFactory().createGenerator(gzipOutputStream)) {

        jsonGenerator.writeStartObject();
        writeInfo(jsonGenerator);
        writeData(jsonGenerator, specification);
        jsonGenerator.writeEndObject();
    }
}
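The dump-export runners wrap the ByteSink's stream in a GZIPOutputStream, so the stored JSON is gzip-compressed. A small consumption-side sketch, assuming the generated data has been saved to a hypothetical file reference-dump.json.gz and using standard Jackson and java.util.zip calls:

ObjectMapper objectMapper = new ObjectMapper();

try (InputStream inputStream = Files.newInputStream(Paths.get("reference-dump.json.gz")); // hypothetical path
     GZIPInputStream gzipInputStream = new GZIPInputStream(inputStream)) {
    // the runner wrote a single top-level object via writeStartObject() / writeEndObject().
    JsonNode rootNode = objectMapper.readTree(gzipInputStream);
    rootNode.fieldNames().forEachRemaining(System.out::println);
}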
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
The class RepositoryDumpExportJobRunner, method run:
@Override
public void run(JobService jobService, RepositoryDumpExportJobSpecification specification) throws IOException, JobRunnerException {
    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.JSON_UTF_8.toString());

    try (final OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
         final GZIPOutputStream gzipOutputStream = new GZIPOutputStream(outputStream);
         final JsonGenerator jsonGenerator = objectMapper.getFactory().createGenerator(gzipOutputStream)) {

        ObjectContext context = serverRuntime.newContext();
        List<Repository> repositories = Repository.getAll(context)
                .stream()
                .filter(_Repository::getActive)
                .collect(Collectors.toList());

        jsonGenerator.writeStartObject();
        writeInfo(jsonGenerator, repositories);
        writeRepositories(jsonGenerator, repositories);
        jsonGenerator.writeEndObject();
    }
}
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
The class TestNumberedLinesJobRunner, method run:
@Override
public void run(JobService jobService, TestNumberedLinesJobSpecification specification) throws IOException, JobRunnerException {
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.PLAIN_TEXT_UTF_8.toString());

    try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openStream();
         Writer writer = new OutputStreamWriter(outputStream, Charsets.UTF_8)) {

        for (int i = 0; i < specification.getLines(); i++) {
            LOGGER.info("written line {}", i);
            writer.append(Integer.toString(i));
            writer.append('\n');
            Uninterruptibles.sleepUninterruptibly(specification.getDelayPerLineMillis(), TimeUnit.MILLISECONDS);
        }
    }
}
Use of org.haiku.haikudepotserver.job.model.JobDataWithByteSink in project haikudepotserver by haiku.
The class AbstractPkgResourceExportArchiveJobRunner, method run:
@Override
public void run(JobService jobService, T specification) throws IOException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);

    Stopwatch stopwatch = Stopwatch.createStarted();
    final ObjectContext context = serverRuntime.newContext();
    int offset = 0;

    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(
            specification.getGuid(), "download", MediaType.TAR.toString());

    try (final OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
         // tars assumed to be compressed
         final GZIPOutputStream gzipOutputStream = new GZIPOutputStream(outputStream);
         final TarArchiveOutputStream tarOutputStream = new TarArchiveOutputStream(gzipOutputStream)) {

        State state = new State();
        state.tarArchiveOutputStream = tarOutputStream;

        SQLTemplate query = createQuery(specification);
        query.setFetchLimit(getBatchSize());
        int countLastQuery;

        do {
            query.setFetchOffset(offset);
            List<DataRow> queryResults = context.performQuery(query);
            countLastQuery = queryResults.size();
            appendFromRawRows(state, queryResults);
            offset += countLastQuery;

            if (0 == offset % 100) {
                LOGGER.debug("processed {} entries", offset + 1);
            }
        } while (countLastQuery > 0);

        appendArchiveInfo(state);
    }

    LOGGER.info("did produce report for {} entries in {}ms", offset, stopwatch.elapsed(TimeUnit.MILLISECONDS));
}
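AbstractPkgResourceExportArchiveJobRunner delegates the per-row work to appendFromRawRows(..) and appendArchiveInfo(..), which are not shown in this listing. Purely as an illustration of how a subclass might add one entry to the TarArchiveOutputStream held in State (the entry name and payload handling here are assumptions, not the project's actual code):

// Illustrative sketch only: appending a single entry to the tar stream.
private void appendEntry(State state, String entryName, byte[] payload) throws IOException {
    TarArchiveEntry tarEntry = new TarArchiveEntry(entryName);
    tarEntry.setSize(payload.length);
    state.tarArchiveOutputStream.putArchiveEntry(tarEntry);
    state.tarArchiveOutputStream.write(payload);
    state.tarArchiveOutputStream.closeArchiveEntry();
}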