Search in sources :

Example 1 with JobRunnerException

use of org.haiku.haikudepotserver.job.model.JobRunnerException in project haikudepotserver by haiku.

The class PkgScreenshotImportArchiveJobRunner, method run.

/**
 * <p>Imports package screenshots from a supplied tar.gz archive, writing a CSV report
 * of what was done as the job's generated output data.</p>
 *
 * <p>The work happens in two sweeps over the archive; the first collects metadata about
 * the screenshots present, the second imports those that are not already persisted. When
 * the REPLACE strategy is chosen, persisted screenshots absent from the archive are
 * deleted first.</p>
 *
 * @throws JobRunnerException if the transactional body did not complete successfully.
 */
@Override
public void run(JobService jobService, PkgScreenshotImportArchiveJobSpecification specification) throws IOException, JobRunnerException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);
    Preconditions.checkArgument(null != specification.getInputDataGuid(), "missing input data guid on specification");
    Preconditions.checkArgument(null != specification.getImportStrategy(), "missing import strategy on specification");
    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());
    Optional<JobDataWithByteSource> jobDataWithByteSourceOptional = jobService.tryObtainData(specification.getInputDataGuid());
    if (jobDataWithByteSourceOptional.isEmpty()) {
        throw new IllegalStateException("the job data was not able to be found for guid; " + specification.getInputDataGuid());
    }
    if (!serverRuntime.performInTransaction(() -> {
        try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
            CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) {
            Map<String, ScreenshotImportMetadatas> metadatas = new HashMap<>();
            writer.writeNext(new String[] { "path", "pkg-name", "action", "message", "code" });
            // sweep through and collect meta-data about the packages in the tar file.
            // note: no argument here -- `metadatas` is empty at this point and the prior
            // message had no placeholder for it anyway.
            LOGGER.info("will collect data about packages' screenshots from the archive");
            consumeScreenshotArchiveEntries(jobDataWithByteSourceOptional.get().getByteSource(), (ae) -> collectScreenshotMetadataFromArchive(metadatas, ae.getArchiveInputStream(), ae.getArchiveEntry(), ae.getPkgName(), ae.getOrder()));
            LOGGER.info("did collect data about {} packages' screenshots from the archive", metadatas.size());
            LOGGER.info("will collect data about persisted packages' screenshots");
            collectPersistedScreenshotMetadata(metadatas);
            LOGGER.info("did collect data about persisted packages' screenshots");
            if (specification.getImportStrategy() == PkgScreenshotImportArchiveJobSpecification.ImportStrategy.REPLACE) {
                LOGGER.info("will delete persisted screenshots that are absent from the archive");
                int deleted = deletePersistedScreenshotsThatAreNotPresentInArchiveAndReport(writer, metadatas.values());
                LOGGER.info("did delete {} persisted screenshots that are absent from the archive", deleted);
            }
            blendInArtificialOrderings(metadatas.values());
            // sweep through the archive again and load in those screenshots that are not already present.
            // The ordering of the inbound data should be preserved.
            // note: `{}` placeholders added; previously the size argument was silently dropped.
            LOGGER.info("will load screenshots from archive for {} packages", metadatas.size());
            consumeScreenshotArchiveEntries(jobDataWithByteSourceOptional.get().getByteSource(), (ae) -> importScreenshotsFromArchiveAndReport(writer, metadatas.get(ae.getPkgName()), ae.getArchiveInputStream(), ae.getArchiveEntry(), ae.getPkgName(), ae.getOrder()));
            LOGGER.info("did load screenshots from archive for {} packages", metadatas.size());
            return true;
        } catch (IOException e) {
            LOGGER.error("unable to complete the job", e);
        }
        return false;
    })) {
        throw new JobRunnerException("unable to complete job");
    }
}
Also used : ObjectContext(org.apache.cayenne.ObjectContext) java.util(java.util) GZIPInputStream(java.util.zip.GZIPInputStream) ArchiveEntry(org.apache.commons.compress.archivers.ArchiveEntry) TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) PkgScreenshotImage(org.haiku.haikudepotserver.dataobjects.PkgScreenshotImage) PkgScreenshotService(org.haiku.haikudepotserver.pkg.model.PkgScreenshotService) LoggerFactory(org.slf4j.LoggerFactory) Hashing(com.google.common.hash.Hashing) HashingInputStream(com.google.common.hash.HashingInputStream) BadPkgScreenshotException(org.haiku.haikudepotserver.pkg.model.BadPkgScreenshotException) Matcher(java.util.regex.Matcher) JobDataWithByteSource(org.haiku.haikudepotserver.job.model.JobDataWithByteSource) JobDataWithByteSink(org.haiku.haikudepotserver.job.model.JobDataWithByteSink) ArchiveInputStream(org.apache.commons.compress.archivers.ArchiveInputStream) JobService(org.haiku.haikudepotserver.job.model.JobService) ByteSource(com.google.common.io.ByteSource) MediaType(com.google.common.net.MediaType) Pkg(org.haiku.haikudepotserver.dataobjects.Pkg) Logger(org.slf4j.Logger) HashCode(com.google.common.hash.HashCode) AbstractJobRunner(org.haiku.haikudepotserver.job.AbstractJobRunner) CSVWriter(com.opencsv.CSVWriter) PkgScreenshotImportArchiveJobSpecification(org.haiku.haikudepotserver.pkg.model.PkgScreenshotImportArchiveJobSpecification) PkgScreenshot(org.haiku.haikudepotserver.dataobjects.PkgScreenshot) Consumer(java.util.function.Consumer) Component(org.springframework.stereotype.Component) java.io(java.io) ByteStreams(com.google.common.io.ByteStreams) Preconditions(com.google.common.base.Preconditions) Pattern(java.util.regex.Pattern) HashFunction(com.google.common.hash.HashFunction) JobRunnerException(org.haiku.haikudepotserver.job.model.JobRunnerException) ServerRuntime(org.apache.cayenne.configuration.server.ServerRuntime) 
JobRunnerException(org.haiku.haikudepotserver.job.model.JobRunnerException) JobDataWithByteSource(org.haiku.haikudepotserver.job.model.JobDataWithByteSource) CSVWriter(com.opencsv.CSVWriter) JobDataWithByteSink(org.haiku.haikudepotserver.job.model.JobDataWithByteSink)

Example 2 with JobRunnerException

use of org.haiku.haikudepotserver.job.model.JobRunnerException in project haikudepotserver by haiku.

The class PkgScreenshotOptimizationJobRunner, method run.

/**
 * <p>Optimizes the images of the screenshots identified by code on the specification.
 * Each screenshot is processed in its own {@link ObjectContext}; changes are committed
 * only when the optimization actually altered the screenshot.</p>
 *
 * @throws JobRunnerException if a screenshot image was not able to be processed.
 */
@Override
public void run(JobService jobService, PkgScreenshotOptimizationJobSpecification specification) throws JobRunnerException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);
    long startMs = System.currentTimeMillis();
    LOGGER.info("will optimize {} screenshot images", specification.getPkgScreenshotCodes().size());
    for (String pkgScreenshotCode : specification.getPkgScreenshotCodes()) {
        ObjectContext context = serverRuntime.newContext();
        Optional<PkgScreenshot> pkgScreenshotOptional = PkgScreenshot.tryGetByCode(context, pkgScreenshotCode);
        if (pkgScreenshotOptional.isPresent()) {
            try {
                if (screenshotService.optimizeScreenshot(context, pkgScreenshotOptional.get())) {
                    context.commitChanges();
                }
            } catch (IOException ioe) {
                throw new UncheckedIOException(ioe);
            } catch (BadPkgScreenshotException bpse) {
                throw new JobRunnerException("unable to process a screenshot image", bpse);
            }
        }
    }
    // summary log moved out of the loop; previously it fired once per screenshot,
    // misleadingly reporting the full count and cumulative elapsed time on every iteration.
    LOGGER.info("did optimize {} screenshot images in {}ms", specification.getPkgScreenshotCodes().size(), System.currentTimeMillis() - startMs);
}
Also used : BadPkgScreenshotException(org.haiku.haikudepotserver.pkg.model.BadPkgScreenshotException) JobRunnerException(org.haiku.haikudepotserver.job.model.JobRunnerException) PkgScreenshot(org.haiku.haikudepotserver.dataobjects.PkgScreenshot) UncheckedIOException(java.io.UncheckedIOException) ObjectContext(org.apache.cayenne.ObjectContext) IOException(java.io.IOException) UncheckedIOException(java.io.UncheckedIOException)

Example 3 with JobRunnerException

use of org.haiku.haikudepotserver.job.model.JobRunnerException in project haikudepotserver by haiku.

The class PkgCategoryCoverageImportSpreadsheetJobRunner, method run.

/**
 * <p>Reads a CSV spreadsheet of package-to-category assignments from the job's input
 * data, applies the category changes to each listed package and writes back a CSV
 * report that mirrors the input rows with an action/result column appended.</p>
 *
 * @throws JobRunnerException if the input spreadsheet's header row is absent or malformed.
 */
@Override
public void run(JobService jobService, PkgCategoryCoverageImportSpreadsheetJobSpecification specification) throws IOException, JobRunnerException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);
    // typo "imput" -> "input" fixed in the message below.
    Preconditions.checkArgument(null != specification.getInputDataGuid(), "missing input data guid on specification");
    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());
    // if there is input data then feed it in and process it to manipulate the packages'
    // categories.
    Optional<JobDataWithByteSource> jobDataWithByteSourceOptional = jobService.tryObtainData(specification.getInputDataGuid());
    if (jobDataWithByteSourceOptional.isEmpty()) {
        throw new IllegalStateException("the job data was not able to be found for guid; " + specification.getInputDataGuid());
    }
    try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
        OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
        CSVWriter writer = new CSVWriter(outputStreamWriter, ',');
        InputStream inputStream = jobDataWithByteSourceOptional.get().getByteSource().openStream();
        InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
        CSVReader reader = new CSVReader(inputStreamReader)) {
        // headers
        List<String> pkgCategoryCodes = getPkgCategoryCodes();
        String[] headings = getHeadingRow(pkgCategoryCodes);
        // read in the first row of the input and check the headings are there to quasi-validate
        // that the input is not some random rubbish.
        String[] headerRow = reader.readNext();
        // readNext() returns null on an empty input; guard against that before the
        // length comparison, which would otherwise throw an NPE.
        if (null == headerRow || headings.length != headerRow.length) {
            throw new JobRunnerException("wrong number of header columns in input");
        }
        if (!Arrays.equals(headerRow, headings)) {
            throw new JobRunnerException("mismatched input headers");
        }
        writer.writeNext(headings);
        serverRuntime.performInTransaction(() -> {
            try {
                String[] row;
                while (null != (row = reader.readNext())) {
                    if (0 != row.length) {
                        ObjectContext rowContext = serverRuntime.newContext();
                        Action action = Action.NOACTION;
                        if (row.length < headings.length - 1) {
                            // -1 because it is possible to omit the action column.
                            action = Action.INVALID;
                            LOGGER.warn("inconsistent number of cells on line");
                        } else {
                            String pkgName = row[0];
                            // 1; display
                            boolean isNone = AbstractJobRunner.MARKER.equals(row[COLUMN_NONE]);
                            Optional<Pkg> pkgOptional = Pkg.tryGetByName(rowContext, pkgName);
                            List<String> selectedPkgCategoryCodes = new ArrayList<>();
                            if (pkgOptional.isPresent()) {
                                for (int i = 0; i < pkgCategoryCodes.size(); i++) {
                                    if (AbstractJobRunner.MARKER.equals(row[COLUMN_NONE + 1 + i].trim())) {
                                        if (isNone) {
                                            action = Action.INVALID;
                                            LOGGER.warn("line for package {} has 'none' marked as well as an actual category", row[0]);
                                        }
                                        selectedPkgCategoryCodes.add(pkgCategoryCodes.get(i));
                                    }
                                }
                                if (action == Action.NOACTION) {
                                    List<PkgCategory> selectedPkgCategories = PkgCategory.getByCodes(rowContext, selectedPkgCategoryCodes);
                                    if (selectedPkgCategories.size() != selectedPkgCategoryCodes.size()) {
                                        throw new IllegalStateException("one or more of the package category codes was not able to be found");
                                    }
                                    if (pkgService.updatePkgCategories(rowContext, pkgOptional.get(), selectedPkgCategories)) {
                                        action = Action.UPDATED;
                                        rowContext.commitChanges();
                                        LOGGER.debug("did update for package {}", row[0]);
                                    }
                                }
                            } else {
                                action = Action.NOTFOUND;
                                LOGGER.debug("unable to find the package for {}", row[0]);
                            }
                        }
                        // copy the row back verbatim, but with the action result at the
                        // end.
                        List<String> rowOutput = new ArrayList<>();
                        Collections.addAll(rowOutput, row);
                        while (rowOutput.size() < headings.length) {
                            rowOutput.add("");
                        }
                        rowOutput.remove(rowOutput.size() - 1);
                        rowOutput.add(action.name());
                        writer.writeNext(rowOutput.toArray(new String[0]));
                    }
                }
            } catch (Throwable th) {
                LOGGER.error("a problem has arisen importing package categories from a spreadsheet", th);
            }
            return null;
        });
    }
}
Also used : JobRunnerException(org.haiku.haikudepotserver.job.model.JobRunnerException) JobDataWithByteSource(org.haiku.haikudepotserver.job.model.JobDataWithByteSource) CSVReader(com.opencsv.CSVReader) PkgCategory(org.haiku.haikudepotserver.dataobjects.PkgCategory) CSVWriter(com.opencsv.CSVWriter) Pkg(org.haiku.haikudepotserver.dataobjects.Pkg) JobDataWithByteSink(org.haiku.haikudepotserver.job.model.JobDataWithByteSink) ObjectContext(org.apache.cayenne.ObjectContext)

Example 4 with JobRunnerException

use of org.haiku.haikudepotserver.job.model.JobRunnerException in project haikudepotserver by haiku.

The class PkgIconImportArchiveJobRunner, method run.

/**
 * <p>Imports package icons from a supplied tar.gz archive. The archive is streamed
 * through twice inside a single transaction; first to clear the existing icons of
 * any package appearing in the archive and then to load the replacement icons. A
 * CSV report of the actions taken is registered as the job's generated output.</p>
 *
 * @throws JobRunnerException if the transactional body did not complete successfully.
 */
@Override
public void run(JobService jobService, PkgIconImportArchiveJobSpecification specification) throws IOException, JobRunnerException {
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);
    Preconditions.checkArgument(null != specification.getInputDataGuid(), "missing input data guid on specification");
    // this will register the outbound data against the job.
    JobDataWithByteSink reportSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());
    JobDataWithByteSource archiveSource = jobService.tryObtainData(specification.getInputDataGuid())
            .orElseThrow(() -> new IllegalStateException("the job data was not able to be found for guid; " + specification.getInputDataGuid()));
    boolean didComplete = serverRuntime.performInTransaction(() -> {
        try (OutputStream outputStream = reportSink.getByteSink().openBufferedStream();
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
            CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) {
            writer.writeNext(new String[] { "path", "action", "message" });
            // first pass over the archive; clear existing icons for the packages present.
            try (InputStream inputStream = archiveSource.getByteSource().openStream();
                GZIPInputStream gzipInputStream = new GZIPInputStream(inputStream);
                TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(gzipInputStream)) {
                clearPackagesIconsAppearingInArchive(tarArchiveInputStream, writer);
            }
            // second pass over the archive; load in the replacement icons.
            try (InputStream inputStream = archiveSource.getByteSource().openStream();
                GZIPInputStream gzipInputStream = new GZIPInputStream(inputStream);
                TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(gzipInputStream)) {
                processEntriesFromArchive(tarArchiveInputStream, writer);
            }
            return true;
        } catch (IOException e) {
            LOGGER.error("unable to complete job; ", e);
        }
        return false;
    });
    if (!didComplete) {
        throw new JobRunnerException("unable to complete job");
    }
}
Also used : JobDataWithByteSink(org.haiku.haikudepotserver.job.model.JobDataWithByteSink) GZIPInputStream(java.util.zip.GZIPInputStream) TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) JobRunnerException(org.haiku.haikudepotserver.job.model.JobRunnerException) JobDataWithByteSource(org.haiku.haikudepotserver.job.model.JobDataWithByteSource) GZIPInputStream(java.util.zip.GZIPInputStream) TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) ArchiveInputStream(org.apache.commons.compress.archivers.ArchiveInputStream) InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) CSVWriter(com.opencsv.CSVWriter) OutputStreamWriter(java.io.OutputStreamWriter) IOException(java.io.IOException)

Example 5 with JobRunnerException

use of org.haiku.haikudepotserver.job.model.JobRunnerException in project haikudepotserver by haiku.

The class AuthorizationRulesSpreadsheetJobRunner, method run.

/**
 * <p>Produces a CSV spreadsheet of the user-package permission rules, ordered by the
 * user's nickname and then the permission code, registering the output data against
 * the supplied job. Rows are streamed in batches of 50 to bound memory use.</p>
 */
@Override
public void run(JobService jobService, AuthorizationRulesSpreadsheetJobSpecification specification) throws IOException, JobRunnerException {
    // argument checks added for consistency with the other job runners in this project.
    Preconditions.checkArgument(null != jobService);
    Preconditions.checkArgument(null != specification);
    final ObjectContext context = serverRuntime.newContext();
    DateTimeFormatter dateTimeFormatter = DateTimeHelper.createStandardDateTimeFormat();
    // this will register the outbound data against the job.
    JobDataWithByteSink jobDataWithByteSink = jobService.storeGeneratedData(specification.getGuid(), "download", MediaType.CSV_UTF_8.toString());
    try (OutputStream outputStream = jobDataWithByteSink.getByteSink().openBufferedStream();
        OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
        CSVWriter writer = new CSVWriter(outputStreamWriter, ',')) {
        writer.writeNext(new String[] { "create-timestamp", "user-nickname", "user-active", "permission-code", "permission-name", "pkg-name" });
        ObjectSelect<PermissionUserPkg> objectSelect = ObjectSelect.query(PermissionUserPkg.class).orderBy(PermissionUserPkg.USER.dot(User.NICKNAME).asc(), PermissionUserPkg.PERMISSION.dot(Permission.CODE).asc());
        try (ResultBatchIterator<PermissionUserPkg> batchIterator = objectSelect.batchIterator(context, 50)) {
            batchIterator.forEach((pups) -> pups.forEach((pup) -> writer.writeNext(new String[] { dateTimeFormatter.format(Instant.ofEpochMilli(pup.getCreateTimestamp().getTime())), pup.getUser().getNickname(), Boolean.toString(pup.getUser().getActive()), pup.getPermission().getCode(), pup.getPermission().getName(), null != pup.getPkg() ? pup.getPkg().getName() : "" })));
        }
        writer.flush();
        outputStreamWriter.flush();
    }
}
Also used : JobDataWithByteSink(org.haiku.haikudepotserver.job.model.JobDataWithByteSink) OutputStream(java.io.OutputStream) MediaType(com.google.common.net.MediaType) ObjectContext(org.apache.cayenne.ObjectContext) AbstractJobRunner(org.haiku.haikudepotserver.job.AbstractJobRunner) Resource(javax.annotation.Resource) CSVWriter(com.opencsv.CSVWriter) ResultBatchIterator(org.apache.cayenne.ResultBatchIterator) IOException(java.io.IOException) Instant(java.time.Instant) AuthorizationRulesSpreadsheetJobSpecification(org.haiku.haikudepotserver.security.model.AuthorizationRulesSpreadsheetJobSpecification) Component(org.springframework.stereotype.Component) Permission(org.haiku.haikudepotserver.dataobjects.Permission) DateTimeFormatter(java.time.format.DateTimeFormatter) PermissionUserPkg(org.haiku.haikudepotserver.dataobjects.PermissionUserPkg) JobDataWithByteSink(org.haiku.haikudepotserver.job.model.JobDataWithByteSink) OutputStreamWriter(java.io.OutputStreamWriter) Preconditions(com.google.common.base.Preconditions) ObjectSelect(org.apache.cayenne.query.ObjectSelect) User(org.haiku.haikudepotserver.dataobjects.User) JobService(org.haiku.haikudepotserver.job.model.JobService) JobRunnerException(org.haiku.haikudepotserver.job.model.JobRunnerException) ServerRuntime(org.apache.cayenne.configuration.server.ServerRuntime) DateTimeHelper(org.haiku.haikudepotserver.support.DateTimeHelper) OutputStream(java.io.OutputStream) CSVWriter(com.opencsv.CSVWriter) OutputStreamWriter(java.io.OutputStreamWriter) ObjectContext(org.apache.cayenne.ObjectContext) DateTimeFormatter(java.time.format.DateTimeFormatter) PermissionUserPkg(org.haiku.haikudepotserver.dataobjects.PermissionUserPkg)

Aggregations

JobRunnerException (org.haiku.haikudepotserver.job.model.JobRunnerException)5 CSVWriter (com.opencsv.CSVWriter)4 ObjectContext (org.apache.cayenne.ObjectContext)4 JobDataWithByteSink (org.haiku.haikudepotserver.job.model.JobDataWithByteSink)4 IOException (java.io.IOException)3 JobDataWithByteSource (org.haiku.haikudepotserver.job.model.JobDataWithByteSource)3 Preconditions (com.google.common.base.Preconditions)2 MediaType (com.google.common.net.MediaType)2 OutputStream (java.io.OutputStream)2 OutputStreamWriter (java.io.OutputStreamWriter)2 GZIPInputStream (java.util.zip.GZIPInputStream)2 ServerRuntime (org.apache.cayenne.configuration.server.ServerRuntime)2 ArchiveInputStream (org.apache.commons.compress.archivers.ArchiveInputStream)2 TarArchiveInputStream (org.apache.commons.compress.archivers.tar.TarArchiveInputStream)2 Pkg (org.haiku.haikudepotserver.dataobjects.Pkg)2 PkgScreenshot (org.haiku.haikudepotserver.dataobjects.PkgScreenshot)2 AbstractJobRunner (org.haiku.haikudepotserver.job.AbstractJobRunner)2 JobService (org.haiku.haikudepotserver.job.model.JobService)2 BadPkgScreenshotException (org.haiku.haikudepotserver.pkg.model.BadPkgScreenshotException)2 Component (org.springframework.stereotype.Component)2