
Example 16 with IntFunction

use of java.util.function.IntFunction in project beam by apache.

the class MongoDbTable method translateRexNodeToBson.

/**
 * Recursively translates a single RexNode to MongoDB Bson filter. Supports simple comparison
 * operations, negation, and nested conjunction/disjunction. Boolean fields are translated as an
 * `$eq` operation with a boolean `true`.
 *
 * @param node {@code RexNode} to translate.
 * @return {@code Bson} filter.
 */
private Bson translateRexNodeToBson(RexNode node) {
    final IntFunction<String> fieldIdToName = i -> getSchema().getField(i).getName();
    // Supported operations are described in MongoDbFilter#isSupported
    if (node instanceof RexCall) {
        RexCall compositeNode = (RexCall) node;
        List<RexLiteral> literals = new ArrayList<>();
        List<RexInputRef> inputRefs = new ArrayList<>();
        for (RexNode operand : compositeNode.getOperands()) {
            if (operand instanceof RexLiteral) {
                literals.add((RexLiteral) operand);
            } else if (operand instanceof RexInputRef) {
                inputRefs.add((RexInputRef) operand);
            }
        }
        // Operation is a comparison, since one of the operands is a field reference.
        if (inputRefs.size() == 1) {
            RexInputRef inputRef = inputRefs.get(0);
            String inputFieldName = fieldIdToName.apply(inputRef.getIndex());
            if (literals.size() > 0) {
                // Convert literal value to the same Java type as the field we are comparing to.
                Object literal = convertToExpectedType(inputRef, literals.get(0));
                switch(node.getKind()) {
                    case IN:
                        return Filters.in(inputFieldName, convertToExpectedType(inputRef, literals));
                    case EQUALS:
                        return Filters.eq(inputFieldName, literal);
                    case NOT_EQUALS:
                        return Filters.not(Filters.eq(inputFieldName, literal));
                    case LESS_THAN:
                        return Filters.lt(inputFieldName, literal);
                    case GREATER_THAN:
                        return Filters.gt(inputFieldName, literal);
                    case GREATER_THAN_OR_EQUAL:
                        return Filters.gte(inputFieldName, literal);
                    case LESS_THAN_OR_EQUAL:
                        return Filters.lte(inputFieldName, literal);
                    default:
                        // Encountered an unexpected node kind, RuntimeException below.
                        break;
                }
            } else if (node.getKind().equals(SqlKind.NOT)) {
                // Ex: `where not boolean_field`
                return Filters.not(translateRexNodeToBson(inputRef));
            } else {
                throw new RuntimeException("Cannot create a filter for an unsupported node: " + node.toString());
            }
        } else {
            // Operation is a conjunction/disjunction.
            switch(node.getKind()) {
                case AND:
                    // Recursively construct filter for each operand of conjunction.
                    return Filters.and(compositeNode.getOperands().stream().map(this::translateRexNodeToBson).collect(Collectors.toList()));
                case OR:
                    // Recursively construct filter for each operand of disjunction.
                    return Filters.or(compositeNode.getOperands().stream().map(this::translateRexNodeToBson).collect(Collectors.toList()));
                default:
                    // Encountered an unexpected node kind, RuntimeException below.
                    break;
            }
        }
        throw new RuntimeException("Encountered an unexpected node kind: " + node.getKind().toString());
    } else if (node instanceof RexInputRef && node.getType().getSqlTypeName().equals(SqlTypeName.BOOLEAN)) {
        // Boolean field, must be true. Ex: `select * from table where bool_field`
        return Filters.eq(fieldIdToName.apply(((RexInputRef) node).getIndex()), true);
    }
    throw new RuntimeException("Was expecting a RexCall or a boolean RexInputRef, but received: " + node.getClass().getSimpleName());
}
Also used : Document(org.bson.Document) PBegin(org.apache.beam.sdk.values.PBegin) LoggerFactory(org.slf4j.LoggerFactory) MongoDbIO(org.apache.beam.sdk.io.mongodb.MongoDbIO) IsBounded(org.apache.beam.sdk.values.PCollection.IsBounded) JsonToRow(org.apache.beam.sdk.transforms.JsonToRow) SimpleFunction(org.apache.beam.sdk.transforms.SimpleFunction) ProjectSupport(org.apache.beam.sdk.extensions.sql.meta.ProjectSupport) Table(org.apache.beam.sdk.extensions.sql.meta.Table) Matcher(java.util.regex.Matcher) AND(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlKind.AND) RexLiteral(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexLiteral) MapElements(org.apache.beam.sdk.transforms.MapElements) SchemaBaseBeamTable(org.apache.beam.sdk.extensions.sql.meta.SchemaBaseBeamTable) FieldType(org.apache.beam.sdk.schemas.Schema.FieldType) Collectors(java.util.stream.Collectors) FindQuery(org.apache.beam.sdk.io.mongodb.FindQuery) Serializable(java.io.Serializable) InvalidTableException(org.apache.beam.sdk.extensions.sql.meta.provider.InvalidTableException) VisibleForTesting(org.apache.beam.vendor.calcite.v1_28_0.com.google.common.annotations.VisibleForTesting) COMPARISON(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlKind.COMPARISON) POutput(org.apache.beam.sdk.values.POutput) List(java.util.List) ParDo(org.apache.beam.sdk.transforms.ParDo) BeamTableStatistics(org.apache.beam.sdk.extensions.sql.impl.BeamTableStatistics) ImmutableList(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList) Pattern(java.util.regex.Pattern) Experimental(org.apache.beam.sdk.annotations.Experimental) RexNode(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexNode) JsonWriterSettings(org.bson.json.JsonWriterSettings) SqlKind(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlKind) SelectHelpers(org.apache.beam.sdk.schemas.utils.SelectHelpers) ArrayList(java.util.ArrayList) PTransform(org.apache.beam.sdk.transforms.PTransform) Filters(com.mongodb.client.model.Filters) DefaultTableFilter(org.apache.beam.sdk.extensions.sql.meta.DefaultTableFilter) Bson(org.bson.conversions.Bson) ToJson(org.apache.beam.sdk.transforms.ToJson) JsonMode(org.bson.json.JsonMode) FieldAccessDescriptor(org.apache.beam.sdk.schemas.FieldAccessDescriptor) Row(org.apache.beam.sdk.values.Row) PipelineOptions(org.apache.beam.sdk.options.PipelineOptions) IntFunction(java.util.function.IntFunction) FieldTypeDescriptors(org.apache.beam.sdk.schemas.FieldTypeDescriptors) DoFn(org.apache.beam.sdk.transforms.DoFn) Logger(org.slf4j.Logger) BeamSqlTableFilter(org.apache.beam.sdk.extensions.sql.meta.BeamSqlTableFilter) PCollection(org.apache.beam.sdk.values.PCollection) Schema(org.apache.beam.sdk.schemas.Schema) RexCall(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexCall) SqlTypeName(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeName) RexInputRef(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexInputRef) OR(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlKind.OR) RexCall(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexCall) RexLiteral(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexLiteral) ArrayList(java.util.ArrayList) RexInputRef(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexInputRef) RexNode(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexNode)
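
A minimal standalone sketch of the same idea (the field names and values below are hypothetical, not taken from the Beam schema): an IntFunction<String> resolves a field index to its name while Filters composes the comparison branch and the boolean-field branch seen in translateRexNodeToBson above.

import com.mongodb.client.model.Filters;
import org.bson.conversions.Bson;

import java.util.List;
import java.util.function.IntFunction;

public class FieldIndexFilterSketch {
    public static void main(String[] args) {
        // Hypothetical schema: index 0 -> "age", index 1 -> "active".
        List<String> fieldNames = List.of("age", "active");
        IntFunction<String> fieldIdToName = fieldNames::get;

        // Roughly the filter for `WHERE age > 21 AND active`, mirroring the
        // comparison branch and the boolean-field branch above.
        Bson filter = Filters.and(
                Filters.gt(fieldIdToName.apply(0), 21),
                Filters.eq(fieldIdToName.apply(1), true));

        System.out.println(filter);
    }
}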

Example 17 with IntFunction

use of java.util.function.IntFunction in project beam by apache.

the class BigQueryTable method generateRowRestrictions.

private String generateRowRestrictions(Schema schema, List<RexNode> supported) {
    assert !supported.isEmpty();
    final IntFunction<SqlNode> field = i -> new SqlIdentifier(schema.getField(i).getName(), SqlParserPos.ZERO);
    // TODO: BigQuerySqlDialectWithTypeTranslation can be replaced with BigQuerySqlDialect after
    // updating vendor Calcite version.
    SqlImplementor.Context context = new BeamSqlUnparseContext(field);
    // Create a single SqlNode from a list of RexNodes
    SqlNode andSqlNode = null;
    for (RexNode node : supported) {
        SqlNode sqlNode = context.toSql(null, node);
        if (andSqlNode == null) {
            andSqlNode = sqlNode;
            continue;
        }
        // AND operator must have exactly 2 operands.
        andSqlNode = SqlStdOperatorTable.AND.createCall(SqlParserPos.ZERO, ImmutableList.of(andSqlNode, sqlNode));
    }
    return andSqlNode.toSqlString(BeamBigQuerySqlDialect.DEFAULT).getSql();
}
Also used : Arrays(java.util.Arrays) PBegin(org.apache.beam.sdk.values.PBegin) Experimental(org.apache.beam.sdk.annotations.Experimental) RexNode(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexNode) SqlIdentifier(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier) LoggerFactory(org.slf4j.LoggerFactory) ConversionOptions(org.apache.beam.sdk.io.gcp.bigquery.BigQueryUtils.ConversionOptions) BigQueryOptions(org.apache.beam.sdk.io.gcp.bigquery.BigQueryOptions) SqlImplementor(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.rel2sql.SqlImplementor) SqlParserPos(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.parser.SqlParserPos) SelectHelpers(org.apache.beam.sdk.schemas.utils.SelectHelpers) DefaultTableFilter(org.apache.beam.sdk.extensions.sql.meta.DefaultTableFilter) ProjectSupport(org.apache.beam.sdk.extensions.sql.meta.ProjectSupport) Table(org.apache.beam.sdk.extensions.sql.meta.Table) FieldAccessDescriptor(org.apache.beam.sdk.schemas.FieldAccessDescriptor) BigInteger(java.math.BigInteger) Row(org.apache.beam.sdk.values.Row) PipelineOptions(org.apache.beam.sdk.options.PipelineOptions) SqlNode(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlNode) IntFunction(java.util.function.IntFunction) SchemaBaseBeamTable(org.apache.beam.sdk.extensions.sql.meta.SchemaBaseBeamTable) BigQueryHelpers(org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers) Logger(org.slf4j.Logger) BigQueryIO(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO) SchemaCoder(org.apache.beam.sdk.schemas.SchemaCoder) BeamSqlTableFilter(org.apache.beam.sdk.extensions.sql.meta.BeamSqlTableFilter) SqlStdOperatorTable(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.fun.SqlStdOperatorTable) IOException(java.io.IOException) Method(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TypedRead.Method) BigQueryUtils(org.apache.beam.sdk.io.gcp.bigquery.BigQueryUtils) PCollection(org.apache.beam.sdk.values.PCollection) Collectors(java.util.stream.Collectors) Schema(org.apache.beam.sdk.schemas.Schema) Serializable(java.io.Serializable) TypedRead(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TypedRead) VisibleForTesting(org.apache.beam.vendor.calcite.v1_28_0.com.google.common.annotations.VisibleForTesting) POutput(org.apache.beam.sdk.values.POutput) List(java.util.List) WriteDisposition(org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition) BeamTableStatistics(org.apache.beam.sdk.extensions.sql.impl.BeamTableStatistics) ImmutableList(org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList) SqlIdentifier(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier) SqlImplementor(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.rel2sql.SqlImplementor) SqlNode(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlNode) RexNode(org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexNode)
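
To illustrate the folding step without the vendored Calcite classes, here is a plain-Java sketch (field names and predicates are invented): an IntFunction<String> resolves field indices, and the predicates are left-folded into nested two-operand ANDs, the same shape as the loop in generateRowRestrictions.

import java.util.List;
import java.util.function.IntFunction;

public class RowRestrictionSketch {
    public static void main(String[] args) {
        // Hypothetical schema: index 0 -> "id", index 1 -> "name".
        List<String> fieldNames = List.of("id", "name");
        IntFunction<String> field = fieldNames::get;

        // Stand-ins for the translated predicates; the real code produces SqlNodes.
        List<String> predicates = List.of(
                field.apply(0) + " > 100",
                field.apply(1) + " IS NOT NULL",
                field.apply(0) + " < 500");

        // Left-fold into nested binary ANDs; each AND has exactly two operands,
        // just like SqlStdOperatorTable.AND.createCall in the method above.
        String restriction = null;
        for (String predicate : predicates) {
            restriction = (restriction == null)
                    ? predicate
                    : "(" + restriction + " AND " + predicate + ")";
        }
        System.out.println(restriction); // ((id > 100 AND name IS NOT NULL) AND id < 500)
    }
}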

Example 18 with IntFunction

use of java.util.function.IntFunction in project j2objc by google.

the class DistinctOps method makeRef.

/**
 * Appends a "distinct" operation to the provided stream, and returns the
 * new stream.
 *
 * @param <T> the type of both input and output elements
 * @param upstream a reference stream with element type T
 * @return the new stream
 */
static <T> ReferencePipeline<T, T> makeRef(AbstractPipeline<?, T, ?> upstream) {
    return new ReferencePipeline.StatefulOp<T, T>(upstream, StreamShape.REFERENCE, StreamOpFlag.IS_DISTINCT | StreamOpFlag.NOT_SIZED) {

        <P_IN> Node<T> reduce(PipelineHelper<T> helper, Spliterator<P_IN> spliterator) {
            // If the stream is SORTED then it should also be ORDERED so the following will also
            // preserve the sort order
            TerminalOp<T, LinkedHashSet<T>> reduceOp = ReduceOps.<T, LinkedHashSet<T>>makeRef(LinkedHashSet::new, LinkedHashSet::add, LinkedHashSet::addAll);
            return Nodes.node(reduceOp.evaluateParallel(helper, spliterator));
        }

        @Override
        public <P_IN> Node<T> opEvaluateParallel(PipelineHelper<T> helper, Spliterator<P_IN> spliterator, IntFunction<T[]> generator) {
            if (StreamOpFlag.DISTINCT.isKnown(helper.getStreamAndOpFlags())) {
                // No-op
                return helper.evaluate(spliterator, false, generator);
            } else if (StreamOpFlag.ORDERED.isKnown(helper.getStreamAndOpFlags())) {
                return reduce(helper, spliterator);
            } else {
                // Holder of null state since ConcurrentHashMap does not support null values
                AtomicBoolean seenNull = new AtomicBoolean(false);
                ConcurrentHashMap<T, Boolean> map = new ConcurrentHashMap<>();
                TerminalOp<T, Void> forEachOp = ForEachOps.makeRef(t -> {
                    if (t == null)
                        seenNull.set(true);
                    else
                        map.putIfAbsent(t, Boolean.TRUE);
                }, false);
                forEachOp.evaluateParallel(helper, spliterator);
                // If null has been seen then copy the key set into a HashSet that supports null values
                // and add null
                Set<T> keys = map.keySet();
                if (seenNull.get()) {
                    // TODO Implement a more efficient set-union view, rather than copying
                    keys = new HashSet<>(keys);
                    keys.add(null);
                }
                return Nodes.node(keys);
            }
        }

        @Override
        public <P_IN> Spliterator<T> opEvaluateParallelLazy(PipelineHelper<T> helper, Spliterator<P_IN> spliterator) {
            if (StreamOpFlag.DISTINCT.isKnown(helper.getStreamAndOpFlags())) {
                // No-op
                return helper.wrapSpliterator(spliterator);
            } else if (StreamOpFlag.ORDERED.isKnown(helper.getStreamAndOpFlags())) {
                // Not lazy, barrier required to preserve order
                return reduce(helper, spliterator).spliterator();
            } else {
                // Lazy
                return new StreamSpliterators.DistinctSpliterator<>(helper.wrapSpliterator(spliterator));
            }
        }

        @Override
        public Sink<T> opWrapSink(int flags, Sink<T> sink) {
            Objects.requireNonNull(sink);
            if (StreamOpFlag.DISTINCT.isKnown(flags)) {
                return sink;
            } else if (StreamOpFlag.SORTED.isKnown(flags)) {
                return new Sink.ChainedReference<T, T>(sink) {

                    boolean seenNull;

                    T lastSeen;

                    @Override
                    public void begin(long size) {
                        seenNull = false;
                        lastSeen = null;
                        downstream.begin(-1);
                    }

                    @Override
                    public void end() {
                        seenNull = false;
                        lastSeen = null;
                        downstream.end();
                    }

                    @Override
                    public void accept(T t) {
                        if (t == null) {
                            if (!seenNull) {
                                seenNull = true;
                                downstream.accept(lastSeen = null);
                            }
                        } else if (lastSeen == null || !t.equals(lastSeen)) {
                            downstream.accept(lastSeen = t);
                        }
                    }
                };
            } else {
                return new Sink.ChainedReference<T, T>(sink) {

                    Set<T> seen;

                    @Override
                    public void begin(long size) {
                        seen = new HashSet<>();
                        downstream.begin(-1);
                    }

                    @Override
                    public void end() {
                        seen = null;
                        downstream.end();
                    }

                    @Override
                    public void accept(T t) {
                        if (!seen.contains(t)) {
                            seen.add(t);
                            downstream.accept(t);
                        }
                    }
                };
            }
        }
    };
}
Also used : LinkedHashSet(java.util.LinkedHashSet) HashSet(java.util.HashSet) Objects(java.util.Objects) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Spliterator(java.util.Spliterator) LinkedHashSet(java.util.LinkedHashSet) IntFunction(java.util.function.IntFunction) HashSet(java.util.HashSet) Set(java.util.Set) LinkedHashSet(java.util.LinkedHashSet) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) IntFunction(java.util.function.IntFunction) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Spliterator(java.util.Spliterator) HashSet(java.util.HashSet) LinkedHashSet(java.util.LinkedHashSet)
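
The IntFunction<T[]> generator parameter in opEvaluateParallel is the standard stream array-generator shape; an array constructor reference is enough to satisfy it, as this small sketch shows.

import java.util.function.IntFunction;
import java.util.stream.Stream;

public class ArrayGeneratorSketch {
    public static void main(String[] args) {
        // An array constructor reference is an IntFunction<T[]>: given a length,
        // it allocates an array of that size. Stream pipelines hand such a
        // generator to terminal operations, as opEvaluateParallel receives above.
        IntFunction<String[]> generator = String[]::new;

        String[] distinct = Stream.of("a", "b", "a", "c")
                .distinct()
                .toArray(generator);

        System.out.println(String.join(",", distinct)); // a,b,c
    }
}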

Example 19 with IntFunction

use of java.util.function.IntFunction in project GDSC-SMLM by aherbert.

the class PeakFit method runMaximaFitting.

/**
 * Load the selected results from memory. All multiple frame results are added directly to the
 * results. All single frame results are added to a list of candidate maxima per frame and fitted
 * using the configured parameters.
 */
private void runMaximaFitting() {
    final MemoryPeakResults memoryResults = ResultsManager.loadInputResults(settings.inputOption, false, DistanceUnit.PIXEL);
    if (memoryResults == null || memoryResults.size() == 0) {
        log("No results for maxima fitting");
        return;
    }
    // The total frames (for progress reporting)
    int totalFrames;
    // A function that can convert a frame into a set of candidate indices
    final IntFunction<int[]> frameToMaxIndices;
    // The frames to process (should be sorted ascending)
    Supplier<IntStream> frames;
    // Support fitting all time frames with the same results.
    if (settings.fitAcrossAllFrames) {
        // Check if the input spans multiple frames
        if (getSingleFrame(memoryResults) == 0) {
            final int min = memoryResults.getMinFrame();
            final int max = memoryResults.getMaxFrame();
            final GenericDialog gd = new GenericDialog(TITLE);
            gd.enableYesNoCancel();
            gd.hideCancelButton();
            ImageJUtils.addMessage(gd, "Candidate maxima for fitting span multiple frames (%d-%d).\n \n" + "Please confirm the %s are correct.", min, max, TextUtils.pleural(memoryResults.size(), "candidate"));
            gd.showDialog();
            if (!gd.wasOKed()) {
                return;
            }
        }
        final int[] maxIndices = getMaxIndices(Arrays.asList(memoryResults.toArray()));
        // This may not work correctly if using for example a series image source that
        // incorrectly estimates the number of frames
        totalFrames = source.getFrames();
        frameToMaxIndices = frame -> maxIndices;
        frames = () -> IntStream.rangeClosed(1, totalFrames);
    } else {
        // Build a map between the time-frame and the results in that frame.
        final Map<Integer, List<PeakResult>> map =
            Arrays.stream(memoryResults.toArray())
                .parallel()
                .filter(peakResult -> peakResult.getFrame() == peakResult.getEndFrame())
                .collect(Collectors.groupingBy(PeakResult::getFrame));
        totalFrames = map.size();
        // Build a function that can convert a frame into a set of candidate indices
        frameToMaxIndices = frame -> getMaxIndices(map.get(frame));
        frames = () -> map.keySet().stream().mapToInt(Integer::intValue).sorted();
    }
    final ImageStack stack = (extraSettings.showProcessedFrames) ? new ImageStack(bounds.width, bounds.height) : null;
    // Use the FitEngine to allow multi-threading.
    final FitEngine engine = createFitEngine(getNumberOfThreads(totalFrames));
    if (engine == null) {
        return;
    }
    final int step = ImageJUtils.getProgressInterval(totalFrames);
    // No crop bounds are supported.
    // To pre-process data for noise estimation
    final boolean isFitCameraCounts = fitConfig.isFitCameraCounts();
    final CameraModel cameraModel = fitConfig.getCameraModel();
    runTime = System.nanoTime();
    final AtomicBoolean shutdown = new AtomicBoolean();
    final String format = String.format("Slice: %%d / %d (Results=%%d)", totalFrames);
    frames.get().forEachOrdered(slice -> {
        if (shutdown.get() || escapePressed()) {
            shutdown.set(true);
            return;
        }
        final float[] data = source.get(slice);
        if (data == null) {
            shutdown.set(true);
            return;
        }
        if (slice % step == 0) {
            if (ImageJUtils.showStatus(() -> String.format(format, slice, results.size()))) {
                IJ.showProgress(slice, totalFrames);
            }
        }
        // We must pre-process the data before noise estimation
        final float[] data2 = data.clone();
        if (isFitCameraCounts) {
            cameraModel.removeBias(data2);
        } else {
            cameraModel.removeBiasAndGain(data2);
        }
        final float noise = FitWorker.estimateNoise(data2, source.getWidth(), source.getHeight(), config.getNoiseMethod());
        if (stack != null) {
            stack.addSlice(String.format("Frame %d - %d", source.getStartFrameNumber(), source.getEndFrameNumber()), data);
        }
        // Get the frame number from the source to allow for interlaced and aggregated data
        engine.run(createMaximaFitJob(frameToMaxIndices.apply(slice), source.getStartFrameNumber(), source.getEndFrameNumber(), data, bounds, noise));
    });
    engine.end(shutdown.get());
    time = engine.getTime();
    runTime = System.nanoTime() - runTime;
    if (stack != null) {
        ImageJUtils.display("Processed frames", stack);
    }
    showResults();
    source.close();
}
Also used : Color(java.awt.Color) Choice(java.awt.Choice) Arrays(java.util.Arrays) Calibration(uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.Calibration) UnitConverterUtils(uk.ac.sussex.gdsc.smlm.data.config.UnitConverterUtils) ImageProcessor(ij.process.ImageProcessor) ImageSource(uk.ac.sussex.gdsc.smlm.results.ImageSource) Filter(uk.ac.sussex.gdsc.smlm.results.filter.Filter) PSFType(uk.ac.sussex.gdsc.smlm.data.config.PSFProtos.PSFType) StringUtils(org.apache.commons.lang3.StringUtils) ResultsSettings(uk.ac.sussex.gdsc.smlm.data.config.ResultsProtos.ResultsSettings) Panel(java.awt.Panel) Vector(java.util.Vector) Matcher(java.util.regex.Matcher) YesNoCancelDialog(ij.gui.YesNoCancelDialog) MemoryPeakResults(uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults) Map(java.util.Map) FitProtosHelper(uk.ac.sussex.gdsc.smlm.data.config.FitProtosHelper) ImageJImageConverter(uk.ac.sussex.gdsc.smlm.ij.utils.ImageJImageConverter) FilePeakResults(uk.ac.sussex.gdsc.smlm.results.FilePeakResults) EnumSet(java.util.EnumSet) LutHelper(uk.ac.sussex.gdsc.core.ij.process.LutHelper) InputSource(uk.ac.sussex.gdsc.smlm.ij.plugins.ResultsManager.InputSource) DistanceUnit(uk.ac.sussex.gdsc.smlm.data.config.UnitProtos.DistanceUnit) GuiProtosHelper(uk.ac.sussex.gdsc.smlm.ij.settings.GuiProtosHelper) TextUtils(uk.ac.sussex.gdsc.core.utils.TextUtils) Scrollbar(java.awt.Scrollbar) ImagePlus(ij.ImagePlus) CalibrationProtosHelper(uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtosHelper) FitEngineSettings(uk.ac.sussex.gdsc.smlm.data.config.FitProtos.FitEngineSettings) PeakResultProcedureX(uk.ac.sussex.gdsc.smlm.results.procedures.PeakResultProcedureX) Prefs(ij.Prefs) FrameCounter(uk.ac.sussex.gdsc.smlm.results.count.FrameCounter) WindowManager(ij.WindowManager) PeakResult(uk.ac.sussex.gdsc.smlm.results.PeakResult) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) PointRoi(ij.gui.PointRoi) DataFilterMethod(uk.ac.sussex.gdsc.smlm.data.config.FitProtos.DataFilterMethod) GenericDialog(ij.gui.GenericDialog) FitConfiguration(uk.ac.sussex.gdsc.smlm.engine.FitConfiguration) Overlay(ij.gui.Overlay) IntFunction(java.util.function.IntFunction) SeriesOpener(uk.ac.sussex.gdsc.core.ij.SeriesOpener) FitWorker(uk.ac.sussex.gdsc.smlm.engine.FitWorker) FitEngine(uk.ac.sussex.gdsc.smlm.engine.FitEngine) File(java.io.File) AggregatedImageSource(uk.ac.sussex.gdsc.smlm.results.AggregatedImageSource) DirectFilter(uk.ac.sussex.gdsc.smlm.results.filter.DirectFilter) ImageJTablePeakResults(uk.ac.sussex.gdsc.smlm.ij.results.ImageJTablePeakResults) ImageStack(ij.ImageStack) CameraModel(uk.ac.sussex.gdsc.smlm.model.camera.CameraModel) PsfHelper(uk.ac.sussex.gdsc.smlm.data.config.PsfHelper) FitJob(uk.ac.sussex.gdsc.smlm.engine.FitJob) ResultsTableSettings(uk.ac.sussex.gdsc.smlm.data.config.ResultsProtos.ResultsTableSettings) FitTask(uk.ac.sussex.gdsc.smlm.engine.FitParameters.FitTask) ItemListener(java.awt.event.ItemListener) PSFParameter(uk.ac.sussex.gdsc.smlm.data.config.PSFProtos.PSFParameter) FitSolver(uk.ac.sussex.gdsc.smlm.data.config.FitProtos.FitSolver) ResultsImageSettings(uk.ac.sussex.gdsc.smlm.data.config.ResultsProtos.ResultsImageSettings) ImageJPluginLoggerHelper(uk.ac.sussex.gdsc.core.ij.ImageJPluginLoggerHelper) XyResultProcedure(uk.ac.sussex.gdsc.smlm.results.procedures.XyResultProcedure) InterlacedImageSource(uk.ac.sussex.gdsc.smlm.results.InterlacedImageSource) ImageJImagePeakResults(uk.ac.sussex.gdsc.smlm.ij.results.ImageJImagePeakResults) PeakResults(uk.ac.sussex.gdsc.smlm.results.PeakResults) 
MathUtils(uk.ac.sussex.gdsc.core.utils.MathUtils) CalibrationWriter(uk.ac.sussex.gdsc.smlm.data.config.CalibrationWriter) PlugInFilter(ij.plugin.filter.PlugInFilter) PsfProtosHelper(uk.ac.sussex.gdsc.smlm.data.config.PsfProtosHelper) FitParameters(uk.ac.sussex.gdsc.smlm.engine.FitParameters) SettingsManager(uk.ac.sussex.gdsc.smlm.ij.settings.SettingsManager) ItemEvent(java.awt.event.ItemEvent) CameraType(uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.CameraType) ExtendedGenericDialog(uk.ac.sussex.gdsc.core.ij.gui.ExtendedGenericDialog) PeakResultsList(uk.ac.sussex.gdsc.smlm.results.PeakResultsList) TrackProgressAdaptor(uk.ac.sussex.gdsc.core.logging.TrackProgressAdaptor) ResultsImageType(uk.ac.sussex.gdsc.smlm.data.config.ResultsProtos.ResultsImageType) FitEngineConfiguration(uk.ac.sussex.gdsc.smlm.engine.FitEngineConfiguration) OffsetPointRoi(uk.ac.sussex.gdsc.core.ij.gui.OffsetPointRoi) GridBagConstraints(java.awt.GridBagConstraints) Logger(java.util.logging.Logger) Collectors(java.util.stream.Collectors) BitFlagUtils(uk.ac.sussex.gdsc.core.utils.BitFlagUtils) List(java.util.List) SpotFilter(uk.ac.sussex.gdsc.smlm.filters.SpotFilter) LUT(ij.process.LUT) Pattern(java.util.regex.Pattern) FitQueue(uk.ac.sussex.gdsc.smlm.engine.FitQueue) SeriesImageSource(uk.ac.sussex.gdsc.smlm.ij.SeriesImageSource) TypeConverter(uk.ac.sussex.gdsc.core.data.utils.TypeConverter) Roi(ij.gui.Roi) ParameterisedFitJob(uk.ac.sussex.gdsc.smlm.engine.ParameterisedFitJob) IntStream(java.util.stream.IntStream) Rectangle(java.awt.Rectangle) Insets(java.awt.Insets) PrecisionMethod(uk.ac.sussex.gdsc.smlm.data.config.FitProtos.PrecisionMethod) PSFCalculatorSettings(uk.ac.sussex.gdsc.smlm.ij.settings.GUIProtos.PSFCalculatorSettings) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) PSF(uk.ac.sussex.gdsc.smlm.data.config.PSFProtos.PSF) ResultsFileSettings(uk.ac.sussex.gdsc.smlm.data.config.ResultsProtos.ResultsFileSettings) AtomicReference(java.util.concurrent.atomic.AtomicReference) TextField(java.awt.TextField) OptionListener(uk.ac.sussex.gdsc.core.ij.gui.ExtendedGenericDialog.OptionListener) IJImageSource(uk.ac.sussex.gdsc.smlm.ij.IJImageSource) NoiseEstimatorMethod(uk.ac.sussex.gdsc.smlm.data.config.FitProtos.NoiseEstimatorMethod) ResultsProtosHelper(uk.ac.sussex.gdsc.smlm.data.config.ResultsProtosHelper) TemplateSettings(uk.ac.sussex.gdsc.smlm.data.config.TemplateProtos.TemplateSettings) FastMleSteppingFunctionSolver(uk.ac.sussex.gdsc.smlm.fitting.nonlinear.FastMleSteppingFunctionSolver) SystemColor(java.awt.SystemColor) AstigmatismModel(uk.ac.sussex.gdsc.smlm.data.config.PSFProtos.AstigmatismModel) Iterator(java.util.Iterator) Checkbox(java.awt.Checkbox) Label(java.awt.Label) LutColour(uk.ac.sussex.gdsc.core.ij.process.LutHelper.LutColour) CalibrationReader(uk.ac.sussex.gdsc.smlm.data.config.CalibrationReader) TimeUnit(uk.ac.sussex.gdsc.smlm.data.config.UnitProtos.TimeUnit) Counter(uk.ac.sussex.gdsc.smlm.results.count.Counter) ImageJUtils(uk.ac.sussex.gdsc.core.ij.ImageJUtils) IJ(ij.IJ) PerPixelCameraModel(uk.ac.sussex.gdsc.smlm.model.camera.PerPixelCameraModel) CameraModel(uk.ac.sussex.gdsc.smlm.model.camera.CameraModel) PerPixelCameraModel(uk.ac.sussex.gdsc.smlm.model.camera.PerPixelCameraModel) ImageStack(ij.ImageStack) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) FitEngine(uk.ac.sussex.gdsc.smlm.engine.FitEngine) GenericDialog(ij.gui.GenericDialog) ExtendedGenericDialog(uk.ac.sussex.gdsc.core.ij.gui.ExtendedGenericDialog) MemoryPeakResults(uk.ac.sussex.gdsc.smlm.results.MemoryPeakResults) 
ArrayList(java.util.ArrayList) PeakResultsList(uk.ac.sussex.gdsc.smlm.results.PeakResultsList) List(java.util.List) IntStream(java.util.stream.IntStream)
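
A reduced sketch of the per-frame branch (frame numbers and indices are invented): a map from frame to candidate indices backs an IntFunction<int[]>, and the frames are streamed in ascending order, mirroring runMaximaFitting.

import java.util.Map;
import java.util.function.IntFunction;
import java.util.stream.IntStream;

public class FrameToIndicesSketch {
    public static void main(String[] args) {
        // Hypothetical per-frame candidate indices (frame number -> maxima indices).
        Map<Integer, int[]> maximaPerFrame = Map.of(
                1, new int[] {3, 17},
                2, new int[] {5},
                4, new int[] {8, 21, 40});

        // Same shape as the per-frame branch above: a frame number is converted
        // to its candidate indices; missing frames yield an empty array.
        IntFunction<int[]> frameToMaxIndices =
                frame -> maximaPerFrame.getOrDefault(frame, new int[0]);

        // Frames to process, sorted ascending as in the plugin.
        IntStream frames = maximaPerFrame.keySet().stream().mapToInt(Integer::intValue).sorted();
        frames.forEachOrdered(frame ->
                System.out.println(frame + " -> " + frameToMaxIndices.apply(frame).length + " candidates"));
    }
}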

Example 20 with IntFunction

use of java.util.function.IntFunction in project alf.io by alfio-event.

the class TicketRepository method reserveTickets.

default int reserveTickets(String reservationId, List<Integer> ticketIds, TicketCategory category, String userLanguage, PriceContainer.VatStatus vatStatus, IntFunction<String> ticketMetadataSupplier) {
    var idx = new AtomicInteger();
    var batchReserveParameters = ticketIds.stream()
        .map(id -> new MapSqlParameterSource("reservationId", reservationId)
            .addValue("id", id)
            .addValue("categoryId", category.getId())
            .addValue("userLanguage", userLanguage)
            .addValue("srcPriceCts", category.getSrcPriceCts())
            .addValue("currencyCode", category.getCurrencyCode())
            .addValue("ticketMetadata", Objects.requireNonNullElse(ticketMetadataSupplier.apply(idx.getAndIncrement()), "{}"))
            .addValue("vatStatus", vatStatus.name()))
        .toArray(MapSqlParameterSource[]::new);
    return (int) Arrays.stream(getNamedParameterJdbcTemplate().batchUpdate(batchReserveTickets(), batchReserveParameters)).asLongStream().sum();
}
Also used : Bind(ch.digitalfondue.npjt.Bind) java.util(java.util) TicketMetadataContainer(alfio.model.metadata.TicketMetadataContainer) QueryRepository(ch.digitalfondue.npjt.QueryRepository) ZonedDateTime(java.time.ZonedDateTime) NamedParameterJdbcTemplate(org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate) Array(alfio.model.support.Array) MapSqlParameterSource(org.springframework.jdbc.core.namedparam.MapSqlParameterSource) EnumTypeAsString(alfio.model.support.EnumTypeAsString) PollParticipant(alfio.model.poll.PollParticipant) QueryType(ch.digitalfondue.npjt.QueryType) alfio.model(alfio.model) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Query(ch.digitalfondue.npjt.Query) JSONData(alfio.model.support.JSONData) OnlineCheckInFullInfo(alfio.model.checkin.OnlineCheckInFullInfo) IntFunction(java.util.function.IntFunction) MapSqlParameterSource(org.springframework.jdbc.core.namedparam.MapSqlParameterSource) AtomicInteger(java.util.concurrent.atomic.AtomicInteger)
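
A small sketch of the supplier contract (ticket ids and metadata are invented): the IntFunction<String> is called once per ticket index, and null results fall back to an empty JSON object, as reserveTickets does with Objects.requireNonNullElse.

import java.util.List;
import java.util.Objects;
import java.util.function.IntFunction;
import java.util.stream.IntStream;

public class TicketMetadataSupplierSketch {
    public static void main(String[] args) {
        List<Integer> ticketIds = List.of(101, 102, 103);

        // Hypothetical supplier: only the first ticket carries metadata; the
        // repository falls back to "{}" for null values, as reserveTickets does.
        IntFunction<String> ticketMetadataSupplier =
                idx -> idx == 0 ? "{\"seat\":\"A1\"}" : null;

        IntStream.range(0, ticketIds.size()).forEach(idx -> {
            String metadata = Objects.requireNonNullElse(ticketMetadataSupplier.apply(idx), "{}");
            System.out.println(ticketIds.get(idx) + " -> " + metadata);
        });
    }
}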

Aggregations

IntFunction (java.util.function.IntFunction)33 List (java.util.List)10 Collectors (java.util.stream.Collectors)10 Test (org.junit.Test)9 Arrays (java.util.Arrays)8 IOException (java.io.IOException)7 ArrayList (java.util.ArrayList)7 IntStream (java.util.stream.IntStream)7 LoggerFactory (org.slf4j.LoggerFactory)6 HashMap (java.util.HashMap)5 Map (java.util.Map)5 Objects (java.util.Objects)5 Set (java.util.Set)5 Logger (org.slf4j.Logger)5 File (java.io.File)4 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)4 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)4 Function (java.util.function.Function)4 Supplier (java.util.function.Supplier)4 BytesRef (org.apache.lucene.util.BytesRef)4