Use of java.util.stream.IntStream in project mixcr by milaboratory: class ActionExportCloneReads, method go.
/**
 * Exports the original sequencing reads of the selected clones.
 *
 * <p>Opens the clones/alignments input, resolves the clone ids to export (all clones when
 * none were specified), and writes the original reads of every alignment assigned to those
 * clones either to one shared output file or to one file per clone (see
 * {@code parameters.doSeparate()}).
 *
 * @param helper action helper supplied by the CLI framework
 * @throws ParameterException if the input was aligned without saving original reads
 * @throws Exception on I/O failures while reading or writing
 */
@Override
public void go(ActionHelper helper) throws Exception {
    try (ClnAReader clna = new ClnAReader(parameters.getInputFileName(), VDJCLibraryRegistry.createDefaultRegistry())) {
        VDJCAlignments firstAlignment = clna.readAllAlignments().take();
        if (firstAlignment == null)
            return; // no alignments at all — nothing to export
        if (firstAlignment.getOriginalReads() == null)
            throw new ParameterException("Error: original reads were not saved in the .vdjca file: " + "re-run align with '-g' option.");
        // Clone ids to export: the explicit command-line list, or every clone in the file.
        int[] cid = parameters.getCloneIds();
        Supplier<IntStream> cloneIds;
        if (cid == null)
            cloneIds = () -> IntStream.range(0, clna.numberOfClones());
        else
            cloneIds = () -> IntStream.of(cid);
        long totalAlignments = cloneIds.get().mapToLong(clna::numberOfAlignmentsInClone).sum();
        AtomicLong alignmentsWritten = new AtomicLong();
        AtomicBoolean finished = new AtomicBoolean(false);
        SmartProgressReporter.startProgressReport("Writing reads", new CanReportProgress() {
            @Override
            public double getProgress() {
                // Guard against 0/0 -> NaN when the selected clones have no alignments.
                return totalAlignments == 0 ? 1.0 : 1.0 * alignmentsWritten.get() / totalAlignments;
            }

            @Override
            public boolean isFinished() {
                return finished.get();
            }
        });
        // Assume uniform pairedness across the run: probe the first read to pick the
        // single-end vs paired-end writer format.
        boolean paired = firstAlignment.getOriginalReads().get(0).numberOfReads() == 2;
        boolean separate = parameters.doSeparate();
        // One shared writer for all clones, or (when separate) a fresh writer per clone.
        SequenceWriter globalWriter = separate ? null : createWriter(paired, parameters.getOutputFileName());
        try {
            cloneIds.get().forEach(cloneId -> {
                try (SequenceWriter individualWriter = globalWriter == null ? createWriter(paired, cloneFile(parameters.getOutputFileName(), cloneId)) : null) {
                    SequenceWriter actualWriter = globalWriter == null ? individualWriter : globalWriter;
                    for (VDJCAlignments alignments : CUtils.it(clna.readAlignmentsOfClone(cloneId))) {
                        for (SequenceRead read : alignments.getOriginalReads())
                            actualWriter.write(read);
                        alignmentsWritten.incrementAndGet();
                    }
                } catch (IOException e) {
                    // Streams cannot propagate checked exceptions; rewrap with cause preserved.
                    throw new RuntimeException(e);
                }
            });
        } finally {
            // FIX: previously the shared writer was never closed (resource leak / possibly
            // unflushed output) and `finished` was never set, so the progress reporter
            // never terminated.
            finished.set(true);
            if (globalWriter != null)
                globalWriter.close();
        }
    }
}
Use of java.util.stream.IntStream in project com-liferay-apio-architect by liferay: class PersonModel, method compute.
/**
 * Computes the fake data for this model class.
 *
 * <p>Idempotent: returns immediately when the backing map is already populated. Otherwise
 * creates ten {@code PersonModel} instances with randomized fake data and registers each
 * under an auto-incremented id.
 */
public static void compute() {
    if (!_personModels.isEmpty()) {
        return;
    }
    // Hoisted out of the loop: a single Faker (and its category views) serves every
    // iteration; the original allocated a fresh Faker per person for no benefit.
    Faker faker = new Faker();
    Address address = faker.address();
    Internet internet = faker.internet();
    DateAndTime dateAndTime = faker.date();
    Name name = faker.name();
    RandomService randomService = faker.random();
    for (long index = 0; index < 10; index++) {
        // Reference point 21 years in the past; the birth date falls at most 10000 days
        // (~27 years) before it — presumably so every generated person is an adult.
        Calendar calendar = Calendar.getInstance();
        calendar.add(YEAR, -21);
        Date birthDate = dateAndTime.past(10000, DAYS, calendar.getTime());
        // Zero to four randomly generated job titles.
        IntStream intStream = IntStream.range(0, randomService.nextInt(5));
        List<String> jobTitles = intStream.mapToObj(__ -> name.title()).collect(Collectors.toList());
        PersonModel personModel = new PersonModel(address.fullAddress(), internet.avatar(), birthDate, internet.safeEmailAddress(), name.firstName(), jobTitles, name.lastName(), _count.get());
        _personModels.put(_count.getAndIncrement(), personModel);
    }
}
Use of java.util.stream.IntStream in project registry by hortonworks: class SlotSynchronizerTest, method testCountSlots.
@Test
public void testCountSlots() throws Exception {
    // Acquire a random positive number of slot locks, check that the occupied-slot
    // count matches, then release them all and check it drops back to zero.
    final SlotSynchronizer<Integer> slotSynchronizer = new SlotSynchronizer<>();
    final int count = new Random().nextInt(1000) + 1;
    final Collection<SlotSynchronizer<Integer>.Lock> locks = new ArrayList<>();
    for (int key = 0; key < count; key++) {
        locks.add(slotSynchronizer.lockSlot(key));
    }
    Assert.assertEquals(count, slotSynchronizer.occupiedSlots());
    for (SlotSynchronizer<Integer>.Lock lock : locks) {
        lock.unlock();
    }
    Assert.assertEquals(0, slotSynchronizer.occupiedSlots());
}
Use of java.util.stream.IntStream in project GDSC-SMLM by aherbert: class PeakFit, method runMaximaFitting.
/**
 * Load the selected results from memory. All multiple frame results are added directly to the
 * results. All single frame results are added to a list of candidate maxima per frame and fitted
 * using the configured parameters.
 *
 * <p>Fitting is delegated to a multi-threaded {@code FitEngine}; this method streams frames in
 * ascending order, pre-processes each frame's pixel data for noise estimation, and submits one
 * fit job per frame. Processing stops early if the user presses escape or a frame cannot be
 * read from the source.
 */
private void runMaximaFitting() {
    final MemoryPeakResults memoryResults = ResultsManager.loadInputResults(settings.inputOption, false, DistanceUnit.PIXEL);
    if (memoryResults == null || memoryResults.size() == 0) {
        log("No results for maxima fitting");
        return;
    }
    // The total frames (for progress reporting)
    int totalFrames;
    // A function that can convert a frame into a set of candidate indices
    final IntFunction<int[]> frameToMaxIndices;
    // The frames to process (should be sorted ascending). A Supplier because the stream
    // is consumed once here and streams are single-use.
    Supplier<IntStream> frames;
    // Support fitting all time frames with the same results.
    if (settings.fitAcrossAllFrames) {
        // Check if the input spans multiple frames
        if (getSingleFrame(memoryResults) == 0) {
            final int min = memoryResults.getMinFrame();
            final int max = memoryResults.getMaxFrame();
            // Ask the user to confirm before reusing multi-frame candidates for every frame.
            final GenericDialog gd = new GenericDialog(TITLE);
            gd.enableYesNoCancel();
            gd.hideCancelButton();
            ImageJUtils.addMessage(gd, "Candidate maxima for fitting span multiple frames (%d-%d).\n \n" + "Please confirm the %s are correct.", min, max, TextUtils.pleural(memoryResults.size(), "candidate"));
            gd.showDialog();
            if (!gd.wasOKed()) {
                return;
            }
        }
        // The same candidate indices are applied to every frame of the source.
        final int[] maxIndices = getMaxIndices(Arrays.asList(memoryResults.toArray()));
        // This may not work correctly if using for example a series image source that
        // incorrectly estimates the number of frames
        totalFrames = source.getFrames();
        frameToMaxIndices = frame -> maxIndices;
        frames = () -> IntStream.rangeClosed(1, totalFrames);
    } else {
        // Build a map between the time-frame and the results in that frame.
        // Only single-frame results qualify (frame == endFrame); multi-frame results are
        // excluded from per-frame candidate lists.
        final Map<Integer, List<PeakResult>> map = Arrays.stream(memoryResults.toArray()).parallel().filter(peakResult -> peakResult.getFrame() == peakResult.getEndFrame()).collect(Collectors.groupingBy(PeakResult::getFrame));
        totalFrames = map.size();
        // Build a function that can convert a frame into a set of candidate indices
        frameToMaxIndices = frame -> getMaxIndices(map.get(frame));
        frames = () -> map.keySet().stream().mapToInt(Integer::intValue).sorted();
    }
    // Optional stack collecting every processed frame for display at the end.
    final ImageStack stack = (extraSettings.showProcessedFrames) ? new ImageStack(bounds.width, bounds.height) : null;
    // Use the FitEngine to allow multi-threading.
    final FitEngine engine = createFitEngine(getNumberOfThreads(totalFrames));
    if (engine == null) {
        return;
    }
    final int step = ImageJUtils.getProgressInterval(totalFrames);
    // No crop bounds are supported.
    // To pre-process data for noise estimation
    final boolean isFitCameraCounts = fitConfig.isFitCameraCounts();
    final CameraModel cameraModel = fitConfig.getCameraModel();
    runTime = System.nanoTime();
    final AtomicBoolean shutdown = new AtomicBoolean();
    final String format = String.format("Slice: %%d / %d (Results=%%d)", totalFrames);
    // forEachOrdered: frames must be submitted in ascending order.
    frames.get().forEachOrdered(slice -> {
        if (shutdown.get() || escapePressed()) {
            shutdown.set(true);
            return;
        }
        final float[] data = source.get(slice);
        if (data == null) {
            // Source exhausted (or frame unavailable) — stop submitting further frames.
            shutdown.set(true);
            return;
        }
        // Periodic status/progress update (every `step` slices).
        if (slice % step == 0) {
            if (ImageJUtils.showStatus(() -> String.format(format, slice, results.size()))) {
                IJ.showProgress(slice, totalFrames);
            }
        }
        // We must pre-process the data before noise estimation. Work on a copy so the
        // raw frame (added to the display stack below) is unmodified.
        final float[] data2 = data.clone();
        if (isFitCameraCounts) {
            cameraModel.removeBias(data2);
        } else {
            cameraModel.removeBiasAndGain(data2);
        }
        final float noise = FitWorker.estimateNoise(data2, source.getWidth(), source.getHeight(), config.getNoiseMethod());
        if (stack != null) {
            stack.addSlice(String.format("Frame %d - %d", source.getStartFrameNumber(), source.getEndFrameNumber()), data);
        }
        // Get the frame number from the source to allow for interlaced and aggregated data
        engine.run(createMaximaFitJob(frameToMaxIndices.apply(slice), source.getStartFrameNumber(), source.getEndFrameNumber(), data, bounds, noise));
    });
    // Wait for the engine to finish; pass the shutdown flag so a user abort discards
    // queued jobs rather than draining them.
    engine.end(shutdown.get());
    time = engine.getTime();
    runTime = System.nanoTime() - runTime;
    if (stack != null) {
        ImageJUtils.display("Processed frames", stack);
    }
    showResults();
    source.close();
}
Use of java.util.stream.IntStream in project flink by apache: class KafkaConnectorOptionsUtil, method createValueFormatProjection.
/**
 * Creates an array of indices that determine which physical fields of the table schema to
 * include in the value format.
 *
 * <p>See {@link KafkaConnectorOptions#VALUE_FORMAT}, {@link
 * KafkaConnectorOptions#VALUE_FIELDS_INCLUDE}, and {@link
 * KafkaConnectorOptions#KEY_FIELDS_PREFIX} for more information.
 *
 * @param options connector configuration to read the strategy and key prefix from
 * @param physicalDataType physical row type of the table; must be a ROW type
 * @return indices of the physical fields to project into the value format
 * @throws ValidationException if a key prefix is configured together with the ALL strategy
 * @throws TableException if the configured strategy is unknown
 */
public static int[] createValueFormatProjection(ReadableConfig options, DataType physicalDataType) {
    final LogicalType physicalType = physicalDataType.getLogicalType();
    Preconditions.checkArgument(physicalType.is(LogicalTypeRoot.ROW), "Row data type expected.");
    final int physicalFieldCount = LogicalTypeChecks.getFieldCount(physicalType);
    final IntStream physicalFields = IntStream.range(0, physicalFieldCount);
    final String keyPrefix = options.getOptional(KEY_FIELDS_PREFIX).orElse("");
    final ValueFieldsStrategy strategy = options.get(VALUE_FIELDS_INCLUDE);
    if (strategy == ValueFieldsStrategy.ALL) {
        // A key prefix only makes sense when key fields are excluded from the value;
        // with ALL the prefixed key columns would overlap the value columns.
        if (!keyPrefix.isEmpty()) {
            throw new ValidationException(String.format("A key prefix is not allowed when option '%s' is set to '%s'. " + "Set it to '%s' instead to avoid field overlaps.", VALUE_FIELDS_INCLUDE.key(), ValueFieldsStrategy.ALL, ValueFieldsStrategy.EXCEPT_KEY));
        }
        return physicalFields.toArray();
    } else if (strategy == ValueFieldsStrategy.EXCEPT_KEY) {
        // Keep every physical field that does not appear in the key projection.
        final int[] keyProjection = createKeyFormatProjection(options, physicalDataType);
        return physicalFields.filter(pos -> IntStream.of(keyProjection).noneMatch(k -> k == pos)).toArray();
    }
    throw new TableException("Unknown value fields strategy:" + strategy);
}
Aggregations