Use of bdv.img.hdf5.Partition in project java-spanner by googleapis.
In the class SpannerClientTest, method partitionReadTest:
@Test
public void partitionReadTest() throws Exception {
    // Prime the mock Spanner service with the response the client should receive.
    PartitionResponse cannedResponse =
        PartitionResponse.newBuilder()
            .addAllPartitions(new ArrayList<Partition>())
            .setTransaction(Transaction.newBuilder().build())
            .build();
    mockSpanner.addResponse(cannedResponse);
    // Build the request that will be sent through the generated client.
    String sessionName =
        SessionName.of("[PROJECT]", "[INSTANCE]", "[DATABASE]", "[SESSION]").toString();
    PartitionReadRequest sentRequest =
        PartitionReadRequest.newBuilder()
            .setSession(sessionName)
            .setTransaction(TransactionSelector.newBuilder().build())
            .setTable("table110115790")
            .setIndex("index100346066")
            .addAllColumns(new ArrayList<String>())
            .setKeySet(KeySet.newBuilder().build())
            .setPartitionOptions(PartitionOptions.newBuilder().build())
            .build();
    PartitionResponse receivedResponse = client.partitionRead(sentRequest);
    Assert.assertEquals(cannedResponse, receivedResponse);
    // Exactly one RPC should have reached the mock, carrying the same field values we set.
    List<AbstractMessage> receivedRequests = mockSpanner.getRequests();
    Assert.assertEquals(1, receivedRequests.size());
    PartitionReadRequest receivedRequest = (PartitionReadRequest) receivedRequests.get(0);
    Assert.assertEquals(sentRequest.getSession(), receivedRequest.getSession());
    Assert.assertEquals(sentRequest.getTransaction(), receivedRequest.getTransaction());
    Assert.assertEquals(sentRequest.getTable(), receivedRequest.getTable());
    Assert.assertEquals(sentRequest.getIndex(), receivedRequest.getIndex());
    Assert.assertEquals(sentRequest.getColumnsList(), receivedRequest.getColumnsList());
    Assert.assertEquals(sentRequest.getKeySet(), receivedRequest.getKeySet());
    Assert.assertEquals(sentRequest.getPartitionOptions(), receivedRequest.getPartitionOptions());
    // The transport should have attached the standard API-client header.
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
Use of bdv.img.hdf5.Partition in project imagej-utils by embl-cba.
In the class BdvRaiXYZCTExporter, method export:
/**
 * Exports a 5D (X,Y,Z,C,T) image as a BigDataViewer-style HDF5 + XML dataset.
 * Writes {@code filePathWithoutExtension + ".h5"} and {@code ".xml"}.
 *
 * @param raiXYZCT the image to export, dimension order X,Y,Z,C,T
 * @param name base name used for the per-channel view setups
 * @param filePathWithoutExtension output path without the .h5/.xml extension
 * @param calibration voxel size per spatial dimension
 * @param calibrationUnit unit of the calibration values (e.g. "micrometer")
 * @param translation spatial offset applied via the source transform
 */
// TODO: replace calibration/translation by AffineTransform3D
public void export(RandomAccessibleInterval<T> raiXYZCT, String name, String filePathWithoutExtension, double[] calibration, String calibrationUnit, double[] translation) {
    // Below code does not save pixels at negative coordinates, so shift to a zero-based origin.
    raiXYZCT = Views.zeroMin(raiXYZCT);
    final File hdf5File = new File(filePathWithoutExtension + ".h5");
    final File xmlFile = new File(filePathWithoutExtension + ".xml");
    // Set up calibration.
    String pixelUnit = getPixelUnit(calibrationUnit);
    final FinalVoxelDimensions voxelSize = new FinalVoxelDimensions(pixelUnit, calibration);
    final FinalDimensions imageSize = getFinalDimensions(raiXYZCT);
    // Propose reasonable mipmap settings from the image geometry.
    final ExportMipmapInfo autoMipmapSettings =
        ProposeMipmaps.proposeMipmaps(new BasicViewSetup(0, "", imageSize, voxelSize));
    progressWriter.out().println("Starting export...");
    final BasicImgLoader imgLoader = new RaiXYZCTLoader(raiXYZCT, calibration, calibrationUnit);
    final int numTimePoints = (int) raiXYZCT.dimension(TIME_DIM);
    final int numChannels = (int) raiXYZCT.dimension(CHANNEL_DIM);
    final AffineTransform3D sourceTransform = getSourceTransform3D(calibration, translation);
    // Write hdf5: one BasicViewSetup per channel.
    final HashMap<Integer, BasicViewSetup> setups = new HashMap<>(numChannels);
    for (int channelIndex = 0; channelIndex < numChannels; ++channelIndex) {
        final BasicViewSetup setup =
            new BasicViewSetup(channelIndex, name + String.format("_ch%d", channelIndex), imageSize, voxelSize);
        setup.setAttribute(new Channel(channelIndex));
        setups.put(channelIndex, setup);
    }
    final ArrayList<TimePoint> timePoints = new ArrayList<>(numTimePoints);
    for (int t = 0; t < numTimePoints; ++t) timePoints.add(new TimePoint(t));
    final SequenceDescriptionMinimal seq =
        new SequenceDescriptionMinimal(new TimePoints(timePoints), setups, imgLoader, null);
    // Every setup shares the same auto-proposed mipmap configuration.
    final Map<Integer, ExportMipmapInfo> perSetupExportMipmapInfo = new HashMap<>();
    final ExportMipmapInfo mipmapInfo =
        new ExportMipmapInfo(autoMipmapSettings.getExportResolutions(), autoMipmapSettings.getSubdivisions());
    for (final BasicViewSetup setup : seq.getViewSetupsOrdered())
        perSetupExportMipmapInfo.put(setup.getId(), mipmapInfo);
    final int numCellCreatorThreads = Math.max(1, PluginHelper.numThreads() - 1);
    // Use the previous level as the downsampling source once the total reduction
    // relative to it reaches a factor of 8 (cheaper than resampling the original).
    ExportScalePyramid.LoopbackHeuristic loopbackHeuristic =
        (originalImg, factorsToOriginalImg, previousLevel, factorsToPreviousLevel, chunkSize) ->
            previousLevel >= 0
                && Intervals.numElements(factorsToOriginalImg) / Intervals.numElements(factorsToPreviousLevel) >= 8;
    final ExportScalePyramid.AfterEachPlane afterEachPlane = usedLoopBack -> {
    };
    // No partitioning: the whole sequence is written into a single HDF5 file.
    final ArrayList<Partition> partitions = null;
    WriteSequenceToHdf5.writeHdf5File(seq, perSetupExportMipmapInfo, true, hdf5File,
        loopbackHeuristic, afterEachPlane, numCellCreatorThreads,
        new SubTaskProgressWriter(progressWriter, 0, 0.95));
    writeXml(hdf5File, xmlFile, progressWriter, numTimePoints, numChannels, sourceTransform, seq, partitions);
    progressWriter.out().println("done");
}
Use of bdv.img.hdf5.Partition in project imagej-utils by embl-cba.
In the class XmlIoHdf5UnsignedLongImageLoader, method fromXml:
/**
 * Deserializes an image loader from its XML element: resolves the HDF5 master
 * file path and reads any referenced partition entries.
 */
@Override
public Hdf5UnsignedLongImageLoader fromXml(final Element elem, final File basePath, final AbstractSequenceDescription<?, ?, ?> sequenceDescription) {
    // Resolve the "hdf5" child path relative to the XML base path.
    final String hdf5Path = loadPath(elem, "hdf5", basePath).toString();
    final ArrayList<Partition> partitions = new ArrayList<>();
    for (final Element partitionElement : elem.getChildren("partition")) {
        partitions.add(partitionFromXml(partitionElement, basePath));
    }
    return new Hdf5UnsignedLongImageLoader(new File(hdf5Path), partitions, sequenceDescription);
}
Use of bdv.img.hdf5.Partition in project SQLWindowing by hbutani.
In the class PTFOperator, method processInputPartition:
/**
 * Runs the windowing function chain over the current input partition and
 * forwards the select-list output downstream via {@code ForwardPTF}.
 *
 * @throws HiveException if evaluating the chain or the select list fails
 */
protected void processInputPartition() throws HiveException {
    try {
        Partition outPart = Executor.executeChain(qDef, inputPart);
        Executor.executeSelectList(qDef, outPart, new ForwardPTF());
    } catch (WindowingException we) {
        // Fix: previous message ("Cannot close PTFOperator.") was copy-pasted from
        // the close path; this method processes a partition, it does not close.
        throw new HiveException("Cannot process input Partition.", we);
    }
}
Use of bdv.img.hdf5.Partition in project SQLWindowing by hbutani.
In the class IOUtils, method createPartition:
/**
 * Creates a Partition and fills it with every row read from the given input.
 *
 * @param partitionClass implementation class name used by the Partition
 * @param partitionMemSize in-memory size budget for the Partition
 * @param wIn source of rows to append
 * @return the populated Partition
 * @throws WindowingException if reading or appending fails; non-windowing
 *         exceptions are wrapped
 */
public static Partition createPartition(String partitionClass, int partitionMemSize, WindowingInput wIn) throws WindowingException {
    try {
        final SerDe rowSerDe = (SerDe) wIn.getDeserializer();
        final StructObjectInspector rowInspector = (StructObjectInspector) rowSerDe.getObjectInspector();
        final Partition partition = new Partition(partitionClass, partitionMemSize, rowSerDe, rowInspector);
        // Drain the input; next(...) returns -1 once no more rows are available.
        final Writable row = wIn.createRow();
        while (wIn.next(row) != -1) {
            partition.append(row);
        }
        return partition;
    } catch (WindowingException we) {
        // Already the right exception type — propagate unchanged.
        throw we;
    } catch (Exception e) {
        throw new WindowingException(e);
    }
}
Aggregations