Search in sources :

Example 1 with Partition

use of com.google.spanner.v1.Partition in project java-spanner by googleapis.

the class SpannerClientTest method partitionReadTest.

@Test
public void partitionReadTest() throws Exception {
    // Prepare the canned response the mock Spanner server will return.
    PartitionResponse expectedResponse =
        PartitionResponse.newBuilder()
            .addAllPartitions(new ArrayList<Partition>())
            .setTransaction(Transaction.newBuilder().build())
            .build();
    mockSpanner.addResponse(expectedResponse);

    // Build a request with every field populated (empty defaults where applicable).
    String sessionName =
        SessionName.of("[PROJECT]", "[INSTANCE]", "[DATABASE]", "[SESSION]").toString();
    PartitionReadRequest request =
        PartitionReadRequest.newBuilder()
            .setSession(sessionName)
            .setTransaction(TransactionSelector.newBuilder().build())
            .setTable("table110115790")
            .setIndex("index100346066")
            .addAllColumns(new ArrayList<String>())
            .setKeySet(KeySet.newBuilder().build())
            .setPartitionOptions(PartitionOptions.newBuilder().build())
            .build();

    // The client must return the mock's response unchanged.
    PartitionResponse actualResponse = client.partitionRead(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    // Exactly one request must have reached the mock, with every field intact.
    List<AbstractMessage> actualRequests = mockSpanner.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    PartitionReadRequest actualRequest = ((PartitionReadRequest) actualRequests.get(0));
    Assert.assertEquals(request.getSession(), actualRequest.getSession());
    Assert.assertEquals(request.getTransaction(), actualRequest.getTransaction());
    Assert.assertEquals(request.getTable(), actualRequest.getTable());
    Assert.assertEquals(request.getIndex(), actualRequest.getIndex());
    Assert.assertEquals(request.getColumnsList(), actualRequest.getColumnsList());
    Assert.assertEquals(request.getKeySet(), actualRequest.getKeySet());
    Assert.assertEquals(request.getPartitionOptions(), actualRequest.getPartitionOptions());

    // The standard API-client header must have been attached to the call.
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
Also used : PartitionResponse(com.google.spanner.v1.PartitionResponse) Partition(com.google.spanner.v1.Partition) AbstractMessage(com.google.protobuf.AbstractMessage) ArrayList(java.util.ArrayList) PartitionReadRequest(com.google.spanner.v1.PartitionReadRequest) Test(org.junit.Test)

Example 2 with Partition

use of bdv.img.hdf5.Partition in project imagej-utils by embl-cba.

the class BdvRaiXYZCTExporter method export.

/**
 * Exports a 5D (X,Y,Z,C,T) image to a BigDataViewer-compatible HDF5/XML pair.
 *
 * <p>Writes {@code filePathWithoutExtension + ".h5"} (pixel data, one view setup per
 * channel, one time point per T slice) and {@code filePathWithoutExtension + ".xml"}
 * (the sequence description), with mipmap levels auto-proposed from the image size
 * and voxel dimensions.
 *
 * @param raiXYZCT                 input image with axis order X,Y,Z,C,T
 * @param name                     base name used for the per-channel view setups
 * @param filePathWithoutExtension output path without ".h5"/".xml" extension
 * @param calibration              voxel sizes along X,Y,Z
 * @param calibrationUnit          unit of the calibration values (e.g. "micrometer")
 * @param translation              spatial offset applied via the source transform
 */
public void export(RandomAccessibleInterval<T> raiXYZCT, String name, String filePathWithoutExtension, double[] calibration, String calibrationUnit, // TODO: replace by AffineTransform3D
double[] translation) {
    // below code does not save pixels at negative coordinates....
    raiXYZCT = Views.zeroMin(raiXYZCT);
    final File hdf5File = new File(filePathWithoutExtension + ".h5");
    final File xmlFile = new File(filePathWithoutExtension + ".xml");
    // set up calibration
    String pixelUnit = getPixelUnit(calibrationUnit);
    final FinalVoxelDimensions voxelSize = new FinalVoxelDimensions(pixelUnit, calibration);
    final FinalDimensions imageSize = getFinalDimensions(raiXYZCT);
    // propose reasonable mipmap settings
    final ExportMipmapInfo autoMipmapSettings = ProposeMipmaps.proposeMipmaps(new BasicViewSetup(0, "", imageSize, voxelSize));
    progressWriter.out().println("Starting export...");
    final BasicImgLoader imgLoader = new RaiXYZCTLoader(raiXYZCT, calibration, calibrationUnit);
    final int numTimePoints = (int) raiXYZCT.dimension(TIME_DIM);
    final int numChannels = (int) raiXYZCT.dimension(CHANNEL_DIM);
    final AffineTransform3D sourceTransform = getSourceTransform3D(calibration, translation);
    // write hdf5: one BasicViewSetup per channel, all sharing size and voxel dimensions
    final HashMap<Integer, BasicViewSetup> setups = new HashMap<>(numChannels);
    for (int channelIndex = 0; channelIndex < numChannels; ++channelIndex) {
        final BasicViewSetup setup = new BasicViewSetup(channelIndex, name + String.format("_ch%d", channelIndex), imageSize, voxelSize);
        setup.setAttribute(new Channel(channelIndex));
        setups.put(channelIndex, setup);
    }
    final ArrayList<TimePoint> timePoints = new ArrayList<>(numTimePoints);
    for (int t = 0; t < numTimePoints; ++t) timePoints.add(new TimePoint(t));
    final SequenceDescriptionMinimal seq = new SequenceDescriptionMinimal(new TimePoints(timePoints), setups, imgLoader, null);
    // every setup uses the same auto-proposed mipmap settings
    final Map<Integer, ExportMipmapInfo> perSetupExportMipmapInfo = new HashMap<>();
    final ExportMipmapInfo mipmapInfo = new ExportMipmapInfo(autoMipmapSettings.getExportResolutions(), autoMipmapSettings.getSubdivisions());
    for (final BasicViewSetup setup : seq.getViewSetupsOrdered()) perSetupExportMipmapInfo.put(setup.getId(), mipmapInfo);
    final int numCellCreatorThreads = Math.max(1, PluginHelper.numThreads() - 1);
    // Read from the previous pyramid level (instead of the original) when the
    // downsampling factor relative to that level is at least 8x.
    ExportScalePyramid.LoopbackHeuristic loopbackHeuristic = (originalImg, factorsToOriginalImg, previousLevel, factorsToPreviousLevel, chunkSize) ->
        previousLevel >= 0
            && Intervals.numElements(factorsToOriginalImg) / Intervals.numElements(factorsToPreviousLevel) >= 8;
    // no per-plane post-processing needed
    final ExportScalePyramid.AfterEachPlane afterEachPlane = usedLoopBack -> {
    };
    // no partitioned export: everything goes into the single hdf5File
    final ArrayList<Partition> partitions = null;
    WriteSequenceToHdf5.writeHdf5File(seq, perSetupExportMipmapInfo, true, hdf5File, loopbackHeuristic, afterEachPlane, numCellCreatorThreads, new SubTaskProgressWriter(progressWriter, 0, 0.95));
    writeXml(hdf5File, xmlFile, progressWriter, numTimePoints, numChannels, sourceTransform, seq, partitions);
    progressWriter.out().println("done");
}
Also used : XmlIoSpimDataMinimal(bdv.spimdata.XmlIoSpimDataMinimal) bdv.export(bdv.export) HashMap(java.util.HashMap) UnsignedByteType(net.imglib2.type.numeric.integer.UnsignedByteType) Hdf5ImageLoader(bdv.img.hdf5.Hdf5ImageLoader) ArrayList(java.util.ArrayList) BasicImgLoader(mpicbg.spim.data.generic.sequence.BasicImgLoader) FloatType(net.imglib2.type.numeric.real.FloatType) Intervals(net.imglib2.util.Intervals) SpimDataMinimal(bdv.spimdata.SpimDataMinimal) FinalDimensions(net.imglib2.FinalDimensions) RandomAccessibleInterval(net.imglib2.RandomAccessibleInterval) BasicSetupImgLoader(mpicbg.spim.data.generic.sequence.BasicSetupImgLoader) ViewRegistrations(mpicbg.spim.data.registration.ViewRegistrations) ImgLoaderHint(mpicbg.spim.data.generic.sequence.ImgLoaderHint) Map(java.util.Map) NativeType(net.imglib2.type.NativeType) Partition(bdv.img.hdf5.Partition) ViewRegistration(mpicbg.spim.data.registration.ViewRegistration) AffineTransform3D(net.imglib2.realtransform.AffineTransform3D) Views(net.imglib2.view.Views) UnsignedShortType(net.imglib2.type.numeric.integer.UnsignedShortType) Dimensions(net.imglib2.Dimensions) PluginHelper(bdv.ij.util.PluginHelper) Util(net.imglib2.util.Util) File(java.io.File) SequenceDescriptionMinimal(bdv.spimdata.SequenceDescriptionMinimal) BasicViewSetup(mpicbg.spim.data.generic.sequence.BasicViewSetup) Converters(net.imglib2.converter.Converters) RealType(net.imglib2.type.numeric.RealType) mpicbg.spim.data.sequence(mpicbg.spim.data.sequence) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) FinalDimensions(net.imglib2.FinalDimensions) BasicImgLoader(mpicbg.spim.data.generic.sequence.BasicImgLoader) AffineTransform3D(net.imglib2.realtransform.AffineTransform3D) Partition(bdv.img.hdf5.Partition) SequenceDescriptionMinimal(bdv.spimdata.SequenceDescriptionMinimal) ImgLoaderHint(mpicbg.spim.data.generic.sequence.ImgLoaderHint) File(java.io.File) BasicViewSetup(mpicbg.spim.data.generic.sequence.BasicViewSetup)

Example 3 with Partition

use of bdv.img.hdf5.Partition in project imagej-utils by embl-cba.

the class XmlIoHdf5UnsignedLongImageLoader method fromXml.

/**
 * Reconstructs an {@code Hdf5UnsignedLongImageLoader} from its XML element.
 *
 * <p>Reads the main "hdf5" path (resolved against {@code basePath}) and any
 * {@code <partition>} child elements describing additional HDF5 files.
 */
@Override
public Hdf5UnsignedLongImageLoader fromXml(final Element elem, final File basePath, final AbstractSequenceDescription<?, ?, ?> sequenceDescription) {
    // Main HDF5 file location, resolved relative to the XML's base path.
    final String path = loadPath(elem, "hdf5", basePath).toString();

    // Gather partitions, if the dataset was written split across files.
    final ArrayList<Partition> partitions = new ArrayList<>();
    for (final Element partitionElement : elem.getChildren("partition")) {
        partitions.add(partitionFromXml(partitionElement, basePath));
    }

    return new Hdf5UnsignedLongImageLoader(new File(path), partitions, sequenceDescription);
}
Also used : Partition(bdv.img.hdf5.Partition) Element(org.jdom2.Element) ArrayList(java.util.ArrayList) File(java.io.File)

Example 4 with Partition

use of com.sap.hadoop.windowing.runtime2.Partition in project SQLWindowing by hbutani.

the class PTFOperator method processInputPartition.

/**
 * Runs the configured PTF chain over the current input partition and forwards
 * the select-list output downstream.
 *
 * @throws HiveException if the windowing chain or select-list evaluation fails;
 *         the underlying {@link WindowingException} is preserved as the cause
 */
protected void processInputPartition() throws HiveException {
    try {
        Partition outPart = Executor.executeChain(qDef, inputPart);
        Executor.executeSelectList(qDef, outPart, new ForwardPTF());
    } catch (WindowingException we) {
        // Fix: the previous message ("Cannot close PTFOperator.") was a copy/paste
        // leftover from the close path and misreported the failing operation.
        throw new HiveException("Cannot process input partition.", we);
    }
}
Also used : Partition(com.sap.hadoop.windowing.runtime2.Partition) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) WindowingException(com.sap.hadoop.windowing.WindowingException)

Example 5 with Partition

use of com.sap.hadoop.windowing.runtime2.Partition in project SQLWindowing by hbutani.

the class IOUtils method createPartition.

/**
 * Builds an in-memory {@code Partition} by draining all rows from the given input.
 *
 * <p>The input's deserializer supplies the SerDe and row inspector; each row read
 * via {@code wIn.next(...)} is appended until the input reports exhaustion (-1).
 *
 * @param partitionClass   class name identifying the partition implementation
 * @param partitionMemSize memory budget for the partition
 * @param wIn              source of rows to load
 * @throws WindowingException on any failure (non-windowing exceptions are wrapped)
 */
public static Partition createPartition(String partitionClass, int partitionMemSize, WindowingInput wIn) throws WindowingException {
    try {
        SerDe serDe = (SerDe) wIn.getDeserializer();
        StructObjectInspector rowInspector = (StructObjectInspector) serDe.getObjectInspector();
        Partition partition = new Partition(partitionClass, partitionMemSize, serDe, rowInspector);

        // Reuse one Writable as the read buffer; append copies/consumes it per row.
        Writable row = wIn.createRow();
        while (wIn.next(row) != -1) {
            partition.append(row);
        }
        return partition;
    } catch (WindowingException we) {
        // Already the right exception type — rethrow without wrapping.
        throw we;
    } catch (Exception e) {
        throw new WindowingException(e);
    }
}
Also used : SerDe(org.apache.hadoop.hive.serde2.SerDe) Partition(com.sap.hadoop.windowing.runtime2.Partition) WindowingException(com.sap.hadoop.windowing.WindowingException) Writable(org.apache.hadoop.io.Writable) IOException(java.io.IOException) WindowingException(com.sap.hadoop.windowing.WindowingException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)

Aggregations

ArrayList (java.util.ArrayList)6 Partition (com.sap.hadoop.windowing.runtime2.Partition)5 Partition (bdv.img.hdf5.Partition)4 WindowingException (com.sap.hadoop.windowing.WindowingException)4 File (java.io.File)4 Hdf5ImageLoader (bdv.img.hdf5.Hdf5ImageLoader)3 SequenceDescriptionMinimal (bdv.spimdata.SequenceDescriptionMinimal)3 SpimDataMinimal (bdv.spimdata.SpimDataMinimal)3 XmlIoSpimDataMinimal (bdv.spimdata.XmlIoSpimDataMinimal)3 ViewRegistration (mpicbg.spim.data.registration.ViewRegistration)3 ViewRegistrations (mpicbg.spim.data.registration.ViewRegistrations)3 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)3 bdv.export (bdv.export)2 PluginHelper (bdv.ij.util.PluginHelper)2 AbstractMessage (com.google.protobuf.AbstractMessage)2 Partition (com.google.spanner.v1.Partition)2 PartitionResponse (com.google.spanner.v1.PartitionResponse)2 HashMap (java.util.HashMap)2 Map (java.util.Map)2 BasicViewSetup (mpicbg.spim.data.generic.sequence.BasicViewSetup)2