Use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project bagheera by mozilla-metrics.
The class KafkaConsumer, method poll:
@Override
public void poll() {
    final CountDownLatch latch = new CountDownLatch(streams.size());
    for (final KafkaStream<Message> stream : streams) {
        workers.add(executor.submit(new Callable<Void>() {
            @Override
            public Void call() {
                try {
                    for (MessageAndMetadata<Message> mam : stream) {
                        BagheeraMessage bmsg = BagheeraMessage.parseFrom(ByteString.copyFrom(mam.message().payload()));
                        // get the sink for this message's namespace
                        // (typically only one sink unless a regex pattern was used to listen to multiple topics)
                        KeyValueSink sink = sinkFactory.getSink(bmsg.getNamespace());
                        if (sink == null) {
                            LOG.error("Could not obtain sink for namespace: " + bmsg.getNamespace());
                            break;
                        }
                        if (bmsg.getOperation() == Operation.CREATE_UPDATE && bmsg.hasId() && bmsg.hasPayload()) {
                            if (validationPipeline == null || validationPipeline.isValid(bmsg.getPayload().toByteArray())) {
                                if (bmsg.hasTimestamp()) {
                                    sink.store(bmsg.getId(), bmsg.getPayload().toByteArray(), bmsg.getTimestamp());
                                } else {
                                    sink.store(bmsg.getId(), bmsg.getPayload().toByteArray());
                                }
                            } else {
                                invalidMessageMeter.mark();
                                // TODO: sample out an example payload
                                LOG.warn("Invalid payload for namespace: " + bmsg.getNamespace());
                            }
                        } else if (bmsg.getOperation() == Operation.DELETE && bmsg.hasId()) {
                            sink.delete(bmsg.getId());
                        }
                        consumed.mark();
                    }
                } catch (InvalidProtocolBufferException e) {
                    LOG.error("Invalid protocol buffer in data stream", e);
                } catch (UnsupportedEncodingException e) {
                    LOG.error("Message ID was not in UTF-8 encoding", e);
                } catch (IOException e) {
                    LOG.error("IO error while storing to data sink", e);
                } finally {
                    latch.countDown();
                }
                return null;
            }
        }));
    }
    // run indefinitely unless we detect that a thread exited
    try {
        while (true) {
            latch.await(10, TimeUnit.SECONDS);
            if (latch.getCount() != streams.size()) {
                // we have a dead thread and should exit
                break;
            }
        }
    } catch (InterruptedException e) {
        LOG.info("Interrupted during polling", e);
    }
    // Spit out errors if there were any
    for (Future<Void> worker : workers) {
        try {
            if (worker.isDone() && !worker.isCancelled()) {
                worker.get(1, TimeUnit.SECONDS);
            }
        } catch (InterruptedException e) {
            LOG.error("Thread was interrupted:", e);
        } catch (ExecutionException e) {
            LOG.error("Exception occurred in thread:", e);
        } catch (TimeoutException e) {
            LOG.error("Timed out waiting for thread result:", e);
        } catch (CancellationException e) {
            LOG.error("Thread has been canceled: ", e);
        }
    }
}
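The core of the worker loop is decoding each Kafka payload into a BagheeraMessage protobuf and treating a malformed record as a recoverable, per-message failure rather than a fatal one. A minimal sketch of that decode step follows; the class name PayloadDecoder, the decodeOrNull helper, and the import path for the generated BagheeraMessage class are illustrative assumptions, while parseFrom and InvalidProtocolBufferException come from the protobuf runtime itself.

import com.google.protobuf.InvalidProtocolBufferException;
import com.mozilla.bagheera.BagheeraProto.BagheeraMessage; // assumed location of the generated class

public final class PayloadDecoder {
    /**
     * Decodes a raw Kafka payload into a BagheeraMessage, or returns null if the
     * bytes are not a valid protocol buffer. The caller decides whether to skip
     * or meter the corrupt record.
     */
    public static BagheeraMessage decodeOrNull(byte[] rawPayload) {
        try {
            // Generated messages expose parseFrom(byte[]), which throws
            // InvalidProtocolBufferException on malformed input.
            return BagheeraMessage.parseFrom(rawPayload);
        } catch (InvalidProtocolBufferException e) {
            // Corrupt record: log or meter it and keep consuming, mirroring poll() above.
            return null;
        }
    }
}

Returning null (or an Optional) keeps the consuming loop alive, which matches the catch-and-log behaviour of the method above.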
Use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project GDSC-SMLM by aherbert.
The class PeakResultsReader, method getCalibration:
/**
 * Gets the calibration specified in the results header.
 *
 * @return The calibration specified in the results header.
 */
@SuppressWarnings("deprecation")
public Calibration getCalibration() {
  if (calibration == null) {
    getHeader();
    if (header != null && header.length() > 0) {
      if (format == FileFormat.RAPID_STORM) {
        // Create the writer first so the distance unit is set even when no resolution is found
        calibration = new CalibrationWriter();
        calibration.setDistanceUnit(DistanceUnit.NM);
        // RapidSTORM has a resolution attribute in the header in units of px m^-1
        final Pattern pattern = Pattern.compile("resolution=\"([^ ]+) px m");
        final Matcher match = pattern.matcher(header);
        if (match.find()) {
          try {
            final float resolution = Float.parseFloat(match.group(1));
            if (Double.isFinite(resolution) && resolution > 0) {
              final double nmPerPixel = (float) (1e9 / resolution);
              calibration.setNmPerPixel(nmPerPixel);
            }
          } catch (final NumberFormatException ex) {
            // Ignore
          }
        }
      } else {
        final String calibrationString = getField("Calibration");
        if (calibrationString != null && calibrationString.length() > 0) {
          // Older formats used XML
          if (calibrationString.startsWith("<")) {
            // Convert the XML back
            try {
              // Support package gdsc.smlm renamed to uk.ac.sussex.gdsc.smlm
              final uk.ac.sussex.gdsc.smlm.results.Calibration cal =
                  (uk.ac.sussex.gdsc.smlm.results.Calibration) XStreamUtils
                      .fromXml(XStreamUtils.updateGdscPackageName(calibrationString));
              cal.validate();
              // Convert to a calibration helper
              calibration = new CalibrationWriter();
              if (cal.hasNmPerPixel()) {
                calibration.setNmPerPixel(cal.getNmPerPixel());
              }
              if (cal.hasGain()) {
                calibration.setCountPerPhoton(cal.getGain());
              }
              if (cal.hasExposureTime()) {
                calibration.setExposureTime(cal.getExposureTime());
              }
              if (cal.hasReadNoise()) {
                calibration.setReadNoise(cal.getReadNoise());
              }
              if (cal.hasBias()) {
                calibration.setBias(cal.getBias());
              }
              if (cal.emCCD) {
                calibration.setCameraType(CameraType.EMCCD);
              }
              if (cal.hasAmplification() && cal.hasGain()) {
                calibration.setQuantumEfficiency(cal.getGain() / cal.getAmplification());
              }
              // Previous versions were always in fixed units
              calibration.setDistanceUnit(DistanceUnit.PIXEL);
              calibration.setIntensityUnit(IntensityUnit.COUNT);
              calibration.setAngleUnit(AngleUnit.DEGREE);
              calibration.setTimeUnit(TimeUnit.FRAME);
            } catch (final Exception ex) {
              logger.log(Level.WARNING, "Unable to deserialise the Calibration settings", ex);
            }
          } else {
            // Assume JSON format
            try {
              final Calibration.Builder calibrationBuilder = Calibration.newBuilder();
              JsonFormat.parser().merge(calibrationString, calibrationBuilder);
              calibration = new CalibrationWriter(calibrationBuilder);
              // Old results did not save the time unit
              if (calibration.getTimeUnitValue() == TimeUnit.TIME_UNIT_NA_VALUE) {
                calibration.setTimeUnit(TimeUnit.FRAME);
              }
            } catch (final InvalidProtocolBufferException ex) {
              logger.log(Level.WARNING, "Unable to deserialise the Calibration settings", ex);
            }
          }
        }
        if (format == FileFormat.MALK) {
          if (calibration == null) {
            calibration = new CalibrationWriter();
          }
          calibration.setDistanceUnit(DistanceUnit.NM);
          calibration.setIntensityUnit(IntensityUnit.PHOTON);
          calibration.setTimeUnit(TimeUnit.FRAME);
        }
      }
    }
    // Calibration is a smart object so we can create an empty one
    if (calibration == null) {
      calibration = new CalibrationWriter();
    }
  }
  return calibration.getCalibration();
}
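The JSON branch above leans on protobuf's JsonFormat: Parser.merge either fills the builder or throws InvalidProtocolBufferException, and the reader then falls back to an empty CalibrationWriter. Below is a small sketch of that parse-with-fallback pattern, assuming only the GDSC Calibration proto used above; the class name CalibrationJson, the helper parseCalibrationJson, and the exact import path are illustrative.

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.JsonFormat;
import uk.ac.sussex.gdsc.smlm.data.config.CalibrationProtos.Calibration; // assumed proto location

final class CalibrationJson {
  /** Parses a calibration from its JSON header field, or returns the default instance on bad input. */
  static Calibration parseCalibrationJson(String json) {
    final Calibration.Builder builder = Calibration.newBuilder();
    try {
      // ignoringUnknownFields() tolerates fields written by newer versions of the message.
      JsonFormat.parser().ignoringUnknownFields().merge(json, builder);
    } catch (InvalidProtocolBufferException ex) {
      // Malformed JSON for this message type: fall back to an empty calibration,
      // mirroring the warn-and-continue behaviour in getCalibration() above.
      return Calibration.getDefaultInstance();
    }
    return builder.build();
  }
}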
Use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project GDSC-SMLM by aherbert.
The class TsfPeakResultsWriter, method createSpotList:
private SpotList createSpotList() {
  final SpotList.Builder builder = SpotList.newBuilder();
  builder.setApplicationId(APPLICATION_ID);
  builder.setNrSpots(size);
  // Add the standard details the TSF supports. We use extensions to add GDSC SMLM data.
  if (!TextUtils.isNullOrEmpty(getName())) {
    builder.setName(getName());
  }
  if (getSource() != null) {
    builder.setNrPixelsX(getSource().width);
    builder.setNrPixelsY(getSource().height);
    builder.setNrFrames(getSource().frames);
    builder.setSource(singleLine(getSource().toXml()));
  }
  if (getBounds() != null) {
    final ROI.Builder roiBuilder = builder.getRoiBuilder();
    roiBuilder.setX(getBounds().x);
    roiBuilder.setY(getBounds().y);
    roiBuilder.setXWidth(getBounds().width);
    roiBuilder.setYWidth(getBounds().height);
    builder.setRoi(roiBuilder.build());
  }
  if (hasCalibration()) {
    final CalibrationReader cr = getCalibrationReader();
    if (cr.hasNmPerPixel()) {
      builder.setPixelSize((float) cr.getNmPerPixel());
    }
    if (cr.hasExposureTime()) {
      builder.setExposureTime(cr.getExposureTime());
    }
    if (cr.hasReadNoise()) {
      builder.setReadNoise(cr.getReadNoise());
    }
    if (cr.hasBias()) {
      builder.setBias(cr.getBias());
    }
    if (cr.hasCameraType()) {
      builder.setCameraType(cameraTypeMap[cr.getCameraType().ordinal()]);
    }
    if (cr.hasDistanceUnit()) {
      builder.setLocationUnits(locationUnitsMap[cr.getDistanceUnit().ordinal()]);
    }
    if (cr.hasIntensityUnit()) {
      builder.setIntensityUnits(intensityUnitsMap[cr.getIntensityUnit().ordinal()]);
    }
    if (cr.hasAngleUnit()) {
      builder.setThetaUnits(thetaUnitsMap[cr.getAngleUnit().ordinal()]);
    }
    // We can use some logic here to get the QE
    if (cr.hasCountPerPhoton()) {
      builder.setGain(cr.getCountPerPhoton());
      final double qe = (cr.hasQuantumEfficiency()) ? cr.getQuantumEfficiency() : 1;
      // e-/photon / count/photon => e-/count
      final double ecf = qe / cr.getCountPerPhoton();
      builder.addEcf(ecf);
      builder.addQe(qe);
    }
  }
  if (!TextUtils.isNullOrEmpty(getConfiguration())) {
    builder.setConfiguration(singleLine(getConfiguration()));
  }
  if (getPsf() != null) {
    try {
      final Printer printer = JsonFormat.printer().omittingInsignificantWhitespace();
      builder.setPSF(printer.print(getPsf()));
    } catch (final InvalidProtocolBufferException ex) {
      // This shouldn't happen so throw it
      throw new NotImplementedException("Unable to serialise the PSF settings", ex);
    }
  }
  // Have a property so the boxSize can be set
  if (boxSize > 0) {
    builder.setBoxSize(boxSize);
  }
  builder.setFitMode(fitMode);
  final FluorophoreType.Builder typeBuilder = FluorophoreType.newBuilder();
  typeBuilder.setId(1);
  typeBuilder.setDescription("Default fluorophore");
  typeBuilder.setIsFiducial(false);
  builder.addFluorophoreTypes(typeBuilder.build());
  return builder.build();
}
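The PSF field is embedded in the SpotList as compact JSON produced by JsonFormat.printer(). The print call is declared to throw InvalidProtocolBufferException (it can fail, for example, when an Any field cannot be resolved by the printer's type registry), which is why the writer wraps it. Here is a generic helper in the same spirit; the names ProtoJson and toCompactJson are chosen purely for illustration.

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.MessageOrBuilder;
import com.google.protobuf.util.JsonFormat;

final class ProtoJson {
  /**
   * Serialises any protobuf message to single-line JSON suitable for embedding
   * in a text header field such as the SpotList PSF string.
   */
  static String toCompactJson(MessageOrBuilder message) {
    try {
      return JsonFormat.printer()
          .omittingInsignificantWhitespace() // no newlines or padding
          .print(message);
    } catch (InvalidProtocolBufferException ex) {
      // Printing only fails in unusual cases, so surface it as a programming
      // error, much as createSpotList() does.
      throw new IllegalStateException("Unable to serialise message to JSON", ex);
    }
  }
}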
Use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project GDSC-SMLM by aherbert.
The class TsfPeakResultsReader, method createResults:
private MemoryPeakResults createResults() {
  // Limit the capacity since we may not need all the spots
  int capacity = 1000;
  if (spotList.hasNrSpots()) {
    capacity = (int) Math.min(100000, spotList.getNrSpots());
  }
  final MemoryPeakResults results = new MemoryPeakResults(capacity);
  // Create the type of Gaussian PSF
  if (spotList.hasFitMode()) {
    switch (spotList.getFitMode()) {
      case ONEAXIS:
        results.setPsf(PsfHelper.create(PSFType.ONE_AXIS_GAUSSIAN_2D));
        break;
      case TWOAXIS:
        results.setPsf(PsfHelper.create(PSFType.TWO_AXIS_GAUSSIAN_2D));
        break;
      case TWOAXISANDTHETA:
        results.setPsf(PsfHelper.create(PSFType.TWO_AXIS_AND_THETA_GAUSSIAN_2D));
        break;
      default:
        break;
    }
  }
  // Generic reconstruction
  String name;
  if (spotList.hasName()) {
    name = spotList.getName();
  } else {
    name = FileUtils.getName(filename);
  }
  // Append these if not using the defaults
  if (channel != 1 || slice != 0 || position != 0 || fluorophoreType != 1) {
    name = String.format("%s c=%d, s=%d, p=%d, ft=%d", name, channel, slice, position, fluorophoreType);
  }
  results.setName(name);
  // if (spotList.hasNrPixelsX() && spotList.hasNrPixelsY()) {
  //   // Do not do this. The size of the camera may not map to the data bounds due
  //   // to the support for position offsets.
  //   results.setBounds(new Rectangle(0, 0, spotList.getNrPixelsX(), spotList.getNrPixelsY()));
  // }
  final CalibrationWriter cal = new CalibrationWriter();
  // Spots are associated with frames
  cal.setTimeUnit(TimeUnit.FRAME);
  if (spotList.hasPixelSize()) {
    cal.setNmPerPixel(spotList.getPixelSize());
  }
  if (spotList.getEcfCount() >= channel) {
    // ECF is per channel
    final double ecf = spotList.getEcf(channel - 1);
    // QE is per fluorophore type
    final double qe = (spotList.getQeCount() >= fluorophoreType) ? spotList.getQe(fluorophoreType - 1) : 1;
    // e-/photon / e-/count => count/photon
    cal.setCountPerPhoton(qe / ecf);
    cal.setQuantumEfficiency(qe);
  }
  if (isGdsc) {
    if (spotList.hasSource()) {
      // Deserialise
      results.setSource(ImageSource.fromXml(spotList.getSource()));
    }
    if (spotList.hasRoi()) {
      final ROI roi = spotList.getRoi();
      if (roi.hasX() && roi.hasY() && roi.hasXWidth() && roi.hasYWidth()) {
        results.setBounds(new Rectangle(roi.getX(), roi.getY(), roi.getXWidth(), roi.getYWidth()));
      }
    }
    if (spotList.hasGain()) {
      cal.setCountPerPhoton(spotList.getGain());
    }
    if (spotList.hasExposureTime()) {
      cal.setExposureTime(spotList.getExposureTime());
    }
    if (spotList.hasReadNoise()) {
      cal.setReadNoise(spotList.getReadNoise());
    }
    if (spotList.hasBias()) {
      cal.setBias(spotList.getBias());
    }
    if (spotList.hasCameraType()) {
      cal.setCameraType(cameraTypeMap.get(spotList.getCameraType()));
    } else {
      cal.setCameraType(null);
    }
    if (spotList.hasConfiguration()) {
      results.setConfiguration(spotList.getConfiguration());
    }
    // Allow restoring the GDSC PSF exactly
    if (spotList.hasPSF()) {
      try {
        final Parser parser = JsonFormat.parser();
        final PSF.Builder psfBuilder = PSF.newBuilder();
        parser.merge(spotList.getPSF(), psfBuilder);
        results.setPsf(psfBuilder.build());
      } catch (final InvalidProtocolBufferException ex) {
        logger.warning("Unable to deserialise the PSF settings");
      }
    }
  }
  if (spotList.hasLocationUnits()) {
    cal.setDistanceUnit(locationUnitsMap.get(spotList.getLocationUnits()));
    if (!spotList.hasPixelSize() && spotList.getLocationUnits() != LocationUnits.PIXELS) {
      logger.warning(() -> "TSF location units are not pixels and no pixel size calibration is available."
          + " The dataset will be constructed in the native units: " + spotList.getLocationUnits());
    }
  } else {
    cal.setDistanceUnit(null);
  }
  if (spotList.hasIntensityUnits()) {
    cal.setIntensityUnit(intensityUnitsMap.get(spotList.getIntensityUnits()));
    if (!spotList.hasGain() && spotList.getIntensityUnits() != IntensityUnits.COUNTS) {
      logger.warning(() -> "TSF intensity units are not counts and no gain calibration is available."
          + " The dataset will be constructed in the native units: " + spotList.getIntensityUnits());
    }
  } else {
    cal.setIntensityUnit(null);
  }
  if (spotList.hasThetaUnits()) {
    cal.setAngleUnit(thetaUnitsMap.get(spotList.getThetaUnits()));
  } else {
    cal.setAngleUnit(null);
  }
  results.setCalibration(cal.getCalibration());
  return results;
}
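The calibration arithmetic above packs a unit conversion into one line: TSF stores ecf in electrons per count and qe in electrons per photon, so dividing qe by ecf yields counts per photon. A tiny self-contained check of that conversion follows; the numbers are invented for illustration.

final class GainConversionExample {
  public static void main(String[] args) {
    // Hypothetical TSF values: qe in e-/photon, ecf in e-/count.
    final double qe = 0.9;   // electrons per photon
    final double ecf = 0.02; // electrons per count
    // (e-/photon) / (e-/count) => count/photon, as in createResults() above.
    final double countPerPhoton = qe / ecf;
    System.out.printf("count/photon = %.1f%n", countPerPhoton); // prints 45.0
  }
}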
Use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project graylog2-server by Graylog2.
The class NetFlowCodec, method decodeMessages:
@Nullable
@Override
public Collection<Message> decodeMessages(@Nonnull RawMessage rawMessage) {
    try {
        final ResolvableInetSocketAddress remoteAddress = rawMessage.getRemoteAddress();
        final InetSocketAddress sender = remoteAddress != null ? remoteAddress.getInetSocketAddress() : null;
        final byte[] payload = rawMessage.getPayload();
        if (payload.length < 3) {
            LOG.debug("NetFlow message (source: {}) doesn't even fit the NetFlow version (size: {} bytes)", sender, payload.length);
            return null;
        }
        final ByteBuf buffer = Unpooled.wrappedBuffer(payload);
        switch (buffer.readByte()) {
            case PASSTHROUGH_MARKER:
                final NetFlowV5Packet netFlowV5Packet = NetFlowV5Parser.parsePacket(buffer);
                return netFlowV5Packet.records().stream()
                        .map(record -> NetFlowFormatter.toMessage(netFlowV5Packet.header(), record, sender))
                        .collect(Collectors.toList());
            case ORDERED_V9_MARKER:
                // our "custom" netflow v9 that has all the templates in the same packet
                return decodeV9(sender, buffer);
            default:
                final List<RawMessage.SourceNode> sourceNodes = rawMessage.getSourceNodes();
                final RawMessage.SourceNode sourceNode = sourceNodes.isEmpty() ? null : sourceNodes.get(sourceNodes.size() - 1);
                final String inputId = sourceNode == null ? "<unknown>" : sourceNode.inputId;
                LOG.warn("Unsupported NetFlow packet on input {} (source: {})", inputId, sender);
                return null;
        }
    } catch (FlowException e) {
        LOG.error("Error parsing NetFlow packet <{}> received from <{}>", rawMessage.getId(), rawMessage.getRemoteAddress(), e);
        if (LOG.isDebugEnabled()) {
            LOG.debug("NetFlow packet hexdump:\n{}", ByteBufUtil.prettyHexDump(Unpooled.wrappedBuffer(rawMessage.getPayload())));
        }
        return null;
    } catch (InvalidProtocolBufferException e) {
        LOG.error("Invalid NetFlowV9 entry found, cannot parse the messages", ExceptionUtils.getRootCause(e));
        return null;
    }
}
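The codec's dispatch hinges on a single marker byte at the start of the stored payload, read with Netty's ByteBuf before the real NetFlow parsing begins. Below is a stripped-down sketch of that framing check using only the ByteBuf API; the marker constants and the describe helper are stand-ins for illustration, not the project's actual values.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

final class MarkerDispatchExample {
    // Illustrative marker values only; the real constants live in NetFlowCodec.
    private static final byte PASSTHROUGH_MARKER = 1;
    private static final byte ORDERED_V9_MARKER = 2;

    static String describe(byte[] payload) {
        if (payload.length < 3) {
            // Too short to even hold the NetFlow version field.
            return "truncated";
        }
        final ByteBuf buffer = Unpooled.wrappedBuffer(payload);
        final byte marker = buffer.readByte(); // consumes the first byte; a parser would see the rest
        switch (marker) {
            case PASSTHROUGH_MARKER:
                return "netflow-v5";
            case ORDERED_V9_MARKER:
                return "netflow-v9-with-templates";
            default:
                return "unsupported";
        }
    }
}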