use of org.eclipse.bpmn2.Error in project kie-wb-common by kiegroup.
the class Bpmn2JsonUnmarshaller method setCatchEventsInfoForLanes.
public void setCatchEventsInfoForLanes(Lane lane, Definitions def, List<Signal> toAddSignals, Set<Error> toAddErrors, Set<Escalation> toAddEscalations, Set<Message> toAddMessages, Set<ItemDefinition> toAddItemDefinitions) {
List<FlowNode> laneFlowNodes = lane.getFlowNodeRefs();
for (FlowElement fe : laneFlowNodes) {
if (fe instanceof CatchEvent) {
if (((CatchEvent) fe).getEventDefinitions().size() > 0) {
EventDefinition ed = ((CatchEvent) fe).getEventDefinitions().get(0);
if (ed instanceof SignalEventDefinition) {
SignalEventDefinition sed = (SignalEventDefinition) ed;
if (sed.getSignalRef() != null && sed.getSignalRef().length() > 0) {
String signalRef = sed.getSignalRef();
boolean shouldAddSignal = true;
List<RootElement> rootElements = def.getRootElements();
for (RootElement re : rootElements) {
if (re instanceof Signal) {
if (((Signal) re).getName().equals(signalRef)) {
shouldAddSignal = false;
break;
}
}
}
if (toAddSignals != null) {
for (Signal s : toAddSignals) {
if (s.getName().equals(signalRef)) {
shouldAddSignal = false;
break;
}
}
}
if (shouldAddSignal) {
Signal signal = Bpmn2Factory.eINSTANCE.createSignal();
signal.setName(signalRef);
toAddSignals.add(signal);
}
}
} else if (ed instanceof ErrorEventDefinition) {
String errorCode = null;
String errorId = null;
Iterator<FeatureMap.Entry> iter = ed.getAnyAttribute().iterator();
while (iter.hasNext()) {
FeatureMap.Entry entry = iter.next();
if (entry.getEStructuralFeature().getName().equals("erefname")) {
errorId = (String) entry.getValue();
errorCode = (String) entry.getValue();
}
}
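// reuse a cached Error per error code so every catch event with the same code shares one root element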
Error err = this._errors.get(errorCode);
if (err == null) {
err = Bpmn2Factory.eINSTANCE.createError();
err.setId(errorId);
err.setErrorCode(errorCode);
this._errors.put(errorCode, err);
}
toAddErrors.add(err);
((ErrorEventDefinition) ed).setErrorRef(err);
} else if (ed instanceof EscalationEventDefinition) {
String escalationCode = null;
Iterator<FeatureMap.Entry> iter = ed.getAnyAttribute().iterator();
while (iter.hasNext()) {
FeatureMap.Entry entry = iter.next();
if (entry.getEStructuralFeature().getName().equals("esccode")) {
escalationCode = (String) entry.getValue();
break;
}
}
Escalation escalation = this._escalations.get(escalationCode);
if (escalation == null) {
escalation = Bpmn2Factory.eINSTANCE.createEscalation();
escalation.setEscalationCode(escalationCode);
this._escalations.put(escalationCode, escalation);
}
toAddEscalations.add(escalation);
((EscalationEventDefinition) ed).setEscalationRef(escalation);
} else if (ed instanceof MessageEventDefinition) {
((MessageEventDefinition) ed).setMessageRef(extractMessage(ed, toAddMessages, toAddItemDefinitions));
} else if (ed instanceof CompensateEventDefinition) {
Iterator<FeatureMap.Entry> iter = ed.getAnyAttribute().iterator();
while (iter.hasNext()) {
FeatureMap.Entry entry = iter.next();
if (entry.getEStructuralFeature().getName().equals("actrefname")) {
String activityNameRef = (String) entry.getValue();
// we have to iterate again through all flow elements
// in order to find our activity name
List<RootElement> re = def.getRootElements();
for (RootElement r : re) {
if (r instanceof Process) {
Process p = (Process) r;
List<FlowElement> fes = p.getFlowElements();
for (FlowElement f : fes) {
if (f instanceof Activity && ((Activity) f).getName().equals(activityNameRef)) {
((CompensateEventDefinition) ed).setActivityRef((Activity) f);
((Activity) f).setIsForCompensation(true);
}
}
}
}
}
}
}
}
} else if (fe instanceof FlowElementsContainer) {
setCatchEventsInfo((FlowElementsContainer) fe, def, toAddSignals, toAddErrors, toAddEscalations, toAddMessages, toAddItemDefinitions);
}
}
}
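For context, a caller-side sketch of how the collected elements are presumably merged back into the BPMN2 model; the loop below is an assumption about the surrounding unmarshaller code, not code taken from Bpmn2JsonUnmarshaller:
// hypothetical caller-side sketch (assumed): the collected signals, errors, escalations,
// messages and item definitions are all BPMN2 RootElements, so they can simply be
// appended to the Definitions' root element list once the lanes have been processed.
for (Signal s : toAddSignals) {
    def.getRootElements().add(s);
}
for (org.eclipse.bpmn2.Error e : toAddErrors) {
    def.getRootElements().add(e);
}
for (Escalation esc : toAddEscalations) {
    def.getRootElements().add(esc);
}
for (Message m : toAddMessages) {
    def.getRootElements().add(m);
}
for (ItemDefinition itemDef : toAddItemDefinitions) {
    def.getRootElements().add(itemDef);
}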
use of org.bytedeco.javacpp.FlyCapture2.Error in project javacv by bytedeco.
the class FlyCapture2FrameGrabber method start.
public void start() throws FrameGrabber.Exception {
// TODO: Default 30 ?
int f = FRAMERATE_30;
if (frameRate <= 0) {
f = FRAMERATE_30;
} else if (frameRate <= 1.876) {
f = FRAMERATE_1_875;
} else if (frameRate <= 3.76) {
f = FRAMERATE_3_75;
} else if (frameRate <= 7.51) {
f = FRAMERATE_7_5;
} else if (frameRate <= 15.01) {
f = FRAMERATE_15;
} else if (frameRate <= 30.01) {
f = FRAMERATE_30;
} else if (frameRate <= 60.01) {
f = FRAMERATE_60;
} else if (frameRate <= 120.01) {
f = FRAMERATE_120;
} else if (frameRate <= 240.01) {
f = FRAMERATE_240;
}
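// f now holds the FlyCapture frame rate constant closest to the requested frameRate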
int c = VIDEOMODE_ANY;
if (imageMode == FrameGrabber.ImageMode.COLOR || imageMode == FrameGrabber.ImageMode.RAW) {
if (imageWidth <= 0 || imageHeight <= 0) {
c = VIDEOMODE_ANY;
} else if (imageWidth <= 640 && imageHeight <= 480) {
c = VIDEOMODE_640x480RGB;
} else if (imageWidth <= 800 && imageHeight <= 600) {
c = VIDEOMODE_800x600RGB;
} else if (imageWidth <= 1024 && imageHeight <= 768) {
c = VIDEOMODE_1024x768RGB;
} else if (imageWidth <= 1280 && imageHeight <= 960) {
c = VIDEOMODE_1280x960RGB;
} else if (imageWidth <= 1600 && imageHeight <= 1200) {
c = VIDEOMODE_1600x1200RGB;
}
} else if (imageMode == FrameGrabber.ImageMode.GRAY) {
if (imageWidth <= 0 || imageHeight <= 0) {
c = VIDEOMODE_ANY;
} else if (imageWidth <= 640 && imageHeight <= 480) {
c = bpp > 8 ? VIDEOMODE_640x480Y16 : VIDEOMODE_640x480Y8;
} else if (imageWidth <= 800 && imageHeight <= 600) {
c = bpp > 8 ? VIDEOMODE_800x600Y16 : VIDEOMODE_800x600Y8;
} else if (imageWidth <= 1024 && imageHeight <= 768) {
c = bpp > 8 ? VIDEOMODE_1024x768Y16 : VIDEOMODE_1024x768Y8;
} else if (imageWidth <= 1280 && imageHeight <= 960) {
c = bpp > 8 ? VIDEOMODE_1280x960Y16 : VIDEOMODE_1280x960Y8;
} else if (imageWidth <= 1600 && imageHeight <= 1200) {
c = bpp > 8 ? VIDEOMODE_1600x1200Y16 : VIDEOMODE_1600x1200Y8;
}
}
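// c now holds the video mode that best matches the requested size, image mode and bit depth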
// set or reset trigger mode
TriggerMode tm = new TriggerMode();
Error error = camera.GetTriggerMode(tm);
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("GetTriggerMode() Error " + error.GetDescription());
}
tm.onOff(triggerMode);
tm.source(7);
tm.mode(14);
tm.parameter(0);
error = camera.SetTriggerMode(tm);
if (error.notEquals(PGRERROR_OK)) {
// try with trigger mode 0 instead
tm.onOff(true);
tm.source(7);
tm.mode(0);
tm.parameter(0);
error = camera.SetTriggerMode(tm);
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("SetTriggerMode() Error " + error.GetDescription());
}
}
if (triggerMode) {
waitForTriggerReady();
}
// try to match the endianness to our platform
error = camera.ReadRegister(IMAGE_DATA_FORMAT, regOut);
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("ReadRegister(IMAGE_DATA_FORMAT, regOut) Error " + error.GetDescription());
}
int reg;
if (ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN)) {
reg = regOut[0] | 0x1;
} else {
reg = regOut[0] & ~0x1;
}
error = camera.WriteRegister(IMAGE_DATA_FORMAT, reg);
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("WriteRegister(IMAGE_DATA_FORMAT, reg) Error " + error.GetDescription());
}
// TODO: set fastest bus speed ? This may lead to system instability. Use default.
// set `gamma`
Property gammaProp = new Property(FlyCapture2.GAMMA);
if (gamma != 0.0) {
error = camera.GetProperty(gammaProp);
if (error.notEquals(PGRERROR_OK)) {
throw new FrameGrabber.Exception("GetProperty(gammaProp) Error " + error.GetDescription());
}
gammaProp.onOff(true);
gammaProp.absControl(true);
gammaProp.absValue((float) gamma);
error = camera.SetProperty(gammaProp);
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("SetProperty(gammaProp) Error " + error.GetDescription());
}
}
error = camera.GetProperty(gammaProp);
if (error.notEquals(PGRERROR_OK)) {
gammaOut[0] = 2.2f;
} else {
gammaOut[0] = gammaProp.absValue();
}
// start capture; the grab timeout is applied below via the camera configuration
error = camera.StartCapture();
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("StartCapture() Error " + error.GetDescription());
}
// Get the camera configuration
FC2Config config = new FC2Config();
error = camera.GetConfiguration(config);
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("GetConfiguration() Error " + error.GetDescription());
}
// Set the grab timeout from the grabber's timeout setting
config.grabTimeout(timeout);
// Set the camera configuration
error = camera.SetConfiguration(config);
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("SetConfiguration() Error " + error.GetDescription());
}
}
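For reference, a minimal free-running capture sequence with this grabber might look like the following; the device index, frame rate and loop count are illustrative assumptions, not taken from the project:
// hypothetical usage sketch (device index, frame rate and loop count are assumptions)
FlyCapture2FrameGrabber grabber = new FlyCapture2FrameGrabber(0);
grabber.setImageMode(FrameGrabber.ImageMode.COLOR);
grabber.setFrameRate(30);
try {
    grabber.start();
    for (int i = 0; i < 100; i++) {
        Frame frame = grabber.grab();
        // process the frame here
    }
} finally {
    grabber.stop();
    grabber.release();
}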
use of org.bytedeco.javacpp.FlyCapture2.Error in project javacv by bytedeco.
the class FlyCapture2FrameGrabber method trigger.
/**
* @throws org.bytedeco.javacv.FrameGrabber.Exception
*/
public void trigger() throws FrameGrabber.Exception {
waitForTriggerReady();
Error error = camera.FireSoftwareTrigger();
if (error.notEquals(PGRERROR_OK)) {
throw new FrameGrabber.Exception("FireSoftwareTrigger() Error " + error);
}
}
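When trigger mode is enabled, frames are pulled on demand; a hedged sketch of that pattern follows (the setTriggerMode call and the loop are assumptions about typical FrameGrabber usage, not code from this class):
// hypothetical software-trigger loop (assumed usage pattern)
grabber.setTriggerMode(true);     // ask start() to configure software triggering
grabber.start();
for (int i = 0; i < 10; i++) {
    grabber.trigger();            // fire the software trigger once the camera is ready
    Frame frame = grabber.grab(); // retrieve the frame produced by that trigger
}
grabber.stop();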
use of org.bytedeco.javacpp.FlyCapture2.Error in project javacv by bytedeco.
the class FlyCapture2FrameGrabber method stop.
public void stop() throws FrameGrabber.Exception {
Error error = camera.StopCapture();
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("flycapture camera StopCapture() Error " + error);
}
temp_image = null;
return_image = null;
timestamp = 0;
frameNumber = 0;
}
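stop() only halts capture and clears the cached images and counters, so the same grabber instance can presumably be restarted; release() frees the camera when it is no longer needed. A small sketch, with the restart assumption noted in the comments:
// sketch: restarting the same grabber instance (restartability is an assumption)
grabber.stop();      // clears temp_image, return_image, timestamp and frameNumber
grabber.start();     // reconfigures the camera and starts capture again
Frame next = grabber.grab();
grabber.stop();
grabber.release();   // finally free the camera handle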
use of org.bytedeco.javacpp.FlyCapture2.Error in project javacv by bytedeco.
the class FlyCapture2FrameGrabber method grab.
public Frame grab() throws FrameGrabber.Exception {
Error error = camera.RetrieveBuffer(raw_image);
if (error.notEquals(PGRERROR_OK)) {
throw new FrameGrabber.Exception("RetrieveBuffer() Error " + error + " (Has start() been called?)");
}
int w = raw_image.GetCols();
int h = raw_image.GetRows();
int format = raw_image.GetPixelFormat();
int depth = getDepth(format);
int stride = raw_image.GetStride();
int size = h * stride;
int numChannels = getNumChannels(format);
error = camera.ReadRegister(IMAGE_DATA_FORMAT, regOut);
if (error.notEquals(PGRERROR_OK)) {
throw new FrameGrabber.Exception("ReadRegister(IMAGE_DATA_FORMAT, regOut) Error " + error);
}
ByteOrder frameEndian = (regOut[0] & 0x1) != 0 ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN;
boolean alreadySwapped = false;
boolean colorbayer = raw_image.GetBayerTileFormat() != NONE;
boolean colorrgb = format == PIXEL_FORMAT_RGB8 || format == PIXEL_FORMAT_RGB16 || format == PIXEL_FORMAT_BGR || format == PIXEL_FORMAT_BGRU;
boolean coloryuv = format == PIXEL_FORMAT_411YUV8 || format == PIXEL_FORMAT_422YUV8 || format == PIXEL_FORMAT_444YUV8;
BytePointer imageData = raw_image.GetData().capacity(raw_image.GetDataSize());
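// fast path: wrap the raw buffer directly when it already matches the requested image mode and the platform byte order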
if ((depth == IPL_DEPTH_8U || frameEndian.equals(ByteOrder.nativeOrder())) && (imageMode == FrameGrabber.ImageMode.RAW || (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 3) || (imageMode == FrameGrabber.ImageMode.GRAY && numChannels == 1 && !colorbayer))) {
if (return_image == null) {
return_image = IplImage.createHeader(w, h, depth, numChannels);
}
return_image.widthStep(stride);
return_image.imageSize(size);
return_image.imageData(imageData);
} else {
if (return_image == null) {
return_image = IplImage.create(w, h, depth, imageMode == FrameGrabber.ImageMode.COLOR ? 3 : 1);
}
if (temp_image == null) {
if (imageMode == FrameGrabber.ImageMode.COLOR && (numChannels > 1 || depth > 8) && !coloryuv && !colorbayer) {
temp_image = IplImage.create(w, h, depth, numChannels);
} else if (imageMode == FrameGrabber.ImageMode.GRAY && colorbayer) {
temp_image = IplImage.create(w, h, depth, 3);
} else if (imageMode == FrameGrabber.ImageMode.GRAY && colorrgb) {
temp_image = IplImage.createHeader(w, h, depth, 3);
} else if (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
temp_image = IplImage.createHeader(w, h, depth, 1);
} else {
temp_image = return_image;
}
}
setStride(conv_image, temp_image.widthStep());
conv_image.SetData(temp_image.imageData(), temp_image.width() * temp_image.height() * temp_image.depth());
if (depth == IPL_DEPTH_8U) {
setPixelFormat(conv_image, imageMode == FrameGrabber.ImageMode.RAW ? PIXEL_FORMAT_RAW8 : temp_image.nChannels() == 1 ? PIXEL_FORMAT_MONO8 : PIXEL_FORMAT_BGR);
} else {
setPixelFormat(conv_image, imageMode == FrameGrabber.ImageMode.RAW ? PIXEL_FORMAT_RAW16 : temp_image.nChannels() == 1 ? PIXEL_FORMAT_MONO16 : PIXEL_FORMAT_RGB16);
}
if (depth != IPL_DEPTH_8U && conv_image.GetPixelFormat() == format && conv_image.GetStride() == stride) {
// we just need a copy to swap bytes..
ShortBuffer in = imageData.asByteBuffer().order(frameEndian).asShortBuffer();
ShortBuffer out = temp_image.getByteBuffer().order(ByteOrder.nativeOrder()).asShortBuffer();
out.put(in);
alreadySwapped = true;
} else if ((imageMode == FrameGrabber.ImageMode.GRAY && colorrgb) || (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer)) {
temp_image.widthStep(stride);
temp_image.imageSize(size);
temp_image.imageData(imageData);
} else if (!colorrgb && (colorbayer || coloryuv || numChannels > 1)) {
error = raw_image.Convert(conv_image);
// error = flycaptureConvertImage(context, raw_image, conv_image);
if (error.notEquals(PGRERROR_OK)) {
PrintError(error);
throw new FrameGrabber.Exception("raw_image.Convert Error " + error);
}
}
if (!alreadySwapped && depth != IPL_DEPTH_8U && !frameEndian.equals(ByteOrder.nativeOrder())) {
// ack, the camera's endianness doesn't correspond to our machine ...
// swap bytes of 16-bit images
ByteBuffer bb = temp_image.getByteBuffer();
ShortBuffer in = bb.order(frameEndian).asShortBuffer();
ShortBuffer out = bb.order(ByteOrder.nativeOrder()).asShortBuffer();
out.put(in);
}
if (imageMode == FrameGrabber.ImageMode.COLOR && numChannels == 1 && !coloryuv && !colorbayer) {
cvCvtColor(temp_image, return_image, CV_GRAY2BGR);
} else if (imageMode == FrameGrabber.ImageMode.GRAY && (colorbayer || colorrgb)) {
cvCvtColor(temp_image, return_image, CV_BGR2GRAY);
}
}
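// map the camera's Bayer tile format to the grabber's sensor pattern constant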
int bayerFormat = cameraInfo.bayerTileFormat();
switch(bayerFormat) {
case BGGR:
sensorPattern = SENSOR_PATTERN_BGGR;
break;
case GBRG:
sensorPattern = SENSOR_PATTERN_GBRG;
break;
case GRBG:
sensorPattern = SENSOR_PATTERN_GRBG;
break;
case RGGB:
sensorPattern = SENSOR_PATTERN_RGGB;
break;
default:
sensorPattern = -1L;
}
TimeStamp timeStamp = raw_image.GetTimeStamp();
timestamp = timeStamp.seconds() * 1000000L + timeStamp.microSeconds();
return converter.convert(return_image);
}
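The Frame returned by converter.convert(return_image) can then be turned into whatever image type the application needs, for example with javacv's Java2DFrameConverter (a standard javacv converter, though it is not used inside this class):
// example: turning a grabbed Frame into a BufferedImage (Java2DFrameConverter ships with javacv)
Java2DFrameConverter java2d = new Java2DFrameConverter();
Frame frame = grabber.grab();
BufferedImage image = java2d.convert(frame);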