Use of org.red5.server.net.rtmp.event.VideoData in project bigbluebutton by bigbluebutton.
The class ScreenVideoBroadcastStream, method dispatchEvent.
public void dispatchEvent(IEvent event) {
    try {
        // System.out.println("dispatchEvent(event:screenVideo)");
        if (event instanceof IRTMPEvent) {
            IRTMPEvent rtmpEvent = (IRTMPEvent) event;
            if (livePipe != null) {
                RTMPMessage msg = RTMPMessage.build(rtmpEvent, Constants.SOURCE_TYPE_LIVE);
                if (creationTime == null) {
                    creationTime = (long) rtmpEvent.getTimestamp();
                }
                try {
                    IVideoStreamCodec videoStreamCodec = new ScreenVideo();
                    streamCodecInfo.setHasVideo(true);
                    streamCodecInfo.setVideoCodec(videoStreamCodec);
                    videoStreamCodec.reset();
                    videoStreamCodec.addData(((VideoData) rtmpEvent).getData());
                    livePipe.pushMessage(msg);
                    // Notify listeners about received packet
                    if (rtmpEvent instanceof IStreamPacket) {
                        for (IStreamListener listener : getStreamListeners()) {
                            try {
                                listener.packetReceived(this, (IStreamPacket) rtmpEvent);
                            } catch (Exception e) {
                                log.error("Error while notifying listener " + listener, e);
                            }
                        }
                    }
                } catch (IOException ex) {
                    // ignore
                    log.error("Got exception: {}", ex);
                }
            }
        }
    } finally {
    }
}
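A minimal sketch of how a caller might feed this dispatchEvent with an already encoded Screen Video frame. The ScreenFrameDispatcher class, its constructor, and the sendFrame parameters are hypothetical; only VideoData, IoBuffer, and dispatchEvent come from the snippet above.

import org.apache.mina.core.buffer.IoBuffer;
import org.red5.server.net.rtmp.event.VideoData;

// Hypothetical caller: wraps an already encoded Screen Video frame in a
// VideoData event and hands it to the broadcast stream shown above.
public class ScreenFrameDispatcher {
    private final ScreenVideoBroadcastStream stream; // assumed to be created elsewhere

    public ScreenFrameDispatcher(ScreenVideoBroadcastStream stream) {
        this.stream = stream;
    }

    public void sendFrame(byte[] encodedFrame, int timestampMs) {
        IoBuffer buf = IoBuffer.allocate(encodedFrame.length);
        buf.put(encodedFrame);
        buf.flip();
        VideoData video = new VideoData(buf);
        video.setTimestamp(timestampMs);
        stream.dispatchEvent(video); // runs the dispatchEvent method above
    }
}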
Use of org.red5.server.net.rtmp.event.VideoData in project bigbluebutton by bigbluebutton.
The class ScreenshareStreamListener, method packetReceived.
@Override
public void packetReceived(IBroadcastStream stream, IStreamPacket packet) {
    IoBuffer buf = packet.getData();
    if (buf != null) {
        buf.rewind();
    }
    if (buf == null || buf.remaining() == 0) {
        return;
    }
    if (packet instanceof VideoData) {
        if (!firstPacketReceived) {
            firstPacketReceived = true;
            IConnection conn = Red5.getConnectionLocal();
            String meetingId = conn.getScope().getName();
            String filename = recordingDir;
            if (!filename.endsWith("/")) {
                // String.concat() returns a new string, so the result must be reassigned
                filename = filename.concat("/");
            }
            filename = filename.concat(meetingId).concat("/").concat(stream.getPublishedName()).concat(".flv");
            Map<String, String> event = new HashMap<String, String>();
            event.put("module", "Deskshare");
            event.put("timestamp", genTimestamp().toString());
            event.put("meetingId", meetingId);
            event.put("file", filename);
            event.put("stream", stream.getPublishedName());
            event.put("eventName", "DeskshareStartedEvent");
            recordingService.record(conn.getScope().getName(), event);
        }
    }
}
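A small sketch of how such a listener is typically attached. The addStreamListener call is Red5's IBroadcastStream API; the listener's constructor arguments are an assumption based on the fields used above (recordingService, recordingDir).

import org.red5.server.api.stream.IBroadcastStream;

// Hypothetical wiring: attach the listener when a screenshare stream starts
// publishing, so packetReceived fires for every incoming packet.
public void onScreensharePublish(IBroadcastStream stream) {
    // Constructor arguments are assumed from the fields used in packetReceived.
    ScreenshareStreamListener listener = new ScreenshareStreamListener(recordingService, recordingDir);
    stream.addStreamListener(listener);
}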
Use of org.red5.server.net.rtmp.event.VideoData in project openmeetings by apache.
The class CaptureScreen, method pushVideo.
public void pushVideo(VideoData data, int ts) {
    if (startPublish) {
        if (Red5.getConnectionLocal() == null) {
            Red5.setConnectionLocal(client.getConnection());
        }
        RTMPMessage rtmpMsg = RTMPMessage.build(data, ts);
        client.publishStreamData(streamId, rtmpMsg);
    }
}
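A minimal sketch of a driver loop for pushVideo, assuming a list of pre-encoded VideoData frames and a fixed frame rate; the streamFrames method, frames list, and frameRate parameter are illustrative and not part of CaptureScreen.

import java.util.List;
import org.red5.server.net.rtmp.event.VideoData;

// Hypothetical driver: push pre-encoded frames with an RTMP timestamp that
// advances by the frame duration on every call to pushVideo.
public void streamFrames(CaptureScreen capture, List<VideoData> frames, int frameRate) {
    int ts = 0;
    final int frameDurationMs = 1000 / frameRate;
    for (VideoData frame : frames) {
        capture.pushVideo(frame, ts);
        ts += frameDurationMs;
    }
}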
Use of org.red5.server.net.rtmp.event.VideoData in project openmeetings by apache.
The class EncodeJob, method execute.
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap data = context.getJobDetail().getJobDataMap();
    CaptureScreen capture = (CaptureScreen) data.get(CAPTURE_KEY);
    if (screen == null) {
        dim = capture.getDim();
        screen = new Rectangle(dim.getSpinnerX(), dim.getSpinnerY(), dim.getSpinnerWidth(), dim.getSpinnerHeight());
    }
    long start = 0;
    if (log.isTraceEnabled()) {
        start = System.currentTimeMillis();
    }
    image = ScreenV1Encoder.getImage(dim, screen, robot);
    if (log.isTraceEnabled()) {
        log.trace(String.format("encode: Image was captured in %s ms, size %sk", System.currentTimeMillis() - start, 4 * image.length * image[0].length / 1024));
        start = System.currentTimeMillis();
    }
    try {
        VideoData vData = capture.getEncoder().encode(image);
        if (log.isTraceEnabled()) {
            long now = System.currentTimeMillis();
            log.trace(String.format("encode: Image was encoded in %s ms, timestamp is %s", now - start, now - capture.getStartTime()));
        }
        capture.getFrames().offer(vData);
        capture.getEncoder().createUnalteredFrame();
    } catch (Exception e) {
        log.error("Error while encoding: ", e);
    }
}
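A sketch of how such a Quartz job could be scheduled at the capture frame interval. The Quartz builder calls are the standard API; CAPTURE_KEY is assumed to be the same constant the job reads above (visible to this method), and the frame interval is supplied by the caller.

import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleScheduleBuilder;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

// Hypothetical scheduling: run EncodeJob repeatedly at the capture frame
// interval, handing the CaptureScreen instance to the job via the JobDataMap
// under CAPTURE_KEY (assumed accessible here).
public void scheduleEncodeJob(CaptureScreen capture, long frameIntervalMs) throws SchedulerException {
    Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
    JobDetail encodeJob = JobBuilder.newJob(EncodeJob.class)
            .withIdentity("encodeJob")
            .build();
    encodeJob.getJobDataMap().put(CAPTURE_KEY, capture);
    Trigger encodeTrigger = TriggerBuilder.newTrigger()
            .withIdentity("encodeTrigger")
            .startNow()
            .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                    .withIntervalInMilliseconds(frameIntervalMs)
                    .repeatForever())
            .build();
    scheduler.scheduleJob(encodeJob, encodeTrigger);
    scheduler.start();
}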
Use of org.red5.server.net.rtmp.event.VideoData in project openmeetings by apache.
The class SendJob, method execute.
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap data = context.getJobDetail().getJobDataMap();
    CaptureScreen capture = (CaptureScreen) data.get(CAPTURE_KEY);
    capture.setSendFrameGuard(true);
    if (log.isTraceEnabled()) {
        long real = System.currentTimeMillis() - capture.getStartTime();
        log.trace(String.format("send: Enter method, timestamp: %s, real: %s, diff: %s", capture.getTimestamp(), real, real - capture.getTimestamp().get()));
    }
    VideoData f = capture.getFrames().poll();
    if (log.isTraceEnabled()) {
        log.trace(String.format("send: Getting %s image", f == null ? "DUMMY" : "CAPTURED"));
    }
    f = f == null ? capture.getEncoder().getUnalteredFrame() : f;
    if (f != null) {
        capture.pushVideo(f, capture.getTimestamp().get());
        if (log.isTraceEnabled()) {
            long real = System.currentTimeMillis() - capture.getStartTime();
            log.trace(String.format("send: Sending video %sk, timestamp: %s, real: %s, diff: %s", f.getData().capacity() / 1024, capture.getTimestamp(), real, real - capture.getTimestamp().get()));
        }
        capture.getTimestamp().addAndGet(capture.getTimestampDelta());
        if (log.isTraceEnabled()) {
            log.trace(String.format("send: new timestamp: %s", capture.getTimestamp()));
        }
    } else if (log.isTraceEnabled()) {
        log.trace("send: nothing to send");
    }
    capture.setSendFrameGuard(false);
}
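SendJob can be scheduled the same way as the EncodeJob sketch above; a minimal companion trigger follows (imports as in that sketch, identities and interval are illustrative, CAPTURE_KEY again assumed accessible).

// Hypothetical companion scheduling for SendJob, mirroring the EncodeJob trigger
// so queued frames are drained at the same rate they are produced.
public void scheduleSendJob(Scheduler scheduler, CaptureScreen capture, long frameIntervalMs) throws SchedulerException {
    JobDetail sendJob = JobBuilder.newJob(SendJob.class)
            .withIdentity("sendJob")
            .build();
    sendJob.getJobDataMap().put(CAPTURE_KEY, capture);
    scheduler.scheduleJob(sendJob, TriggerBuilder.newTrigger()
            .withIdentity("sendTrigger")
            .startNow()
            .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                    .withIntervalInMilliseconds(frameIntervalMs)
                    .repeatForever())
            .build());
}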