Use of javax.media.Format in project Spark by igniterealtime.
The class JMFInit, method detectDirectAudio.
private void detectDirectAudio() {
    Class<?> cls;
    int plType = PlugInManager.RENDERER;
    String dar = "com.sun.media.renderer.audio.DirectAudioRenderer";
    try {
        // Check if this is the Windows Performance Pack - hack
        Class.forName("VFWAuto");
        // Check if DS capture is supported, otherwise fail DS renderer
        // since NT doesn't have capture
        Class.forName("com.sun.media.protocol.dsound.DSound");
        // Find the renderer class and instantiate it.
        cls = Class.forName(dar);
        Renderer rend = (Renderer) cls.newInstance();
        try {
            // Set the format and open the device
            AudioFormat af = new AudioFormat(AudioFormat.LINEAR, 44100, 16, 2);
            rend.setInputFormat(af);
            rend.open();
            Format[] inputFormats = rend.getSupportedInputFormats();
            // Register the device
            PlugInManager.addPlugIn(dar, inputFormats, new Format[0], plType);
            // Move it to the top of the list
            Vector<String> rendList = PlugInManager.getPlugInList(null, null, plType);
            int listSize = rendList.size();
            if (rendList.elementAt(listSize - 1).equals(dar)) {
                rendList.removeElementAt(listSize - 1);
                rendList.insertElementAt(dar, 0);
                PlugInManager.setPlugInList(rendList, plType);
                PlugInManager.commit();
                // Log.debug("registered");
            }
            rend.close();
        } catch (Throwable t) {
            // Log.debug("Error " + t);
        }
    } catch (Throwable tt) {
        // Nothing to do
    }
}
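After this method has run successfully, the DirectAudioRenderer class name should sit at the front of JMF's renderer plug-in list. A minimal check, assuming only the standard PlugInManager API and the javax.media imports the class already uses (the helper name isDirectAudioRegistered is invented here for illustration), might look like:

private static boolean isDirectAudioRegistered() {
    // getPlugInList returns a raw Vector of plug-in class names for the given type
    java.util.Vector<?> renderers = PlugInManager.getPlugInList(null, null, PlugInManager.RENDERER);
    // The detection code above inserts the DirectAudioRenderer at index 0 on success
    return !renderers.isEmpty()
            && "com.sun.media.renderer.audio.DirectAudioRenderer".equals(renderers.elementAt(0));
}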
Use of javax.media.Format in project Smack by igniterealtime.
The class AudioChannel, method createProcessor.
private String createProcessor() {
    if (locator == null)
        return "Locator is null";
    DataSource ds;
    try {
        ds = javax.media.Manager.createDataSource(locator);
    } catch (Exception e) {
        // Try a JavaSound locator as a last resort
        try {
            ds = javax.media.Manager.createDataSource(new MediaLocator("javasound://"));
        } catch (Exception ee) {
            return "Couldn't create DataSource";
        }
    }
    // Try to create a processor to handle the input JMF locator
    try {
        processor = javax.media.Manager.createProcessor(ds);
    } catch (NoProcessorException npe) {
        LOGGER.log(Level.WARNING, "exception", npe);
        return "Couldn't create processor";
    } catch (IOException ioe) {
        LOGGER.log(Level.WARNING, "exception", ioe);
        return "IOException creating processor";
    }
    // Wait for it to configure
    boolean result = waitForState(processor, Processor.Configured);
    if (!result) {
        return "Couldn't configure processor";
    }
    // Get the tracks from the processor
    TrackControl[] tracks = processor.getTrackControls();
    // Do we have at least one track?
    if (tracks == null || tracks.length < 1) {
        return "Couldn't find tracks in processor";
    }
    // Set the output content descriptor to RAW_RTP.
    // This limits the formats reported by TrackControl.getSupportedFormats
    // to valid RTP formats only.
    ContentDescriptor cd = new ContentDescriptor(ContentDescriptor.RAW_RTP);
    processor.setContentDescriptor(cd);
    Format[] supported;
    Format chosen = null;
    boolean atLeastOneTrack = false;
    // Program the tracks.
    for (int i = 0; i < tracks.length; i++) {
        if (tracks[i].isEnabled()) {
            supported = tracks[i].getSupportedFormats();
            if (supported.length > 0) {
                for (Format format : supported) {
                    if (format instanceof AudioFormat) {
                        if (this.format.matches(format))
                            chosen = format;
                    }
                }
                if (chosen != null) {
                    tracks[i].setFormat(chosen);
                    LOGGER.severe("Track " + i + " is set to transmit as: " + chosen);
                    if (tracks[i].getFormat() instanceof AudioFormat) {
                        int packetRate = 20;
                        PacketSizeControl pktCtrl = (PacketSizeControl) processor.getControl(PacketSizeControl.class.getName());
                        if (pktCtrl != null) {
                            try {
                                pktCtrl.setPacketSize(getPacketSize(tracks[i].getFormat(), packetRate));
                            } catch (IllegalArgumentException e) {
                                // Fall back to a small default packet size
                                pktCtrl.setPacketSize(80);
                            }
                        }
                        if (tracks[i].getFormat().getEncoding().equals(AudioFormat.ULAW_RTP)) {
                            Codec[] codec = new Codec[3];
                            codec[0] = new com.ibm.media.codec.audio.rc.RCModule();
                            codec[1] = new com.ibm.media.codec.audio.ulaw.JavaEncoder();
                            codec[2] = new com.sun.media.codec.audio.ulaw.Packetizer();
                            ((com.sun.media.codec.audio.ulaw.Packetizer) codec[2]).setPacketSize(160);
                            try {
                                tracks[i].setCodecChain(codec);
                            } catch (UnsupportedPlugInException e) {
                                LOGGER.log(Level.WARNING, "exception", e);
                            }
                        }
                    }
                    atLeastOneTrack = true;
                } else
                    tracks[i].setEnabled(false);
            } else
                tracks[i].setEnabled(false);
        }
    }
    if (!atLeastOneTrack)
        return "Couldn't set any of the tracks to a valid RTP format";
    result = waitForState(processor, Controller.Realized);
    if (!result)
        return "Couldn't realize processor";
    // Get the output data source of the processor
    dataOutput = processor.getDataOutput();
    return null;
}
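createProcessor relies on a waitForState helper that is not shown in this excerpt. A common JMF idiom, sketched below under the assumption of a private lock object and failure flag on the enclosing class (not necessarily the exact helper used by Smack's AudioChannel), registers a ControllerListener, drives the processor toward the target state, and blocks until the state is reached or an error event arrives:

private final Object stateLock = new Object();
private boolean failed = false;

private boolean waitForState(Processor p, int state) {
    // Wake the waiting thread on every controller event; remember failures
    p.addControllerListener(new ControllerListener() {
        public void controllerUpdate(ControllerEvent ce) {
            if (ce instanceof ControllerErrorEvent) {
                failed = true;
            }
            synchronized (stateLock) {
                stateLock.notifyAll();
            }
        }
    });
    failed = false;
    // Kick off the asynchronous transition toward the requested state
    if (state == Processor.Configured) {
        p.configure();
    } else if (state == Controller.Realized) {
        p.realize();
    }
    // Block until the processor reaches the state or reports an error
    while (p.getState() < state && !failed) {
        synchronized (stateLock) {
            try {
                stateLock.wait(100);
            } catch (InterruptedException ie) {
                return false;
            }
        }
    }
    return !failed;
}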
Use of javax.media.Format in project Smack by igniterealtime.
The class JMFInit, method detectDirectAudio.
private static void detectDirectAudio() {
    Class<?> cls;
    int plType = PlugInManager.RENDERER;
    String dar = "com.sun.media.renderer.audio.DirectAudioRenderer";
    try {
        // Check if this is the Windows Performance Pack - hack
        cls = Class.forName("VFWAuto");
        // Check if DS capture is supported, otherwise fail DS renderer
        // since NT doesn't have capture
        cls = Class.forName("com.sun.media.protocol.dsound.DSound");
        // Find the renderer class and instantiate it.
        cls = Class.forName(dar);
        Renderer rend = (Renderer) cls.getConstructor().newInstance();
        try {
            // Set the format and open the device
            AudioFormat af = new AudioFormat(AudioFormat.LINEAR, 44100, 16, 2);
            rend.setInputFormat(af);
            rend.open();
            Format[] inputFormats = rend.getSupportedInputFormats();
            // Register the device
            PlugInManager.addPlugIn(dar, inputFormats, new Format[0], plType);
            // Move it to the top of the list
            @SuppressWarnings("unchecked")
            Vector<String> rendList = PlugInManager.getPlugInList(null, null, plType);
            int listSize = rendList.size();
            if (rendList.elementAt(listSize - 1).equals(dar)) {
                rendList.removeElementAt(listSize - 1);
                rendList.insertElementAt(dar, 0);
                PlugInManager.setPlugInList(rendList, plType);
                PlugInManager.commit();
                // Log.debug("registered");
            }
            rend.close();
        } catch (Throwable t) {
            // Log.debug("Error " + t);
        }
    } catch (Throwable tt) {
        // Do nothing.
    }
}
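The Spark and Smack variants differ only in reflection style (newInstance() versus getConstructor().newInstance()) and in the @SuppressWarnings on the plug-in list; both end with the same "move the renderer to the top" step. Factored out, assuming nothing beyond the standard PlugInManager API (the helper name promoteRenderer is invented here), that step could look like:

private static void promoteRenderer(String className, int plType) throws java.io.IOException {
    // getPlugInList returns a raw Vector of plug-in class names for the given type
    @SuppressWarnings("unchecked")
    Vector<String> plugins = PlugInManager.getPlugInList(null, null, plType);
    if (plugins.remove(className)) {
        // Re-insert at the front so this renderer is preferred over the others
        plugins.insertElementAt(className, 0);
        PlugInManager.setPlugInList(plugins, plType);
        // Persist the new ordering to the JMF registry
        PlugInManager.commit();
    }
}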
Use of javax.media.Format in project Spark by igniterealtime.
The class VideoChannel, method main.
public static void main(String[] args) {
    InetAddress localhost;
    try {
        // FMJ
        RegistryDefaults.registerAll(RegistryDefaults.FMJ | RegistryDefaults.FMJ_NATIVE);
        // PlugInManager.addPlugIn(, in, out, type)
        LibJitsi.start();
        // Add Device
        MediaType[] mediaTypes = MediaType.values();
        MediaService mediaService = LibJitsi.getMediaService();
        // LOG ALL Devices
        final Vector<CaptureDeviceInfo> vectorDevices = CaptureDeviceManager.getDeviceList(null);
        for (CaptureDeviceInfo infoCaptureDevice : vectorDevices) {
            System.err.println("===========> " + infoCaptureDevice.getName());
            for (Format format : infoCaptureDevice.getFormats()) {
                System.err.println(format);
            }
        }
        localhost = InetAddress.getLocalHost();
        VideoChannel videoChannel0 = new VideoChannel(new MediaLocator("civil:/dev/video0"), localhost.getHostAddress(), localhost.getHostAddress(), 7002, 7020, new VideoFormat(VideoFormat.JPEG_RTP));
        VideoChannel videoChannel1 = new VideoChannel(new MediaLocator("civil:/dev/video1"), localhost.getHostAddress(), localhost.getHostAddress(), 7020, 7002, new VideoFormat(VideoFormat.JPEG_RTP));
        videoChannel0.start();
        videoChannel1.start();
        try {
            Thread.sleep(50000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        videoChannel0.setTrasmit(false);
        videoChannel1.setTrasmit(false);
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        videoChannel0.setTrasmit(true);
        videoChannel1.setTrasmit(true);
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        videoChannel0.stop();
        videoChannel1.stop();
    } catch (UnknownHostException e) {
        e.printStackTrace();
    }
}
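The two channels are wired back to back: channel 0 sends from local port 7002 to remote port 7020 while channel 1 sends from 7020 back to 7002, so each transmits into the other's receive port on the same host. A small helper that makes the pairing explicit might look like the sketch below; loopbackPair is a made-up name, and the VideoChannel constructor signature is taken from the calls above:

// Builds two VideoChannel instances that stream JPEG/RTP into each other's ports.
private static VideoChannel[] loopbackPair(String host, int portA, int portB) {
    VideoFormat jpegRtp = new VideoFormat(VideoFormat.JPEG_RTP);
    VideoChannel a = new VideoChannel(new MediaLocator("civil:/dev/video0"), host, host, portA, portB, jpegRtp);
    VideoChannel b = new VideoChannel(new MediaLocator("civil:/dev/video1"), host, host, portB, portA, jpegRtp);
    return new VideoChannel[] { a, b };
}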