Use of net.pms.configuration.PmsConfiguration in project UniversalMediaServer by UniversalMediaServer.
The class Player, method setAudioOutputParameters.
/**
* This method populates the supplied {@link OutputParams} object with the correct audio track (aid)
* based on the MediaInfo metadata and PMS configuration settings.
*
* @param media
* The MediaInfo metadata for the file.
* @param params
* The parameters to populate.
*/
public static void setAudioOutputParameters(DLNAMediaInfo media, OutputParams params) {
// Use device-specific pms conf
PmsConfiguration configuration = PMS.getConfiguration(params);
if (params.aid == null && media != null && media.getFirstAudioTrack() != null) {
// check for preferred audio
DLNAMediaAudio dtsTrack = null;
StringTokenizer st = new StringTokenizer(configuration.getAudioLanguages(), ",");
while (st.hasMoreTokens()) {
String lang = st.nextToken().trim();
LOGGER.trace("Looking for an audio track with lang: " + lang);
for (DLNAMediaAudio audio : media.getAudioTracksList()) {
if (audio.matchCode(lang)) {
params.aid = audio;
LOGGER.trace("Matched audio track: " + audio);
return;
}
if (dtsTrack == null && audio.isDTS()) {
dtsTrack = audio;
}
}
}
// Preferred audio not found; take a default audio track, DTS first if available
if (dtsTrack != null) {
params.aid = dtsTrack;
LOGGER.trace("Found priority audio track with DTS: " + dtsTrack);
} else {
params.aid = media.getAudioTracksList().get(0);
LOGGER.trace("Chose a default audio track: " + params.aid);
}
}
}
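For context, a minimal sketch of how a transcoding engine might call this helper before building its command line. This is an illustrative fragment only: the media and renderer variables are assumed to already exist in the caller, and only calls visible in the listing above are used.

// Hypothetical call site (sketch only): select the audio track before transcoding.
OutputParams params = new OutputParams(PMS.getConfiguration());
params.mediaRenderer = renderer; // the target renderer, assumed to be in scope
Player.setAudioOutputParameters(media, params);
if (params.aid != null) {
    LOGGER.trace("Using audio track: " + params.aid);
}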
Use of net.pms.configuration.PmsConfiguration in project UniversalMediaServer by UniversalMediaServer.
The class FFMpegVideo, method launchTranscode.
@Override
public synchronized ProcessWrapper launchTranscode(DLNAResource dlna, DLNAMediaInfo media, OutputParams params) throws IOException {
final String filename = dlna.getFileName();
InputFile newInput = new InputFile();
newInput.setFilename(filename);
newInput.setPush(params.stdin);
// Use device-specific pms conf
PmsConfiguration prev = configuration;
configuration = (DeviceConfiguration) params.mediaRenderer;
RendererConfiguration renderer = params.mediaRenderer;
/*
* Check if the video track and the container report different aspect ratios
*/
boolean aspectRatiosMatch = true;
if (media.getAspectRatioContainer() != null && media.getAspectRatioVideoTrack() != null && !media.getAspectRatioContainer().equals(media.getAspectRatioVideoTrack())) {
aspectRatiosMatch = false;
}
/*
* FFmpeg uses multithreading by default, so provided that the
* user has not disabled FFmpeg multithreading and has not
* chosen to use more or less threads than are available, do not
* specify how many cores to use.
*/
int nThreads = 1;
if (configuration.isFfmpegMultithreading()) {
if (Runtime.getRuntime().availableProcessors() == configuration.getNumberOfCpuCores()) {
nThreads = 0;
} else {
nThreads = configuration.getNumberOfCpuCores();
}
}
List<String> cmdList = new ArrayList<>();
boolean avisynth = avisynth();
if (params.timeseek > 0) {
params.waitbeforestart = 1;
} else if (renderer.isTranscodeFastStart()) {
params.manageFastStart();
} else {
params.waitbeforestart = 2500;
}
setAudioAndSubs(filename, media, params);
dlna.setMediaSubtitle(params.sid);
cmdList.add(executable());
// Prevent FFmpeg timeout
cmdList.add("-y");
cmdList.add("-loglevel");
if (LOGGER.isTraceEnabled()) {
// Set -loglevel in accordance with LOGGER setting
// Could be changed to "verbose" or "debug" if "info" level is not enough
cmdList.add("info");
} else {
cmdList.add("fatal");
}
if (params.timeseek > 0) {
cmdList.add("-ss");
cmdList.add(String.valueOf(params.timeseek));
}
// Decoder threads
if (nThreads > 0) {
cmdList.add("-threads");
cmdList.add(String.valueOf(nThreads));
}
final boolean isTsMuxeRVideoEngineEnabled = configuration.getEnginesAsList(PMS.get().getRegistry()).contains(TsMuxeRVideo.ID);
final boolean isXboxOneWebVideo = params.mediaRenderer.isXboxOne() && purpose() == VIDEO_WEBSTREAM_PLAYER;
ac3Remux = false;
dtsRemux = false;
if (configuration.isAudioRemuxAC3() && params.aid != null && params.aid.isAC3() && !avisynth() && renderer.isTranscodeToAC3() && !isXboxOneWebVideo && params.aid.getAudioProperties().getNumberOfChannels() <= configuration.getAudioChannelCount()) {
// AC-3 remux takes priority
ac3Remux = true;
} else {
// Now check for DTS remux and LPCM streaming
dtsRemux = isTsMuxeRVideoEngineEnabled && configuration.isAudioEmbedDtsInPcm() && params.aid != null && params.aid.isDTS() && !avisynth() && params.mediaRenderer.isDTSPlayable();
}
String frameRateRatio = media.getValidFps(true);
String frameRateNumber = media.getValidFps(false);
// Input filename
cmdList.add("-i");
if (avisynth && !filename.toLowerCase().endsWith(".iso")) {
File avsFile = AviSynthFFmpeg.getAVSScript(filename, params.sid, params.fromFrame, params.toFrame, frameRateRatio, frameRateNumber, configuration);
cmdList.add(ProcessUtil.getShortFileNameIfWideChars(avsFile.getAbsolutePath()));
} else {
if (params.stdin != null) {
cmdList.add("pipe:");
} else {
cmdList.add(filename);
}
}
/**
* Defer to MEncoder for subtitles if:
* - The setting is enabled
* - There are subtitles to transcode
* - The file is not being played via the transcode folder
*/
if (!(renderer instanceof RendererConfiguration.OutputOverride) && params.sid != null && !(configuration.isShowTranscodeFolder() && dlna.isNoName() && (dlna.getParent() instanceof FileTranscodeVirtualFolder)) && configuration.isFFmpegDeferToMEncoderForProblematicSubtitles() && params.sid.isEmbedded() && ((params.sid.getType().isText() && params.sid.getType() != SubtitleType.ASS) || params.sid.getType() == SubtitleType.VOBSUB)) {
LOGGER.trace("Switching from FFmpeg to MEncoder to transcode subtitles because the user setting is enabled.");
MEncoderVideo mv = new MEncoderVideo();
return mv.launchTranscode(dlna, media, params);
}
// Decide whether to defer to tsMuxeR or continue to use FFmpeg
if (!(renderer instanceof RendererConfiguration.OutputOverride) && configuration.isFFmpegMuxWithTsMuxerWhenCompatible()) {
boolean deferToTsmuxer = true;
String prependTraceReason = "Not muxing the video stream with tsMuxeR via FFmpeg because ";
if (deferToTsmuxer && configuration.isShowTranscodeFolder() && dlna.isNoName() && (dlna.getParent() instanceof FileTranscodeVirtualFolder)) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "the file is being played via a FFmpeg entry in the transcode folder.");
}
if (deferToTsmuxer && !params.mediaRenderer.isMuxH264MpegTS()) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "the renderer does not support H.264 inside MPEG-TS.");
}
if (deferToTsmuxer && params.sid != null) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "we need to burn subtitles.");
}
if (deferToTsmuxer && avisynth()) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "we are using AviSynth.");
}
if (deferToTsmuxer && params.mediaRenderer.isH264Level41Limited() && !media.isVideoWithinH264LevelLimits(newInput, params.mediaRenderer)) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "the video stream is not within H.264 level limits for this renderer.");
}
if (deferToTsmuxer && !media.isMuxable(params.mediaRenderer)) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "the video stream is not muxable to this renderer.");
}
if (deferToTsmuxer && !aspectRatiosMatch) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "we need to transcode to apply the correct aspect ratio.");
}
if (deferToTsmuxer && !params.mediaRenderer.isPS3() && media.isWebDl(filename, params)) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "the version of tsMuxeR supported by this renderer does not support WEB-DL files.");
}
if (deferToTsmuxer == true && "bt.601".equals(media.getMatrixCoefficients())) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "the colorspace probably isn't supported by the renderer.");
}
if (deferToTsmuxer && (params.mediaRenderer.isKeepAspectRatio() || params.mediaRenderer.isKeepAspectRatioTranscoding()) && !"16:9".equals(media.getAspectRatioContainer())) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "the renderer needs us to add borders so it displays the correct aspect ratio of " + media.getAspectRatioContainer() + ".");
}
if (deferToTsmuxer && !params.mediaRenderer.isResolutionCompatibleWithRenderer(media.getWidth(), media.getHeight())) {
deferToTsmuxer = false;
LOGGER.trace(prependTraceReason + "the resolution is incompatible with the renderer.");
}
if (deferToTsmuxer) {
TsMuxeRVideo tv = new TsMuxeRVideo();
params.forceFps = media.getValidFps(false);
if (media.getCodecV() != null) {
if (media.isH264()) {
params.forceType = "V_MPEG4/ISO/AVC";
} else if (media.getCodecV().startsWith("mpeg2")) {
params.forceType = "V_MPEG-2";
} else if (media.getCodecV().equals("vc1")) {
params.forceType = "V_MS/VFW/WVC1";
}
}
return tv.launchTranscode(dlna, media, params);
}
}
// Apply any video filters and associated options. These should go
// after video input is specified and before output streams are mapped.
cmdList.addAll(getVideoFilterOptions(dlna, media, params));
// Map the output streams if necessary
if (media.getAudioTracksList().size() > 1) {
// Set the video stream
cmdList.add("-map");
cmdList.add("0:v");
// Set the proper audio stream
cmdList.add("-map");
cmdList.add("0:a:" + (media.getAudioTracksList().indexOf(params.aid)));
}
// Encoder threads
if (nThreads > 0) {
cmdList.add("-threads");
cmdList.add(String.valueOf(nThreads));
}
if (params.timeend > 0) {
cmdList.add("-t");
cmdList.add(String.valueOf(params.timeend));
}
// Now that inputs and filtering are complete, see if we should
// give the renderer the final say on the command
boolean override = false;
if (renderer instanceof RendererConfiguration.OutputOverride) {
override = ((RendererConfiguration.OutputOverride) renderer).getOutputOptions(cmdList, dlna, this, params);
}
if (!override) {
cmdList.addAll(getVideoBitrateOptions(dlna, media, params));
String customFFmpegOptions = renderer.getCustomFFmpegOptions();
// Audio bitrate
if (!ac3Remux && !dtsRemux && !(type() == Format.AUDIO)) {
int channels = 0;
if ((renderer.isTranscodeToWMV() && !renderer.isXbox360()) || (renderer.isXboxOne() && purpose() == VIDEO_WEBSTREAM_PLAYER)) {
channels = 2;
} else if (params.aid != null && params.aid.getAudioProperties().getNumberOfChannels() > configuration.getAudioChannelCount()) {
channels = configuration.getAudioChannelCount();
}
if (!customFFmpegOptions.contains("-ac ") && channels > 0) {
cmdList.add("-ac");
cmdList.add(String.valueOf(channels));
}
if (!customFFmpegOptions.contains("-ab ")) {
cmdList.add("-ab");
if (renderer.isTranscodeToAAC()) {
cmdList.add(Math.min(configuration.getAudioBitrate(), 320) + "k");
} else {
cmdList.add(String.valueOf(CodecUtil.getAC3Bitrate(configuration, params.aid)) + "k");
}
}
if (!customFFmpegOptions.contains("-ar ")) {
cmdList.add("-ar");
cmdList.add("" + params.mediaRenderer.getTranscodedVideoAudioSampleRate());
}
}
// Add the output options (-f, -c:a, -c:v, etc.)
cmdList.addAll(getVideoTranscodeOptions(dlna, media, params));
// Add custom options
if (StringUtils.isNotEmpty(customFFmpegOptions)) {
parseOptions(customFFmpegOptions, cmdList);
}
}
// Set up the process
PipeProcess pipe = null;
if (!dtsRemux) {
// cmdList.add("pipe:");
// basename of the named pipe:
String fifoName = String.format("ffmpegvideo_%d_%d", Thread.currentThread().getId(), System.currentTimeMillis());
// This process wraps the command that creates the named pipe
pipe = new PipeProcess(fifoName);
// delete the named pipe later; harmless if it isn't created
pipe.deleteLater();
params.input_pipes[0] = pipe;
// Output file
cmdList.add(pipe.getInputPipe());
}
String[] cmdArray = new String[cmdList.size()];
cmdList.toArray(cmdArray);
cmdArray = finalizeTranscoderArgs(filename, dlna, media, params, cmdArray);
ProcessWrapperImpl pw = new ProcessWrapperImpl(cmdArray, params);
setOutputParsing(dlna, pw, false);
if (!dtsRemux) {
ProcessWrapper mkfifo_process = pipe.getPipeProcess();
/**
* It can take a long time for Windows to create a named pipe (and
* mkfifo can be slow if /tmp isn't memory-mapped), so run this in
* the current thread.
*/
mkfifo_process.runInSameThread();
// Clean up the mkfifo process when the transcode ends
pw.attachProcess(mkfifo_process);
// Give the mkfifo process a little time
try {
Thread.sleep(300);
} catch (InterruptedException e) {
LOGGER.error("Thread interrupted while waiting for named pipe to be created", e);
}
} else {
pipe = new PipeProcess(System.currentTimeMillis() + "tsmuxerout.ts");
TsMuxeRVideo ts = new TsMuxeRVideo();
File f = new File(configuration.getTempFolder(), "pms-tsmuxer.meta");
String[] cmd = new String[] { ts.executable(), f.getAbsolutePath(), pipe.getInputPipe() };
pw = new ProcessWrapperImpl(cmd, params);
PipeIPCProcess ffVideoPipe = new PipeIPCProcess(System.currentTimeMillis() + "ffmpegvideo", System.currentTimeMillis() + "videoout", false, true);
cmdList.add(ffVideoPipe.getInputPipe());
OutputParams ffparams = new OutputParams(configuration);
ffparams.maxBufferSize = 1;
ffparams.stdin = params.stdin;
String[] cmdArrayDts = new String[cmdList.size()];
cmdList.toArray(cmdArrayDts);
cmdArrayDts = finalizeTranscoderArgs(filename, dlna, media, params, cmdArrayDts);
ProcessWrapperImpl ffVideo = new ProcessWrapperImpl(cmdArrayDts, ffparams);
ProcessWrapper ff_video_pipe_process = ffVideoPipe.getPipeProcess();
pw.attachProcess(ff_video_pipe_process);
ff_video_pipe_process.runInNewThread();
ffVideoPipe.deleteLater();
pw.attachProcess(ffVideo);
ffVideo.runInNewThread();
PipeIPCProcess ffAudioPipe = new PipeIPCProcess(System.currentTimeMillis() + "ffmpegaudio01", System.currentTimeMillis() + "audioout", false, true);
StreamModifier sm = new StreamModifier();
sm.setPcm(false);
sm.setDtsEmbed(dtsRemux);
sm.setSampleFrequency(48000);
sm.setBitsPerSample(16);
sm.setNbChannels(2);
List<String> cmdListDTS = new ArrayList<>();
cmdListDTS.add(executable());
cmdListDTS.add("-y");
cmdListDTS.add("-ss");
if (params.timeseek > 0) {
cmdListDTS.add(String.valueOf(params.timeseek));
} else {
cmdListDTS.add("0");
}
// Use the pushed input ("-") when a stream is supplied, otherwise read the file directly
cmdListDTS.add("-i");
if (params.stdin != null) {
cmdListDTS.add("-");
} else {
cmdListDTS.add(filename);
}
if (params.timeseek > 0) {
cmdListDTS.add("-copypriorss");
cmdListDTS.add("0");
cmdListDTS.add("-avoid_negative_ts");
cmdListDTS.add("1");
}
cmdListDTS.add("-ac");
cmdListDTS.add("2");
cmdListDTS.add("-f");
cmdListDTS.add("dts");
cmdListDTS.add("-c:a");
cmdListDTS.add("copy");
cmdListDTS.add(ffAudioPipe.getInputPipe());
String[] cmdArrayDTS = new String[cmdListDTS.size()];
cmdListDTS.toArray(cmdArrayDTS);
if (!params.mediaRenderer.isMuxDTSToMpeg()) {
// The renderer cannot take a raw DTS track in MPEG, so pad the DTS stream into LPCM (the "PCM trick")
ffAudioPipe.setModifier(sm);
}
OutputParams ffaudioparams = new OutputParams(configuration);
ffaudioparams.maxBufferSize = 1;
ffaudioparams.stdin = params.stdin;
ProcessWrapperImpl ffAudio = new ProcessWrapperImpl(cmdArrayDTS, ffaudioparams);
params.stdin = null;
try (PrintWriter pwMux = new PrintWriter(f)) {
pwMux.println("MUXOPT --no-pcr-on-video-pid --no-asyncio --new-audio-pes --vbr --vbv-len=500");
String videoType = "V_MPEG-2";
if (renderer.isTranscodeToH264()) {
videoType = "V_MPEG4/ISO/AVC";
}
if (params.no_videoencode && params.forceType != null) {
videoType = params.forceType;
}
StringBuilder fps = new StringBuilder();
if (params.forceFps != null) {
fps.append("fps=").append(params.forceFps).append(", ");
}
String audioType = "A_AC3";
if (dtsRemux) {
if (params.mediaRenderer.isMuxDTSToMpeg()) {
// Renderer can play proper DTS track
audioType = "A_DTS";
} else {
// DTS padded in LPCM trick
audioType = "A_LPCM";
}
}
pwMux.println(videoType + ", \"" + ffVideoPipe.getOutputPipe() + "\", " + fps + "level=4.1, insertSEI, contSPS, track=1");
pwMux.println(audioType + ", \"" + ffAudioPipe.getOutputPipe() + "\", track=2");
}
ProcessWrapper pipe_process = pipe.getPipeProcess();
pw.attachProcess(pipe_process);
pipe_process.runInNewThread();
try {
wait(50);
} catch (InterruptedException e) {
}
pipe.deleteLater();
params.input_pipes[0] = pipe;
ProcessWrapper ff_pipe_process = ffAudioPipe.getPipeProcess();
pw.attachProcess(ff_pipe_process);
ff_pipe_process.runInNewThread();
try {
wait(50);
} catch (InterruptedException e) {
}
ffAudioPipe.deleteLater();
pw.attachProcess(ffAudio);
ffAudio.runInNewThread();
}
// Launch the transcode command...
pw.runInNewThread();
// ...and wait briefly to allow it to start
try {
Thread.sleep(200);
} catch (InterruptedException e) {
LOGGER.error("Thread interrupted while waiting for transcode to start", e.getMessage());
LOGGER.trace("", e);
}
configuration = prev;
return pw;
}
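For orientation, the plain transcode path above (no tsMuxeR deferral, no DTS remux) assembles a command of roughly the following shape. The concrete values below are illustrative placeholders, not output captured from this code:

// Illustrative only: approximate layout of cmdList for the common path.
String[] exampleCommand = {
    "ffmpeg", "-y", "-loglevel", "fatal",
    "-ss", "120.0",                  // only when params.timeseek > 0
    "-threads", "4",                 // only when a fixed thread count is configured
    "-i", "/path/to/input.mkv",
    "-map", "0:v", "-map", "0:a:1",  // only when the file has several audio tracks
    "-threads", "4",                 // encoder threads, mirroring the decoder setting
    "-ac", "2", "-ab", "448k", "-ar", "48000",
    // ...output options from getVideoBitrateOptions()/getVideoTranscodeOptions()...
    "/tmp/ffmpegvideo_1_1234567890"  // the named pipe created by PipeProcess
};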
Use of net.pms.configuration.PmsConfiguration in project UniversalMediaServer by UniversalMediaServer.
The class DCRaw, method getImage.
/**
* Converts {@code fileName} into PPM format.
*
* @param params the {@link OutputParams} to use. Can be {@code null}.
* @param fileName the path of the image file to process.
* @param imageInfo the {@link ImageInfo} for the image file. Can be {@code null}.
* @return A byte array containing the converted image or {@code null}.
* @throws IOException if an IO error occurs.
*/
@Override
public byte[] getImage(OutputParams params, String fileName, ImageInfo imageInfo) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Decoding image \"{}\" with DCRaw", fileName);
}
if (params == null) {
params = new OutputParams(PMS.getConfiguration());
}
// Use device-specific pms conf
PmsConfiguration configuration = PMS.getConfiguration(params);
params.log = false;
// Setting the buffer to the size of the source file or 5 MB. The
// output won't be the same size as the input, but it will hopefully
// give us a somewhat relevant buffer size. Every time the buffer has
// to grow, the whole buffer must be copied in memory.
params.outputByteArrayStreamBufferSize = imageInfo != null && imageInfo.getSize() != ImageInfo.SIZE_UNKNOWN ? (int) imageInfo.getSize() : 5000000;
// First try to get the embedded thumbnail
String[] cmdArray = new String[5];
cmdArray[0] = configuration.getDCRawPath();
cmdArray[1] = "-c";
cmdArray[2] = "-M";
cmdArray[3] = "-w";
cmdArray[4] = fileName;
ProcessWrapperImpl pw = new ProcessWrapperImpl(cmdArray, true, params, false, true);
pw.runInSameThread();
byte[] bytes = pw.getOutputByteArray().toByteArray();
List<String> results = pw.getResults();
if (bytes == null || bytes.length == 0) {
if (!results.isEmpty() && results.get(0).startsWith("Cannot decode file")) {
LOGGER.warn("DCRaw could not decode image \"{}\"", fileName);
} else if (!results.isEmpty()) {
LOGGER.debug("DCRaw failed to decode image \"{}\": {}", fileName, StringUtils.join(results, "\n"));
}
return null;
}
return bytes;
}
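A minimal usage sketch for this method. The file path is hypothetical, passing null for params exercises the default-configuration branch shown above, and DCRaw is assumed to be instantiable directly, like the other engines in this listing:

// Hypothetical usage (sketch only): decode a camera RAW file to PPM bytes.
DCRaw dcraw = new DCRaw();                       // assumed no-arg constructor
byte[] ppm = dcraw.getImage(null, "/photos/IMG_0001.CR2", null);
if (ppm != null) {
    LOGGER.trace("Decoded {} bytes of PPM data", ppm.length);
}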
Use of net.pms.configuration.PmsConfiguration in project UniversalMediaServer by UniversalMediaServer.
The class MEncoderWebVideo, method launchTranscode.
@Override
public ProcessWrapper launchTranscode(DLNAResource dlna, DLNAMediaInfo media, OutputParams params) throws IOException {
// Use device-specific pms conf
PmsConfiguration prev = configuration;
configuration = (DeviceConfiguration) params.mediaRenderer;
params.minBufferSize = params.minFileSize;
params.secondread_minsize = 100000;
PipeProcess pipe = new PipeProcess("mencoder" + System.currentTimeMillis());
params.input_pipes[0] = pipe;
String[] cmdArray = new String[args().length + 4];
cmdArray[0] = executable();
final String filename = dlna.getFileName();
cmdArray[1] = filename;
System.arraycopy(args(), 0, cmdArray, 2, args().length);
cmdArray[cmdArray.length - 2] = "-o";
cmdArray[cmdArray.length - 1] = pipe.getInputPipe();
ProcessWrapper mkfifo_process = pipe.getPipeProcess();
cmdArray = finalizeTranscoderArgs(filename, dlna, media, params, cmdArray);
ProcessWrapperImpl pw = new ProcessWrapperImpl(cmdArray, params);
pw.attachProcess(mkfifo_process);
/**
* It can take a long time for Windows to create a named pipe (and
* mkfifo can be slow if /tmp isn't memory-mapped), so run this in
* the current thread.
*/
mkfifo_process.runInSameThread();
pipe.deleteLater();
pw.runInNewThread();
// Not sure what good this 50ms wait will do for the calling method.
try {
Thread.sleep(50);
} catch (InterruptedException e) {
}
configuration = prev;
return pw;
}
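The array splicing above yields a command with the following layout. A short sketch with placeholder values; the contents of args() are engine-specific and not shown here:

// Illustrative layout of cmdArray after the System.arraycopy above.
// The URL and pipe name are placeholders; args() contents are unspecified here.
String[] exampleCmd = {
    "mencoder",                        // executable()
    "http://example.com/stream",       // dlna.getFileName()
    /* ...args() copied in here... */
    "-o", "/tmp/mencoder1514764800000" // output to the named pipe read by the renderer
};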
Use of net.pms.configuration.PmsConfiguration in project UniversalMediaServer by UniversalMediaServer.
The class FormatRecognitionTest, method testVirtualVideoActionInitializationCompatibility.
/**
* When PMS is in the process of starting up, something particular happens.
* The RootFolder is initialized and several VirtualVideoActions are added
* as children. VirtualVideoActions use the MPG format and at the time of
* initialization getDefaultRenderer() is used to determine whether or not
* the format can be streamed.
* <p>
* Under these conditions Format.isCompatible() must return true, or
* selecting the VirtualVideoAction will result in a "Corrupted data"
* message.
* <p>
* This test verifies the case above.
*/
@Test
public void testVirtualVideoActionInitializationCompatibility() {
boolean configurationLoaded = false;
try {
// Initialize PMS configuration like at initialization time, this
// is relevant for RendererConfiguration.isCompatible().
PMS.setConfiguration(new PmsConfiguration());
configurationLoaded = true;
} catch (ConfigurationException e) {
e.printStackTrace();
}
// Continue the test if the configuration loaded, otherwise skip it.
assumeTrue(configurationLoaded);
// Continue the test if the LibMediaInfoParser can be loaded, otherwise skip it.
assumeTrue(LibMediaInfoParser.isValid());
// Construct media info exactly as VirtualVideoAction does
DLNAMediaInfo info = new DLNAMediaInfo();
info.setContainer("mpegps");
List<DLNAMediaAudio> audioCodes = new ArrayList<>();
info.setAudioTracksList(audioCodes);
info.setMimeType("video/mpeg");
info.setCodecV("mpeg2");
info.setMediaparsed(true);
Format format = new MPG();
format.match("test.mpg");
// Test without rendererConfiguration, as can happen when plugins
// create virtual video actions under a folder.
assertEquals("VirtualVideoAction is initialized as compatible with null configuration", true, format.isCompatible(info, null));
}