Example 1 with RecordingMetaData

Use of org.apache.openmeetings.db.entity.record.RecordingMetaData in project openmeetings by apache.

In class RecordingMetaDataDao, the method add:

public Long add(Long recordingId, Date recordStart, boolean isAudioOnly, boolean isVideoOnly, boolean isScreenData, String streamName, Integer interiewPodId) {
    try {
        RecordingMetaData metaData = new RecordingMetaData();
        metaData.setRecording(recordingDao.get(recordingId));
        metaData.setRecordStart(recordStart);
        metaData.setAudioOnly(isAudioOnly);
        metaData.setVideoOnly(isVideoOnly);
        metaData.setScreenData(isScreenData);
        metaData.setStreamName(streamName);
        metaData.setInteriewPodId(interiewPodId);
        metaData = update(metaData);
        return metaData.getId();
    } catch (Exception ex2) {
        log.error("[add]: ", ex2);
    }
    return null;
}
Also used : RecordingMetaData(org.apache.openmeetings.db.entity.record.RecordingMetaData)
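
A minimal sketch of how this DAO method might be invoked when an audio stream starts being recorded; metaDataDao, recordingId, streamName and log are assumed caller-side names for illustration, not code from the project.

// Hypothetical caller: register metadata for a freshly started audio-only stream.
Long metaId = metaDataDao.add(
        recordingId,          // recording the stream belongs to
        new java.util.Date(), // stream start time
        true,                 // isAudioOnly
        false,                // isVideoOnly
        false,                // isScreenData
        streamName,           // name of the published stream
        null);                // interview pod id (not an interview recording)
if (metaId == null) {
    // add() swallows exceptions and returns null, so the caller has to check
    log.warn("Recording metadata could not be stored for stream {}", streamName);
}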

Example 2 with RecordingMetaData

Use of org.apache.openmeetings.db.entity.record.RecordingMetaData in project openmeetings by apache.

In class VideoInfo, the method update:

public VideoInfo update(AjaxRequestTarget target, BaseFileItem _r) {
    boolean reConvEnabled = false;
    boolean exists = false;
    if (_r instanceof Recording) {
        Recording r = (Recording) _r;
        rm.setObject(r);
        exists = r.exists();
        try {
            String name = null;
            if (r.getRoomId() != null) {
                Room room = roomDao.get(r.getRoomId());
                if (room != null) {
                    name = room.getName();
                    isInterview = Room.Type.interview == room.getType();
                }
            }
            roomName.setObject(name);
        } catch (Exception e) {
            // no-op
        }
        if (r.getOwnerId() != null && r.getOwnerId().equals(getUserId()) && r.getStatus() != Status.RECORDING && r.getStatus() != Status.CONVERTING) {
            List<RecordingMetaData> metas = metaDao.getByRecording(r.getId());
            reConvEnabled = !metas.isEmpty();
            for (RecordingMetaData meta : metas) {
                if (r.getRoomId() == null || !getRecordingMetaData(r.getRoomId(), meta.getStreamName()).exists()) {
                    // a missing stream file means re-conversion is not possible
                    reConvEnabled = false;
                    break;
                }
            }
        }
    }
    reConvert.setEnabled(reConvEnabled);
    downloadBtn.setEnabled(exists && !_r.isReadOnly());
    share.setEnabled(exists && !_r.isReadOnly());
    if (target != null) {
        target.add(form);
    }
    return this;
}
Also used : OmFileHelper.getRecordingMetaData(org.apache.openmeetings.util.OmFileHelper.getRecordingMetaData) RecordingMetaData(org.apache.openmeetings.db.entity.record.RecordingMetaData) Recording(org.apache.openmeetings.db.entity.record.Recording) Room(org.apache.openmeetings.db.entity.room.Room)

Example 3 with RecordingMetaData

Use of org.apache.openmeetings.db.entity.record.RecordingMetaData in project openmeetings by apache.

In class InterviewConverter, the method mergeAudioToWaves:

private String[] mergeAudioToWaves(List<File> waveFiles, File wav, List<RecordingMetaData> metaDataList, ReConverterParams rcv) throws IOException {
    String[] cmdSox = new String[waveFiles.size() + 5];
    cmdSox[0] = this.getPathToSoX();
    cmdSox[1] = "-m";
    int counter = 2;
    for (File _wav : waveFiles) {
        for (RecordingMetaData metaData : metaDataList) {
            String hashFileFullNameStored = metaData.getFullWavAudioData();
            if (hashFileFullNameStored.equals(_wav.getName())) {
                if (metaData.getInteriewPodId() == 1) {
                    cmdSox[counter] = "-v " + rcv.leftSideLoud;
                    counter++;
                }
                if (metaData.getInteriewPodId() == 2) {
                    cmdSox[counter] = "-v " + rcv.rightSideLoud;
                    counter++;
                }
            }
        }
        cmdSox[counter] = _wav.getCanonicalPath();
        counter++;
    }
    cmdSox[counter] = wav.getCanonicalPath();
    return cmdSox;
}
Also used : OmFileHelper.getRecordingMetaData(org.apache.openmeetings.util.OmFileHelper.getRecordingMetaData) RecordingMetaData(org.apache.openmeetings.db.entity.record.RecordingMetaData) File(java.io.File)
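
As a hedged illustration of the array this method builds: with two wave files matched to interview pods 1 and 2, the resulting SoX invocation looks roughly as sketched below. File names are placeholders; the last two lines mirror how Example 4 actually executes the array via ProcessHelper.

// Illustrative only; wave file names are placeholders.
// "-m" mixes the inputs, the per-file "-v" factors rebalance the two sides:
//
//   sox -m -v <leftSideLoud> pod1.wav -v <rightSideLoud> pod2.wav INTERVIEW_<id>_FINAL_WAVE.wav
//
String[] soxArgs = mergeAudioToWaves(waveFiles, wav, metaDataList, rcv);
logs.add(ProcessHelper.executeScript("mergeAudioToWaves", soxArgs));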

Example 4 with RecordingMetaData

Use of org.apache.openmeetings.db.entity.record.RecordingMetaData in project openmeetings by apache.

In class InterviewConverter, the method startConversion:

public void startConversion(Long id, boolean reconversion, ReConverterParams rcv) {
    Recording r = null;
    try {
        r = recordingDao.get(id);
        log.debug("recording {}", r.getId());
        if (Strings.isEmpty(r.getHash())) {
            r.setHash(UUID.randomUUID().toString());
        }
        r.setStatus(Recording.Status.CONVERTING);
        r = recordingDao.update(r);
        ProcessResultList logs = new ProcessResultList();
        List<File> waveFiles = new ArrayList<>();
        File streamFolder = getStreamFolder(r);
        List<RecordingMetaData> metaDataList = metaDataDao.getAudioMetaDataByRecording(r.getId());
        stripAudioFirstPass(r, logs, waveFiles, streamFolder, metaDataList);
        // Merge Wave to Full Length
        File streamFolderGeneral = getStreamsHibernateDir();
        File wav = new File(streamFolder, String.format("INTERVIEW_%s_FINAL_WAVE.wav", r.getId()));
        deleteFileIfExists(wav);
        if (waveFiles.isEmpty()) {
            // create default Audio to merge it.
            // strip to content length
            File outputWav = new File(streamFolderGeneral, "one_second.wav");
            // Calculate delta at beginning
            double deltaPadding = diffSeconds(r.getRecordEnd(), r.getRecordStart());
            String[] cmdSox = new String[] { getPathToSoX(), outputWav.getCanonicalPath(), wav.getCanonicalPath(), "pad", "0", String.valueOf(deltaPadding) };
            logs.add(ProcessHelper.executeScript("generateSampleAudio", cmdSox));
        } else if (waveFiles.size() == 1) {
            wav = waveFiles.get(0);
        } else {
            String[] soxArgs;
            if (reconversion) {
                soxArgs = mergeAudioToWaves(waveFiles, wav, metaDataList, rcv);
            } else {
                soxArgs = mergeAudioToWaves(waveFiles, wav);
            }
            logs.add(ProcessHelper.executeScript("mergeAudioToWaves", soxArgs));
        }
        // Default Image for empty interview video pods
        final File defaultInterviewImageFile = new File(streamFolderGeneral, "default_interview_image.png");
        if (!defaultInterviewImageFile.exists()) {
            throw new ConversionException("defaultInterviewImageFile does not exist!");
        }
        final int flvWidth = 320;
        final int flvHeight = 260;
        // Merge Audio with Video / Calculate resulting FLV
        String[] pods = new String[2];
        boolean found = false;
        for (RecordingMetaData meta : metaDataList) {
            File flv = getRecordingMetaData(r.getRoomId(), meta.getStreamName());
            Integer pod = meta.getInteriewPodId();
            if (flv.exists() && pod != null && pod > 0 && pod < 3) {
                String path = flv.getCanonicalPath();
                /*
                 * CHECK FILE:
                 * ffmpeg -i rec_316_stream_567_2013_08_28_11_51_45.flv -v error -f null file.null
                 */
                String[] args = new String[] { getPathToFFMPEG(), "-y", "-i", path, // only input files with video will be treated as video sources
                        "-an", "-v", "error", "-f", "null", "file.null" };
                ProcessResult res = ProcessHelper.executeScript("checkFlvPod_" + pod, args, true);
                logs.add(res);
                if (res.isOk()) {
                    long diff = diff(meta.getRecordStart(), meta.getRecording().getRecordStart());
                    if (diff != 0L) {
                        // stub to add
                        // ffmpeg -y -loop 1 -i /home/solomax/work/openmeetings/branches/3.0.x/dist/red5/webapps/openmeetings/streams/hibernate/default_interview_image.jpg -filter_complex '[0:0]scale=320:260' -c:v libx264 -t 00:00:29.059 -pix_fmt yuv420p out.flv
                        File podFB = new File(streamFolder, String.format("%s_pod_%s_blank.flv", meta.getStreamName(), pod));
                        String podPB = podFB.getCanonicalPath();
                        String[] argsPodB = new String[] {
                                getPathToFFMPEG(),
                                "-y",
                                "-loop", "1",
                                "-i", defaultInterviewImageFile.getCanonicalPath(),
                                "-filter_complex", String.format("[0:0]scale=%1$d:%2$d", flvWidth, flvHeight),
                                "-c:v", "libx264",
                                "-t", formatMillis(diff),
                                "-pix_fmt", "yuv420p",
                                podPB };
                        logs.add(ProcessHelper.executeScript("blankFlvPod_" + pod, argsPodB));
                        // ffmpeg -y -i out.flv -i rec_15_stream_4_2014_07_15_20_41_03.flv -filter_complex '[0:0]setsar=1/1[sarfix];[1:0]scale=320:260,setsar=1/1[scale];[sarfix] [scale] concat=n=2:v=1:a=0 [v]' -map '[v]'  output1.flv
                        File podF = new File(streamFolder, OmFileHelper.getName(meta.getStreamName() + "_pod_" + pod, EXTENSION_FLV));
                        String podP = podF.getCanonicalPath();
                        String[] argsPod = new String[] {
                                getPathToFFMPEG(),
                                "-y",
                                "-i", podPB,
                                "-i", path,
                                "-filter_complex", String.format("[0:0]setsar=1/1[sarfix];[1:0]scale=%1$d:%2$d,setsar=1/1[scale];[sarfix] [scale] concat=n=2:v=1:a=0 [v]", flvWidth, flvHeight),
                                "-map", "[v]",
                                podP };
                        logs.add(ProcessHelper.executeScript("shiftedFlvPod_" + pod, argsPod));
                        pods[pod - 1] = podP;
                    } else {
                        pods[pod - 1] = path;
                    }
                }
                found = true;
            }
        }
        if (!found) {
            ProcessResult res = new ProcessResult();
            res.setProcess("CheckFlvFilesExists");
            res.setError("No valid pods found");
            res.setExitCode(-1);
            logs.add(res);
            return;
        }
        boolean shortest = false;
        List<String> args = new ArrayList<>();
        for (int i = 0; i < 2; ++i) {
            /*
             * INSERT BLANK INSTEAD OF BAD PAD:
             * ffmpeg -loop 1 -i default_interview_image.jpg -i rec_316_stream_569_2013_08_28_11_51_45.flv -filter_complex '[0:v]scale=320:260,pad=2*320:260[left];[1:v]scale=320:260[right];[left][right]overlay=main_w/2:0' -shortest -y out4.flv
             *
             * JUST MERGE:
             * ffmpeg -i rec_316_stream_569_2013_08_28_11_51_45.flv -i rec_316_stream_569_2013_08_28_11_51_45.flv -filter_complex '[0:v]scale=320:260,pad=2*320:260[left];[1:v]scale=320:260[right];[left][right]overlay=main_w/2:0' -y out4.flv
             */
            if (pods[i] == null) {
                shortest = true;
                args.add("-loop");
                args.add("1");
                args.add("-i");
                args.add(defaultInterviewImageFile.getCanonicalPath());
            } else {
                args.add("-i");
                args.add(pods[i]);
            }
        }
        args.add("-i");
        args.add(wav.getCanonicalPath());
        args.add("-filter_complex");
        args.add(String.format("[0:v]scale=%1$d:%2$d,pad=2*%1$d:%2$d[left];[1:v]scale=%1$d:%2$d[right];[left][right]overlay=main_w/2:0%3$s", flvWidth, flvHeight, shortest ? ":shortest=1" : ""));
        if (shortest) {
            args.add("-shortest");
        }
        args.add("-map");
        args.add("0:0");
        args.add("-map");
        args.add("1:0");
        args.add("-map");
        args.add("2:0");
        args.add("-qmax");
        args.add("1");
        args.add("-qmin");
        args.add("1");
        r.setWidth(2 * flvWidth);
        r.setHeight(flvHeight);
        String mp4path = convertToMp4(r, args, logs);
        postProcess(r, mp4path, logs, waveFiles);
    } catch (Exception err) {
        log.error("[startConversion]", err);
        r.setStatus(Recording.Status.ERROR);
    }
    recordingDao.update(r);
}
Also used : ArrayList(java.util.ArrayList) ProcessResult(org.apache.openmeetings.util.process.ProcessResult) IOException(java.io.IOException) ProcessResultList(org.apache.openmeetings.util.process.ProcessResultList) OmFileHelper.getRecordingMetaData(org.apache.openmeetings.util.OmFileHelper.getRecordingMetaData) RecordingMetaData(org.apache.openmeetings.db.entity.record.RecordingMetaData) Recording(org.apache.openmeetings.db.entity.record.Recording) File(java.io.File)
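
To make the assembled argument list concrete, here is a small sketch that resolves the filter_complex template exactly as the code above does, for the case where both pods produced a valid FLV (flvWidth = 320, flvHeight = 260). Input file names and whatever convertToMp4 appends afterwards are not shown.

// Minimal sketch, not project code: evaluate the same format string as above.
final int flvWidth = 320;
final int flvHeight = 260;
boolean shortest = false; // both pods produced a valid FLV in this sketch
String filter = String.format(
        "[0:v]scale=%1$d:%2$d,pad=2*%1$d:%2$d[left];[1:v]scale=%1$d:%2$d[right];[left][right]overlay=main_w/2:0%3$s",
        flvWidth, flvHeight, shortest ? ":shortest=1" : "");
System.out.println(filter);
// -> [0:v]scale=320:260,pad=2*320:260[left];[1:v]scale=320:260[right];[left][right]overlay=main_w/2:0
// Pod 1 is scaled onto a double-width canvas, pod 2 is overlaid at main_w/2,
// and the merged WAV comes in as the third input (-map 2:0), yielding a
// 640x260 side-by-side interview video.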

Example 5 with RecordingMetaData

Use of org.apache.openmeetings.db.entity.record.RecordingMetaData in project openmeetings by apache.

In class RecordingConverter, the method startConversion:

@Override
public void startConversion(Long id) {
    Recording r = recordingDao.get(id);
    if (r == null) {
        log.warn("Conversion is NOT started. Recording with ID {} is not found", id);
        return;
    }
    try {
        log.debug("recording {}", r.getId());
        ProcessResultList logs = new ProcessResultList();
        List<File> waveFiles = new ArrayList<>();
        File streamFolder = getStreamFolder(r);
        RecordingMetaData screenMetaData = metaDataDao.getScreenByRecording(r.getId());
        if (screenMetaData == null) {
            throw new ConversionException("screenMetaData is Null recordingId " + r.getId());
        }
        if (screenMetaData.getStreamStatus() == Status.NONE) {
            printMetaInfo(screenMetaData, "StartConversion");
            throw new ConversionException("Stream has not been started, error in recording");
        }
        if (Strings.isEmpty(r.getHash())) {
            r.setHash(UUID.randomUUID().toString());
        }
        r.setStatus(Recording.Status.CONVERTING);
        r = recordingDao.update(r);
        screenMetaData = waitForTheStream(screenMetaData.getId());
        stripAudioFirstPass(r, logs, waveFiles, streamFolder);
        // Merge Wave to Full Length
        File wav = new File(streamFolder, screenMetaData.getStreamName() + "_FINAL_WAVE.wav");
        if (waveFiles.isEmpty()) {
            // create default Audio to merge it. strip to content length
            String oneSecWav = new File(getStreamsHibernateDir(), "one_second.wav").getCanonicalPath();
            // Calculate delta at beginning
            double duration = diffSeconds(r.getRecordEnd(), r.getRecordStart());
            String[] cmd = new String[] { getPathToSoX(), oneSecWav, wav.getCanonicalPath(), "pad", "0", String.valueOf(duration) };
            logs.add(ProcessHelper.executeScript("generateSampleAudio", cmd));
        } else if (waveFiles.size() == 1) {
            wav = waveFiles.get(0);
        } else {
            String[] soxArgs = mergeAudioToWaves(waveFiles, wav);
            logs.add(ProcessHelper.executeScript("mergeAudioToWaves", soxArgs));
        }
        screenMetaData.setFullWavAudioData(wav.getName());
        metaDataDao.update(screenMetaData);
        // Merge Audio with Video / Calculate resulting FLV
        String inputScreenFullFlv = new File(streamFolder, OmFileHelper.getName(screenMetaData.getStreamName(), EXTENSION_FLV)).getCanonicalPath();
        // ffmpeg -vcodec flv -qscale 9.5 -r 25 -ar 22050 -ab 32k -s 320x240
        // -i 65318fb5c54b1bc1b1bca077b493a914_28_12_2009_23_38_17_FINAL_WAVE.wav
        // -i 65318fb5c54b1bc1b1bca077b493a914_28_12_2009_23_38_17.flv
        // final1.flv
        int flvWidth = r.getWidth();
        int flvHeight = r.getHeight();
        log.debug("flvWidth -1- {}", flvWidth);
        log.debug("flvHeight -1- {}", flvHeight);
        flvWidth = (int) (16. * flvWidth / 16);
        flvHeight = (int) (16. * flvHeight / 16);
        log.debug("flvWidth -2- {}", flvWidth);
        log.debug("flvHeight -2- {}", flvHeight);
        r.setWidth(flvWidth);
        r.setHeight(flvHeight);
        String mp4path = convertToMp4(r, Arrays.asList("-itsoffset", formatMillis(diff(screenMetaData.getRecordStart(), r.getRecordStart())), "-i", inputScreenFullFlv, "-i", wav.getCanonicalPath()), logs);
        postProcess(r, mp4path, logs, waveFiles);
    } catch (Exception err) {
        log.error("[startConversion]", err);
        r.setStatus(Recording.Status.ERROR);
    }
    recordingDao.update(r);
}
Also used : RecordingMetaData(org.apache.openmeetings.db.entity.record.RecordingMetaData) ArrayList(java.util.ArrayList) Recording(org.apache.openmeetings.db.entity.record.Recording) File(java.io.File) ProcessResultList(org.apache.openmeetings.util.process.ProcessResultList)
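
The "-itsoffset" value above delays the screen-share video so it lines up with the start of the recording. Below is a hedged sketch of what the diff() and formatMillis() helpers are assumed to do here; the project's real helper implementations may differ.

// Assumed behaviour of the helpers used for "-itsoffset"; not the project's code.
private static long diff(java.util.Date from, java.util.Date to) {
    // milliseconds by which the screen stream started after the recording itself
    return from.getTime() - to.getTime();
}

private static String formatMillis(long millis) {
    // e.g. 83500 ms -> "00:01:23.500", a time value ffmpeg accepts for -itsoffset
    long h = millis / 3600000, m = (millis % 3600000) / 60000;
    long s = (millis % 60000) / 1000, ms = millis % 1000;
    return String.format("%02d:%02d:%02d.%03d", h, m, s, ms);
}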

Aggregations

RecordingMetaData (org.apache.openmeetings.db.entity.record.RecordingMetaData): 9 usages
File (java.io.File): 5 usages
OmFileHelper.getRecordingMetaData (org.apache.openmeetings.util.OmFileHelper.getRecordingMetaData): 5 usages
Recording (org.apache.openmeetings.db.entity.record.Recording): 4 usages
IOException (java.io.IOException): 2 usages
ArrayList (java.util.ArrayList): 2 usages
ProcessResultList (org.apache.openmeetings.util.process.ProcessResultList): 2 usages
RecordingMetaDelta (org.apache.openmeetings.db.entity.record.RecordingMetaDelta): 1 usage
Room (org.apache.openmeetings.db.entity.room.Room): 1 usage
ProcessResult (org.apache.openmeetings.util.process.ProcessResult): 1 usage
Serializer (org.simpleframework.xml.Serializer): 1 usage
Registry (org.simpleframework.xml.convert.Registry): 1 usage
RegistryStrategy (org.simpleframework.xml.convert.RegistryStrategy): 1 usage
Persister (org.simpleframework.xml.core.Persister): 1 usage
Strategy (org.simpleframework.xml.strategy.Strategy): 1 usage
RegistryMatcher (org.simpleframework.xml.transform.RegistryMatcher): 1 usage