Search in sources :

Example 31 with VideoEvent

use of com.att.aro.core.videoanalysis.pojo.VideoEvent in project VideoOptimzer by attdevsupport.

The following example shows the manifestConcurrentSessions method of the VideoConcurrentSessionImpl class.

/**
 * Builds one VideoConcurrentSession result per selected VideoStream.
 * For each selected stream, the distinct TCP/UDP sessions carrying its video
 * events are collected (de-duplicated) and their start/end times are handed to
 * findConcurrency(); streams with a positive concurrent-session count are
 * reported under the manifest's video name.
 *
 * @param videoStreamMap streams keyed by timestamp; may be null or empty
 * @return list of concurrency results, empty when there is nothing to report
 */
public List<VideoConcurrentSession> manifestConcurrentSessions(SortedMap<Double, VideoStream> videoStreamMap) {
    List<VideoConcurrentSession> concurrentSessionList = new ArrayList<>();
    // BUGFIX: guard the map that is actually iterated below. The original tested
    // the videoStreamCollection field, which can be non-empty while the
    // videoStreamMap parameter is null, leading to an NPE in the for-loop.
    if (MapUtils.isNotEmpty(videoStreamMap)) {
        for (VideoStream videoStream : videoStreamMap.values()) {
            if (videoStream.isSelected()) {
                ArrayList<Double> sessionStartTimes = new ArrayList<>();
                ArrayList<Double> sessionEndTimes = new ArrayList<>();
                ArrayList<Session> sessionList = new ArrayList<>();
                SortedMap<String, VideoEvent> videoEventList = videoStream.getVideoEventMap();
                // De-duplicate: each distinct Session contributes exactly one start/end pair.
                for (VideoEvent veEntry : videoEventList.values()) {
                    Session session = veEntry.getSession();
                    if (!sessionList.contains(session)) {
                        sessionList.add(session);
                        sessionStartTimes.add(session.getSessionStartTime());
                        sessionEndTimes.add(session.getSessionEndTime());
                    }
                }
                VideoConcurrentSession videoConcurrentSession = findConcurrency(sessionStartTimes, sessionEndTimes);
                // Only report streams that actually exhibit concurrency.
                if (videoConcurrentSession != null && videoConcurrentSession.getConcurrentSessionCount() > 0) {
                    videoConcurrentSession.setVideoName(videoStream.getManifest().getVideoName());
                    concurrentSessionList.add(videoConcurrentSession);
                }
            }
        }
    }
    return concurrentSessionList;
}
Also used : VideoConcurrentSession(com.att.aro.core.bestpractice.pojo.VideoConcurrentSession) ArrayList(java.util.ArrayList) VideoStream(com.att.aro.core.videoanalysis.pojo.VideoStream) VideoEvent(com.att.aro.core.videoanalysis.pojo.VideoEvent) Session(com.att.aro.core.packetanalysis.pojo.Session) VideoConcurrentSession(com.att.aro.core.bestpractice.pojo.VideoConcurrentSession)

Example 32 with VideoEvent

use of com.att.aro.core.videoanalysis.pojo.VideoEvent in project VideoOptimzer by attdevsupport.

The following example shows the runTest method of the VideoSegmentPacingImpl class.

/**
 * Best-practice test for video segment pacing: computes the average wall-clock
 * spacing between segment downloads for the single selected video stream.
 * Produces CONFIG_REQUIRED when zero or multiple manifests are selected,
 * NO_DATA when no streaming data exists, and SELF_TEST otherwise.
 *
 * @param tracedata analyzed trace containing streaming video data
 * @return populated VideoChunkPacingResult (never null)
 */
@Override
public AbstractBestPracticeResult runTest(PacketAnalyzerResult tracedata) {
    BPResultType bpResultType = BPResultType.SELF_TEST;
    VideoChunkPacingResult result = new VideoChunkPacingResult();
    // Primitive doubles: the original used boxed Double, auto-boxing/unboxing on
    // every comparison and arithmetic operation in the segment loop.
    double dlFirst = Double.MAX_VALUE;
    double dlLast = 0D;
    int count = 0;
    init(result);
    if ((streamingVideoData = tracedata.getStreamingVideoData()) != null && (videoStreamCollection = streamingVideoData.getVideoStreamMap()) != null && MapUtils.isNotEmpty(videoStreamCollection)) {
        selectedCount = streamingVideoData.getSelectedManifestCount();
        invalidCount = streamingVideoData.getInvalidManifestCount();
        if (selectedCount == 0) {
            // No manifest selected: tell the user whether invalid manifests are the cause.
            if (invalidCount == videoStreamCollection.size()) {
                result.setResultText(invalidManifestsFound);
            } else if (invalidCount > 0) {
                result.setResultText(noManifestsSelectedMixed);
            } else {
                result.setResultText(noManifestsSelected);
            }
            bpResultType = BPResultType.CONFIG_REQUIRED;
            result.setResultExcelText(bpResultType.getDescription());
            result.setSelfTest(false);
        } else if (selectedCount > 1) {
            // Pacing is only meaningful for exactly one selected manifest.
            result.setResultText(multipleManifestsSelected);
            bpResultType = BPResultType.CONFIG_REQUIRED;
            result.setResultExcelText(bpResultType.getDescription());
            result.setSelfTest(false);
        } else {
            for (VideoStream videoStream : videoStreamCollection.values()) {
                if (videoStream != null && videoStream.isSelected() && !videoStream.getVideoEventsBySegment().isEmpty()) {
                    for (VideoEvent videoEvent : videoStream.getVideoEventsBySegment()) {
                        if (videoEvent.isNormalSegment()) {
                            count++;
                            double dlTime = videoEvent.getDLLastTimestamp();
                            if (dlTime < dlFirst) {
                                // look for earliest download of valid segment in a stream
                                dlFirst = dlTime;
                            }
                            if (dlTime > dlLast) {
                                // look for last download of valid segment in a stream
                                dlLast = dlTime;
                            }
                        }
                    }
                    // Only the first selected, non-empty stream is analyzed (selectedCount == 1 here).
                    break;
                }
            }
            // Average gap between consecutive downloads: span divided by (count - 1) intervals.
            double segmentPacing = 0;
            if (count > 1) {
                segmentPacing = (dlLast - dlFirst) / (count - 1);
            }
            bpResultType = BPResultType.SELF_TEST;
            // NOTE(review): the two format calls mix "count == 1" and "count <= 1"
            // pluralization tests, and MathUtils.equals vs "<= 1.0" for the pacing
            // suffix — confirm whether the texts are meant to agree.
            result.setResultText(MessageFormat.format(textResults, count == 1 ? "was" : "were", count, count == 1 ? "" : "different", count == 1 ? "" : "s", count == 1 ? "was" : "were", segmentPacing, MathUtils.equals(segmentPacing, 1.0) ? "" : "s"));
            result.setResultExcelText(MessageFormat.format(textExcelResults, bpResultType.getDescription(), count, count <= 1 ? "" : "different", count <= 1 ? "" : "s", count <= 1 ? "was" : "were", segmentPacing, segmentPacing <= 1.0 ? "" : "s"));
            result.setChunkPacing(segmentPacing);
            result.setSelfTest(true);
        }
    } else {
        result.setResultText(noData);
        bpResultType = BPResultType.NO_DATA;
        result.setResultExcelText(bpResultType.getDescription());
    }
    result.setResultType(bpResultType);
    return result;
}
Also used : BPResultType(com.att.aro.core.bestpractice.pojo.BPResultType) VideoStream(com.att.aro.core.videoanalysis.pojo.VideoStream) VideoEvent(com.att.aro.core.videoanalysis.pojo.VideoEvent) VideoChunkPacingResult(com.att.aro.core.bestpractice.pojo.VideoChunkPacingResult)

Example 33 with VideoEvent

use of com.att.aro.core.videoanalysis.pojo.VideoEvent in project VideoOptimzer by attdevsupport.

The following example shows the populate method of the VideoThroughputPlot class.

/**
 * Populates the throughput plot: installs a tooltip generator on the plot's
 * renderer (when trace data is present) and assembles the XY dataset from the
 * video/audio/muxed event series according to the current SegmentOptions.
 *
 * @param plot     target plot; its renderer and dataset are mutated
 * @param analysis trace data; when null only the dataset is (re)assembled
 */
@Override
public void populate(XYPlot plot, AROTraceData analysis) {
    if (analysis == null) {
        LOGGER.info("no trace data here");
    } else {
        XYItemRenderer videoRenderer = plot.getRenderer();
        videoRenderer.setBaseToolTipGenerator(new XYToolTipGenerator() {

            @Override
            public String generateToolTip(XYDataset dataset, int series, int item) {
                // Two series => series 0 is video, series 1 is audio; one series => merged list.
                if (dataset.getSeriesCount() > 1) {
                    if (series == 0 && item < videoEventList.size()) {
                        VideoEvent videoEvent = videoEventList.get(item);
                        return getToolTip(videoEvent);
                    } else if (series == 1 && item < audioEventList.size()) {
                        VideoEvent audioEvent = audioEventList.get(item);
                        return getToolTip(audioEvent);
                    } else {
                        return "";
                    }
                } else {
                    if (item < eventList.size()) {
                        return getToolTip(eventList.get(item));
                    } else {
                        return "";
                    }
                }
            }

            /**
             * Formats segment id, quality, DL timestamps, duration and throughput
             * into the resource-bundle tooltip template.
             */
            private String getToolTip(VideoEvent event) {
                // Format once and split (replaces a StringBuffer used for a single append).
                // NOTE(review): if getQuality() ever contains a comma, the split shifts
                // every subsequent field — confirm quality strings are comma-free.
                String[] value = String.format("%.0f,%s, %.2f,%.2f,%.3f,%.3f", (double) event.getSegmentID(), event.getQuality(), event.getDLTimeStamp(), event.getDLLastTimestamp(), event.getDuration(), getThroughput(event)).split(",");
                return (MessageFormat.format(ResourceBundleHelper.getDefaultBundle().getString("videotab.throughput.tooltip"), value[0], value[1], value[2], value[3], value[4], value[5]));
            }
        });
    }
    // NOTE(review): dataset assembly below runs even when analysis == null — confirm intended.
    XYSeriesCollection collection = new XYSeriesCollection();
    if (!isMuxed) {
        if (optionSelected == SegmentOptions.DEFAULT) {
            // DEFAULT shows both video and audio series.
            collection.addSeries(videoEventSeries);
            collection.addSeries(audioEventSeries);
        } else {
            collection.addSeries(optionSelected == SegmentOptions.VIDEO ? videoEventSeries : audioEventSeries);
        }
        plot.setDataset(collection);
    } else {
        // Muxed content: whichever series has data carries the combined events.
        collection.addSeries(createMuxedSeries(!videoEventSeries.isEmpty() ? videoEventSeries : audioEventSeries));
        plot.setDataset(collection);
    }
}
Also used : XYDataset(org.jfree.data.xy.XYDataset) VideoEvent(com.att.aro.core.videoanalysis.pojo.VideoEvent) XYItemRenderer(org.jfree.chart.renderer.xy.XYItemRenderer) XYToolTipGenerator(org.jfree.chart.labels.XYToolTipGenerator) XYSeriesCollection(org.jfree.data.xy.XYSeriesCollection)

Example 34 with VideoEvent

use of com.att.aro.core.videoanalysis.pojo.VideoEvent in project VideoOptimzer by attdevsupport.

The following example shows the syncWithAudio method of the VideoSegmentAnalyzer class.

/**
 * <pre>
 * Scan through all audio event related to videoEvent Starting with audio event
 * from before the videoEvent Record all audio segments associated with Video
 * segment. Including partial overlaps, often audio and video segments do not
 * start at the same time.
 *
 * @param startupOffset    startup offset added when computing each audio segment's play time
 *
 * @param videoStream      contains collections of Video, Audio and Captioning
 * @param audioStreamMap   contains all audio in videoStream (when non-muxed)
 *                         <key definition: segmentStartTime, endTS (in
 *                         milliseconds)>
 * @param videoEvent       The video segment to receive audio linkage
 * @return the last selected audio event overlapping the video segment, or null if none
 */
private VideoEvent syncWithAudio(double startupOffset, VideoStream videoStream, TreeMap<String, VideoEvent> audioStreamMap, VideoEvent videoEvent) {
    VideoEvent audioEvent = null;
    // Key range covering the video segment's playback window [start, start + duration].
    String segmentStartTime = VideoStream.generateTimestampKey(videoEvent.getSegmentStartTime());
    String segmentEndTime = VideoStream.generateTimestampKey(videoEvent.getSegmentStartTime() + videoEvent.getDuration());
    String audioKeyStart = null;
    String audioKeyEnd = null;
    try {
        // Strictly-lower / strictly-higher keys so partial overlaps at both edges are visited.
        // NOTE(review): lowerKey()/higherKey() return null when no such key exists; a null
        // audioKeyStart makes key.equals(...) below throw an NPE that the broad catch
        // swallows — confirm this is the intended loop-termination path.
        audioKeyStart = audioStreamMap.lowerKey(segmentStartTime);
        audioKeyEnd = audioStreamMap.higherKey(segmentEndTime);
        String key = audioKeyStart;
        while (!key.equals(audioKeyEnd)) {
            VideoEvent lastAudioEvent = audioEvent;
            VideoEvent tempEvent = audioStreamMap.get(key);
            if (tempEvent.isSelected()) {
                audioEvent = tempEvent;
                calcAudioTime(videoEvent, audioEvent);
                // When this audio segment should play, including accumulated stall time.
                double audioPlaytime = audioEvent.getSegmentStartTime() + startupOffset + totalStallOffset;
                if (audioEvent.getDLLastTimestamp() > audioPlaytime) {
                    // Download finished after the scheduled play time -> record a stall.
                    // NOTE(review): lastAudioEvent is null when the very first selected audio
                    // segment stalls, which NPEs here (swallowed by the catch below) — confirm.
                    double stallPoint = lastAudioEvent.getSegmentStartTime() + audioEvent.getDuration() - videoPrefs.getStallPausePoint();
                    // NOTE(review): the next two assignments are dead stores — stallOffset is
                    // overwritten by the third line (calcSegmentStallOffset may still have
                    // side effects). Confirm which formula is actually intended.
                    stallOffset = audioEvent.getDLLastTimestamp() - audioEvent.getPlayTime() + getStallRecovery();
                    stallOffset = calcSegmentStallOffset(startupOffset, audioEvent, totalStallOffset);
                    stallOffset = audioEvent.getDLLastTimestamp() - audioPlaytime + getStallRecovery();
                    audioEvent.setStallTime(stallOffset);
                    videoEvent.setStallTime(stallOffset);
                    totalStallOffset += stallOffset;
                    videoStall = new VideoStall(stallPoint);
                    videoStall.setSegmentTryingToPlay(audioEvent);
                    // NOTE(review): this end-timestamp is immediately overwritten two lines down.
                    videoStall.setStallEndTimestamp(audioEvent.getPlayTime());
                    double resumePoint = audioEvent.getDLLastTimestamp() + getStallRecovery();
                    videoStall.setStallEndTimestamp(resumePoint);
                    stalls.add(videoStall);
                }
            }
            // advance to next segmentStartTime
            // appending "z" to the prefix before ":" skips all keys sharing this segmentStartTime
            key = audioStreamMap.higherKey(StringUtils.substringBefore(key, ":") + "z");
        }
    } catch (Exception e) {
        // NOTE(review): broad catch + printStackTrace hides the null-key termination above;
        // prefer the class logger and a narrower exception type.
        e.printStackTrace();
    }
    return audioEvent;
}
Also used : VideoEvent(com.att.aro.core.videoanalysis.pojo.VideoEvent) VideoStall(com.att.aro.core.packetanalysis.pojo.VideoStall)

Example 35 with VideoEvent

use of com.att.aro.core.videoanalysis.pojo.VideoEvent in project VideoOptimzer by attdevsupport.

The following example shows the generateByteBufferData method of the VideoSegmentAnalyzer class.

/**
 * Scans a VideoStream to produce/populate VideoStream.byteBufferList and
 * VideoStream.toolTipDetailMap.
 *
 * @param videoStream the stream whose buffer-occupancy data is rebuilt
 */
private void generateByteBufferData(VideoStream videoStream) {
    // Simulates byte-buffer occupancy over time: each downloaded segment ADDS its
    // size at its download-end timestamp; each played segment REMOVES its size at
    // its play time. Events are processed in download order via mergedMap.
    VideoEvent eventPlay = null;
    // Running buffer level in bytes (boxed Double matches addByteBufferPoints' use).
    Double buffer = 0D;
    VideoEvent eventDL;
    // Play time of the next queued (downloaded-but-unplayed) event; 0 = queue empty.
    double timeKey = 0D;
    this.videoStream = videoStream;
    videoStream.clearBufferOccupancyData();
    // Downloaded-but-not-yet-played events, keyed by play time.
    TreeMap<Double, VideoEvent> mergedPlayMap = new TreeMap<>();
    // Video + audio events merged into download order, failed requests excluded.
    TreeMap<String, VideoEvent> mergedMap = new TreeMap<>();
    videoStream.getVideoEventMap().entrySet().stream().filter((f) -> !f.getValue().isFailedRequest()).forEach(e -> {
        mergedMap.put(e.getKey(), e.getValue());
    });
    videoStream.getAudioEventMap().entrySet().stream().filter((f) -> !f.getValue().isFailedRequest()).forEach(e -> {
        mergedMap.put(e.getKey(), e.getValue());
    });
    byteBufferList = videoStream.getByteBufferList();
    for (String key : mergedMap.keySet()) {
        eventDL = mergedMap.get(key);
        if (eventDL.isNormalSegment()) {
            if (timeKey > 0 && (timeKey < eventDL.getEndTS())) {
                // Drain every queued event whose play time falls before this download
                // completes: each drain subtracts its size from the buffer.
                eventPlay = mergedPlayMap.get(timeKey);
                while (eventPlay != null && eventPlay.getPlayTime() <= eventDL.getEndTS()) {
                    mergedPlayMap.remove(eventPlay.getPlayTime());
                    buffer = addByteBufferPoints(buffer, eventPlay, eventPlay.getPlayTime(), -eventPlay.getSize());
                    timeKey = mergedPlayMap.isEmpty() ? 0 : mergedPlayMap.firstKey();
                    eventPlay = mergedPlayMap.isEmpty() ? null : mergedPlayMap.get(mergedPlayMap.firstKey());
                }
            }
            // Queue this event for playback and credit its bytes at download end.
            mergedPlayMap.put(eventDL.getPlayTime(), eventDL);
            timeKey = mergedPlayMap.firstKey();
            buffer = addByteBufferPoints(buffer, eventDL, eventDL.getEndTS(), eventDL.getSize());
        }
    }
    // Drain whatever remains queued after the last download.
    timeKey = mergedPlayMap.isEmpty() ? 0 : mergedPlayMap.firstKey();
    while (!mergedPlayMap.isEmpty()) {
        eventPlay = mergedPlayMap.remove(timeKey);
        buffer = addByteBufferPoints(buffer, eventPlay, eventPlay.getPlayTime(), -eventPlay.getSize());
        timeKey = mergedPlayMap.isEmpty() ? 0 : mergedPlayMap.firstKey();
        // NOTE(review): this assignment is a dead store — eventPlay is reassigned
        // from remove(timeKey) at the top of the next iteration.
        eventPlay = mergedPlayMap.isEmpty() ? null : mergedPlayMap.get(mergedPlayMap.firstKey());
    }
}
Also used : VideoStreamStartup(com.att.aro.core.peripheral.pojo.VideoStreamStartup) StringUtils(org.apache.commons.lang.StringUtils) UserEvent(com.att.aro.core.peripheral.pojo.UserEvent) ValidationStartup(com.att.aro.core.peripheral.pojo.VideoStreamStartup.ValidationStartup) AbstractTraceResult(com.att.aro.core.packetanalysis.pojo.AbstractTraceResult) Autowired(org.springframework.beans.factory.annotation.Autowired) DUPLICATE_HANDLING(com.att.aro.core.videoanalysis.pojo.VideoUsagePrefs.DUPLICATE_HANDLING) ArrayList(java.util.ArrayList) Logger(org.apache.log4j.Logger) XYPair(com.att.aro.core.videoanalysis.XYPair) PacketAnalyzerResult(com.att.aro.core.packetanalysis.pojo.PacketAnalyzerResult) Nonnull(javax.annotation.Nonnull) VideoStall(com.att.aro.core.packetanalysis.pojo.VideoStall) VideoStreamStartupData(com.att.aro.core.peripheral.pojo.VideoStreamStartupData) VideoUsagePrefs(com.att.aro.core.videoanalysis.pojo.VideoUsagePrefs) NonNull(lombok.NonNull) NavigableMap(java.util.NavigableMap) VideoEvent(com.att.aro.core.videoanalysis.pojo.VideoEvent) VideoStream(com.att.aro.core.videoanalysis.pojo.VideoStream) List(java.util.List) StreamingVideoData(com.att.aro.core.videoanalysis.pojo.StreamingVideoData) UserEventType(com.att.aro.core.peripheral.pojo.UserEvent.UserEventType) TreeMap(java.util.TreeMap) CollectionUtils(org.springframework.util.CollectionUtils) LogManager(org.apache.log4j.LogManager) IVideoUsagePrefsManager(com.att.aro.core.videoanalysis.IVideoUsagePrefsManager) TraceDirectoryResult(com.att.aro.core.packetanalysis.pojo.TraceDirectoryResult) Collections(java.util.Collections) SortedMap(java.util.SortedMap) VideoEvent(com.att.aro.core.videoanalysis.pojo.VideoEvent) TreeMap(java.util.TreeMap)

Aggregations

VideoEvent (com.att.aro.core.videoanalysis.pojo.VideoEvent)48 VideoStream (com.att.aro.core.videoanalysis.pojo.VideoStream)19 ArrayList (java.util.ArrayList)12 TreeMap (java.util.TreeMap)7 BPResultType (com.att.aro.core.bestpractice.pojo.BPResultType)5 VideoStall (com.att.aro.core.packetanalysis.pojo.VideoStall)5 StreamingVideoData (com.att.aro.core.videoanalysis.pojo.StreamingVideoData)5 HashMap (java.util.HashMap)4 List (java.util.List)4 XYDataset (org.jfree.data.xy.XYDataset)4 AbstractTraceResult (com.att.aro.core.packetanalysis.pojo.AbstractTraceResult)3 TraceDirectoryResult (com.att.aro.core.packetanalysis.pojo.TraceDirectoryResult)3 UserEvent (com.att.aro.core.peripheral.pojo.UserEvent)3 VideoStreamStartup (com.att.aro.core.peripheral.pojo.VideoStreamStartup)3 VideoStreamStartupData (com.att.aro.core.peripheral.pojo.VideoStreamStartupData)3 DUPLICATE_HANDLING (com.att.aro.core.videoanalysis.pojo.VideoUsagePrefs.DUPLICATE_HANDLING)3 Collections (java.util.Collections)3 StringUtils (org.apache.commons.lang.StringUtils)3 LogManager (org.apache.log4j.LogManager)3 Logger (org.apache.log4j.Logger)3