use of com.att.aro.core.video.pojo.Segment in project VideoOptimzer by attdevsupport.
the class VideoTrafficInferencer method prepareCandidates.
private List<Segment> prepareCandidates(Map<Double, HttpRequestResponseInfo> reqMap, VideoManifest manifest) {
    List<Segment> candidateList = new ArrayList<Segment>();
    candidateList.add(new Segment(manifest, null, -1, -1, -1, -1, -1));
    for (HttpRequestResponseInfo req : reqMap.values()) {
        // Reducing 30000 bytes from the minimum to account for lost packets in the traffic file
        double minVideoSegmentSize = (req.getContentLength() - 30000) / TRANSFER_OVERHEAD_HIGH;
        double maxVideoSegmentSize = req.getContentLength() / TRANSFER_OVERHEAD_LOW;
        double minAudioRequestSize = manifest.getMedianAudioTrackSize() * TRANSFER_OVERHEAD_LOW;
        double maxAudioRequestSize = manifest.getMedianAudioTrackSize() * TRANSFER_OVERHEAD_HIGH;
        double minVideoRequestSize = (manifest.getMaxVideoSegmentIndexSize() + 30000) * TRANSFER_OVERHEAD_HIGH;
        if (minAudioRequestSize < req.getContentLength() && req.getContentLength() < maxAudioRequestSize) {
            req.setAudioPossibility(true);
            possibleAudioRequestMap.put(req.getRequestCounterCSI(), req);
        }
        if (req.getContentLength() > minVideoRequestSize) {
            req.setVideoPossibility(true);
        } else {
            nonSegmentRequestMap.put(req.getRequestCounterCSI(), req);
            continue;
        }
        if (manifest != null && manifest.getTracks() != null) {
            for (Track track : manifest.getTracks()) {
                if (track.getMediaType() == MediaType.VIDEO) {
                    int counter = 0;
                    for (int size : track.getSegmentSizes()) {
                        counter++;
                        if (minVideoSegmentSize <= size && size <= maxVideoSegmentSize) {
                            candidateList.add(new Segment(manifest, track, counter, size, req.getKey(), req.getRequestCounterCSI(), candidateList.size() - 1));
                        }
                    }
                }
            }
        }
    }
    candidateList.add(new Segment(manifest, null, -1, -1, -1, -1, candidateList.size() - 1));
    return candidateList;
}
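
The candidate test above hinges on the TRANSFER_OVERHEAD_LOW and TRANSFER_OVERHEAD_HIGH constants declared elsewhere in VideoTrafficInferencer. Below is a minimal standalone sketch of the same size-window check; the class name, the 1.0/1.1 overhead values, and the byte counts in main are illustrative assumptions, not values taken from the project.

// Hypothetical sketch of the size-window test used in prepareCandidates.
// The overhead constants below are assumed for illustration; the real values
// are defined in VideoTrafficInferencer and may differ.
public class SegmentSizeWindowSketch {

    static final double TRANSFER_OVERHEAD_LOW = 1.0;   // assumed lower bound on per-request overhead
    static final double TRANSFER_OVERHEAD_HIGH = 1.1;  // assumed upper bound on per-request overhead

    // A manifest segment size is a plausible match for an observed request when
    // the content length, adjusted for overhead and possible packet loss,
    // brackets the size listed in the manifest.
    static boolean couldBeVideoSegment(int contentLength, int manifestSegmentSize) {
        double minVideoSegmentSize = (contentLength - 30000) / TRANSFER_OVERHEAD_HIGH;
        double maxVideoSegmentSize = contentLength / TRANSFER_OVERHEAD_LOW;
        return minVideoSegmentSize <= manifestSegmentSize && manifestSegmentSize <= maxVideoSegmentSize;
    }

    public static void main(String[] args) {
        System.out.println(couldBeVideoSegment(1_050_000, 1_000_000)); // true: inside the window
        System.out.println(couldBeVideoSegment(1_050_000, 2_000_000)); // false: larger than the response could carry
    }
}
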
use of com.att.aro.core.video.pojo.Segment in project VideoOptimzer by attdevsupport.
the class VideoTrafficInferencer method inferVideoData.
public StreamingVideoData inferVideoData(AbstractTraceResult result, List<Session> sessionlist, String manifestFilePath) {
    videoRequestMap = new HashSet<>();
    nonSegmentRequestMap = new HashMap<>();
    possibleAudioRequestMap = new TreeMap<>();
    streamingVideoData = new StreamingVideoData(result.getTraceDirectory());
    boolean flag = false;
    File manifestFile;
    if (result.getTraceDirectory().equals(manifestFilePath)) {
        CSIManifestAndState csiState = csiDataHelper.readData(manifestFilePath + System.getProperty("file.separator") + "CSI");
        if (csiState.getAnalysisState().equals("Fail")) {
            return streamingVideoData;
        }
        manifestFile = csiDataHelper.generateManifestPath(manifestFilePath, csiState.getManifestFileName());
    } else {
        flag = true;
        manifestFile = new File(manifestFilePath);
    }
    byte[] fileContent;
    VideoManifest videoManifest = new VideoManifest();
    List<Track> tracks = new ArrayList<>();
    String fileExtName = FilenameUtils.getExtension(manifestFile.getPath());
    requestMap = generateRequestMap(sessionlist);
    if (manifestFile.exists() && fileExtName != null) {
        switch (fileExtName) {
            case "json":
                ObjectMapper mapper = new ObjectMapper();
                mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
                try {
                    videoManifest = mapper.readValue(manifestFile, VideoManifest.class);
                } catch (IOException ioe) {
                    LOGGER.error("Exception while parsing Manifest JSON for CSI", ioe);
                }
                break;
            case "mpd":
                try {
                    fileContent = Files.readAllBytes(manifestFile.toPath());
                    ManifestBuilderDASH manifestBuilderDASH = new ManifestBuilderDASH();
                    manifestBuilderDASH.create(requestMap.values().iterator().next(), fileContent, "blank");
                    for (ChildManifest cManifest : manifestBuilderDASH.getManifestCollection().getSegmentChildManifestListInOrder()) {
                        Track sTrack = new Track();
                        List<Integer> segmentSizes = new ArrayList<Integer>();
                        List<Double> segmentDurations = new ArrayList<Double>();
                        sTrack.setMediaType(cManifest.isVideo() ? MediaType.VIDEO : MediaType.AUDIO);
                        sTrack.setMediaBandwidth((float) cManifest.getBandwidth());
                        cManifest.getSegmentInfoTrie().values().forEach((segment) -> segmentSizes.add(segment.getSize()));
                        cManifest.getSegmentInfoTrie().values().forEach((segment) -> segmentDurations.add(segment.getDuration()));
                        sTrack.setSegmentSizes(segmentSizes);
                        sTrack.setSegmentDurations(segmentDurations);
                        tracks.add(sTrack);
                    }
                } catch (IOException ioe) {
                    LOGGER.error("Exception while parsing MPD for CSI", ioe);
                }
                videoManifest.setTracks(tracks);
                break; // without this break the "mpd" case falls through and re-parses the file as HLS
            case "m3u8":
                try {
                    videoManifest = hlsManifestParseImpl.getManifest(videoManifest, manifestFile);
                } catch (Exception e) {
                    LOGGER.error(e.getMessage());
                }
                break;
            default:
                break;
        }
    }
    List<Segment> candidateList = prepareCandidates(requestMap, videoManifest);
    Map<Integer, List<Segment>> edgeDistanceMap = calculateDistancesAndPopulateAdjacencyList(candidateList, requestMap);
    TreeMap<Integer, List<Integer>> lastNodes = findShortestPath(candidateList, edgeDistanceMap);
    if (!lastNodes.isEmpty()) {
        VideoStream videoStream = new VideoStream();
        videoStream.setManifest(createManifest(FilenameUtils.getBaseName(manifestFile.getPath()), ManifestType.MASTER, ContentType.MUXED));
        streamingVideoData.addVideoStream(firstRequestTimeStamp, videoStream);
        List<Segment> solution = getLikelySequences(candidateList, edgeDistanceMap, lastNodes);
        Manifest manifest;
        if (!solution.isEmpty()) {
            for (Segment segment : solution) {
                manifest = createManifest(FilenameUtils.getBaseName(manifestFile.getPath()), ManifestType.CHILD, ContentType.VIDEO);
                ChildManifest childManifest = new ChildManifest();
                childManifest.setManifest(manifest);
                VideoEvent videoEvent = new VideoEvent(getDefaultThumbnail(), manifest, segment, requestMap.get(segment.getRequestKey()));
                videoRequestMap.add(requestMap.get(segment.getRequestKey()));
                videoEvent.setChildManifest(childManifest);
                videoStream.addVideoEvent(videoEvent);
            }
            int segmentIndex = 0;
            // orElse(null) avoids a NoSuchElementException when the manifest has no audio track
            Track audioTrack = videoManifest.getTracks().stream().filter(track -> track.getMediaType().equals(MediaType.AUDIO)).findFirst().orElse(null);
            if (audioTrack != null) {
                for (HttpRequestResponseInfo rrInfo : possibleAudioRequestMap.values()) {
                    if (!videoRequestMap.contains(rrInfo) && rrInfo.getTime() > videoStream.getFirstSegment().getDLTime()) {
                        Segment audioSegment = new Segment(videoManifest, videoManifest.getAudioTrack(), ++segmentIndex, audioTrack.getSegmentSizes().get(segmentIndex - 1), rrInfo.getKey(), rrInfo.getRequestCounterCSI(), -1);
                        manifest = createManifest(FilenameUtils.getBaseName(manifestFile.getPath()), ManifestType.CHILD, ContentType.AUDIO);
                        ChildManifest childManifest = new ChildManifest();
                        childManifest.setManifest(manifest);
                        VideoEvent videoEvent = new VideoEvent(getDefaultThumbnail(), manifest, audioSegment, rrInfo);
                        videoEvent.setChildManifest(childManifest);
                        videoStream.addVideoEvent(videoEvent);
                    }
                }
            }
        }
    }
    if (flag) {
        saveCSIManifestAndState(manifestFile.toString());
    }
    videoSegmentAnalyzer.process(result, streamingVideoData);
    return streamingVideoData;
}
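
The "json" branch above relies on Jackson ignoring manifest fields that the target POJO does not model. Below is a minimal, self-contained sketch of that lenient parse step; ManifestStub and its fields are hypothetical stand-ins for VideoManifest, used only to keep the example runnable on its own.

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.File;
import java.io.IOException;
import java.util.List;

// Hypothetical sketch of the lenient JSON manifest parse used in inferVideoData.
public class ManifestJsonSketch {

    // Assumed subset of the manifest schema; unknown JSON fields are simply ignored.
    public static class ManifestStub {
        public double medianAudioTrackSize;
        public List<Integer> segmentSizes;
    }

    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        // Same setting as inferVideoData: do not fail on properties the stub does not declare.
        mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
        ManifestStub manifest = mapper.readValue(new File(args[0]), ManifestStub.class);
        System.out.println("median audio track size: " + manifest.medianAudioTrackSize);
    }
}
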