Use of com.google.cloud.video.transcoder.v1.AudioStream in project java-docs-samples by GoogleCloudPlatform.
The class CreateJobWithSetNumberImagesSpritesheet, method createJobWithSetNumberImagesSpritesheet:
// Creates a job from an ad-hoc configuration and generates two spritesheets from the input
// video. Each spritesheet contains a set number of images.
public static void createJobWithSetNumberImagesSpritesheet(
    String projectId, String location, String inputUri, String outputUri) throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.create()) {
    VideoStream videoStream0 = VideoStream.newBuilder()
        .setH264(VideoStream.H264CodecSettings.newBuilder()
            .setBitrateBps(550000).setFrameRate(60).setHeightPixels(360).setWidthPixels(640))
        .build();
    AudioStream audioStream0 =
        AudioStream.newBuilder().setCodec("aac").setBitrateBps(64000).build();

    // Generates a 10x10 spritesheet of small images from the input video. To preserve the source
    // aspect ratio, set either the spriteWidthPixels field or the spriteHeightPixels field, but
    // not both. The smallSpritesheetFilePrefix and largeSpritesheetFilePrefix values are
    // output file-name prefixes (plain strings) defined elsewhere in the sample class.
    SpriteSheet smallSpriteSheet = SpriteSheet.newBuilder()
        .setFilePrefix(smallSpritesheetFilePrefix)
        .setSpriteHeightPixels(32).setSpriteWidthPixels(64)
        .setColumnCount(10).setRowCount(10).setTotalCount(100)
        .build();
    // Generates a 10x10 spritesheet of larger images from the input video.
    SpriteSheet largeSpriteSheet = SpriteSheet.newBuilder()
        .setFilePrefix(largeSpritesheetFilePrefix)
        .setSpriteHeightPixels(72).setSpriteWidthPixels(128)
        .setColumnCount(10).setRowCount(10).setTotalCount(100)
        .build();

    JobConfig config = JobConfig.newBuilder()
        .addInputs(Input.newBuilder().setKey("input0").setUri(inputUri))
        .setOutput(Output.newBuilder().setUri(outputUri))
        .addElementaryStreams(
            ElementaryStream.newBuilder().setKey("video_stream0").setVideoStream(videoStream0))
        .addElementaryStreams(
            ElementaryStream.newBuilder().setKey("audio_stream0").setAudioStream(audioStream0))
        .addMuxStreams(MuxStream.newBuilder()
            .setKey("sd").setContainer("mp4")
            .addElementaryStreams("video_stream0")
            .addElementaryStreams("audio_stream0")
            .build())
        // Add the spritesheet configs to the job config.
        .addSpriteSheets(smallSpriteSheet)
        .addSpriteSheets(largeSpriteSheet)
        .build();

    var createJobRequest = CreateJobRequest.newBuilder()
        .setJob(Job.newBuilder()
            .setInputUri(inputUri).setOutputUri(outputUri).setConfig(config).build())
        .setParent(LocationName.of(projectId, location).toString())
        .build();

    // Send the job creation request and process the response.
    Job job = transcoderServiceClient.createJob(createJobRequest);
    System.out.println("Job: " + job.getName());
  }
}
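For reference, a minimal sketch of how this sample might be invoked. The project ID, location, and gs:// URIs below are placeholders for illustration, not values taken from the sample:
public static void main(String[] args) throws IOException {
  // Hypothetical values; replace with your own project, region, and Cloud Storage paths.
  createJobWithSetNumberImagesSpritesheet(
      "my-project-id", "us-central1",
      "gs://my-bucket/my-input-video.mp4", "gs://my-bucket/output-folder/");
}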
Use of com.google.cloud.video.transcoder.v1.AudioStream in project java-docs-samples by GoogleCloudPlatform.
The class CreateJobWithStaticOverlay, method createJobWithStaticOverlay:
// Creates a job from an ad-hoc configuration and adds a static overlay to it.
public static void createJobWithStaticOverlay(String projectId, String location, String inputUri,
    String overlayImageUri, String outputUri) throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.create()) {
    VideoStream videoStream0 = VideoStream.newBuilder()
        .setH264(VideoStream.H264CodecSettings.newBuilder()
            .setBitrateBps(550000).setFrameRate(60).setHeightPixels(360).setWidthPixels(640))
        .build();
    AudioStream audioStream0 =
        AudioStream.newBuilder().setCodec("aac").setBitrateBps(64000).build();

    // Create the overlay image. Only JPEG is supported. Image resolution is based on the output
    // video resolution. To respect the original image aspect ratio, set either x or y to 0.0.
    // This example stretches the overlay image over the full width and half of the height of the
    // output video.
    Overlay.Image overlayImage = Overlay.Image.newBuilder()
        .setUri(overlayImageUri)
        .setResolution(NormalizedCoordinate.newBuilder().setX(1).setY(0.5).build())
        .setAlpha(1)
        .build();

    // Create the starting animation (when the overlay appears). Use the values x: 0 and y: 0 to
    // position the top-left corner of the overlay in the top-left corner of the output video.
    Overlay.Animation animationStart = Overlay.Animation.newBuilder()
        .setAnimationStatic(AnimationStatic.newBuilder()
            .setXy(NormalizedCoordinate.newBuilder().setX(0).setY(0).build())
            .setStartTimeOffset(Duration.newBuilder().setSeconds(0).build())
            .build())
        .build();
    // Create the ending animation (when the overlay disappears). In this example, the overlay
    // disappears at the 10-second mark in the output video.
    Overlay.Animation animationEnd = Overlay.Animation.newBuilder()
        .setAnimationEnd(AnimationEnd.newBuilder()
            .setStartTimeOffset(Duration.newBuilder().setSeconds(10).build())
            .build())
        .build();
    // Create the overlay and add the image and animations to it.
    Overlay overlay = Overlay.newBuilder()
        .setImage(overlayImage)
        .addAnimations(animationStart)
        .addAnimations(animationEnd)
        .build();

    JobConfig config = JobConfig.newBuilder()
        .addInputs(Input.newBuilder().setKey("input0").setUri(inputUri))
        .setOutput(Output.newBuilder().setUri(outputUri))
        .addElementaryStreams(
            ElementaryStream.newBuilder().setKey("video_stream0").setVideoStream(videoStream0))
        .addElementaryStreams(
            ElementaryStream.newBuilder().setKey("audio_stream0").setAudioStream(audioStream0))
        .addMuxStreams(MuxStream.newBuilder()
            .setKey("sd").setContainer("mp4")
            .addElementaryStreams("video_stream0")
            .addElementaryStreams("audio_stream0")
            .build())
        // Add the overlay to the job config.
        .addOverlays(overlay)
        .build();

    var createJobRequest = CreateJobRequest.newBuilder()
        .setJob(Job.newBuilder()
            .setInputUri(inputUri).setOutputUri(outputUri).setConfig(config).build())
        .setParent(LocationName.of(projectId, location).toString())
        .build();

    // Send the job creation request and process the response.
    Job job = transcoderServiceClient.createJob(createJobRequest);
    System.out.println("Job: " + job.getName());
  }
}
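A minimal sketch of a possible invocation; the project ID, location, and gs:// URIs are placeholders and not part of the original sample:
public static void main(String[] args) throws IOException {
  // Hypothetical values; the overlay must be a JPEG stored in Cloud Storage.
  createJobWithStaticOverlay(
      "my-project-id", "us-central1",
      "gs://my-bucket/my-input-video.mp4",
      "gs://my-bucket/overlay-image.jpg",
      "gs://my-bucket/output-folder/");
}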
Use of com.google.cloud.video.livestream.v1.AudioStream in project java-docs-samples by GoogleCloudPlatform.
The class CreateChannelWithBackupInput, method createChannelWithBackupInput:
public static void createChannelWithBackupInput(String projectId, String location,
    String channelId, String primaryInputId, String backupInputId, String outputUri)
    throws InterruptedException, ExecutionException, TimeoutException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (LivestreamServiceClient livestreamServiceClient = LivestreamServiceClient.create()) {
    VideoStream videoStream = VideoStream.newBuilder()
        .setH264(H264CodecSettings.newBuilder()
            .setProfile("main").setBitrateBps(1000000).setFrameRate(30)
            .setHeightPixels(720).setWidthPixels(1280))
        .build();
    AudioStream audioStream = AudioStream.newBuilder()
        .setCodec("aac").setChannelCount(2).setBitrateBps(160000).build();

    var createChannelRequest = CreateChannelRequest.newBuilder()
        .setParent(LocationName.of(projectId, location).toString())
        .setChannelId(channelId)
        .setChannel(Channel.newBuilder()
            // The primary input automatically fails over to the backup input.
            .addInputAttachments(0, InputAttachment.newBuilder()
                .setKey("my-primary-input")
                .setInput(InputName.of(projectId, location, primaryInputId).toString())
                .setAutomaticFailover(
                    AutomaticFailover.newBuilder().addInputKeys("my-backup-input").build())
                .build())
            .addInputAttachments(1, InputAttachment.newBuilder()
                .setKey("my-backup-input")
                .setInput(InputName.of(projectId, location, backupInputId).toString()))
            .setOutput(Output.newBuilder().setUri(outputUri).build())
            .addElementaryStreams(
                ElementaryStream.newBuilder().setKey("es_video").setVideoStream(videoStream))
            .addElementaryStreams(
                ElementaryStream.newBuilder().setKey("es_audio").setAudioStream(audioStream))
            .addMuxStreams(MuxStream.newBuilder()
                .setKey("mux_video")
                .addElementaryStreams("es_video")
                .setSegmentSettings(SegmentSettings.newBuilder()
                    .setSegmentDuration(Duration.newBuilder().setSeconds(2).build()).build())
                .build())
            .addMuxStreams(MuxStream.newBuilder()
                .setKey("mux_audio")
                .addElementaryStreams("es_audio")
                .setSegmentSettings(SegmentSettings.newBuilder()
                    .setSegmentDuration(Duration.newBuilder().setSeconds(2).build()).build())
                .build())
            .addManifests(Manifest.newBuilder()
                .setFileName("manifest.m3u8")
                .setType(ManifestType.HLS)
                .addMuxStreams("mux_video")
                .addMuxStreams("mux_audio")
                .setMaxSegmentCount(5)
                .build()))
        .build();

    Channel result =
        livestreamServiceClient.createChannelAsync(createChannelRequest).get(1, TimeUnit.MINUTES);
    System.out.println("Channel: " + result.getName());
  }
}
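A minimal sketch of a possible invocation; the project ID, location, channel ID, input IDs, and output URI are placeholders, and the referenced input endpoints are assumed to already exist in the project:
public static void main(String[] args)
    throws InterruptedException, ExecutionException, TimeoutException, IOException {
  // Hypothetical values; replace with your own resources.
  createChannelWithBackupInput(
      "my-project-id", "us-central1", "my-channel",
      "my-primary-input-id", "my-backup-input-id",
      "gs://my-bucket/channel-output/");
}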
Use of com.google.cloud.video.transcoder.v1.AudioStream in project java-docs-samples by GoogleCloudPlatform.
The class CreateJobWithAnimatedOverlay, method createJobWithAnimatedOverlay:
// Creates a job from an ad-hoc configuration and adds an animated overlay to it.
public static void createJobWithAnimatedOverlay(String projectId, String location,
    String inputUri, String overlayImageUri, String outputUri) throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.create()) {
    VideoStream videoStream0 = VideoStream.newBuilder()
        .setH264(VideoStream.H264CodecSettings.newBuilder()
            .setBitrateBps(550000).setFrameRate(60).setHeightPixels(360).setWidthPixels(640))
        .build();
    AudioStream audioStream0 =
        AudioStream.newBuilder().setCodec("aac").setBitrateBps(64000).build();

    // Create the overlay image. Only JPEG is supported. Image resolution is based on the output
    // video resolution. This example uses the values x: 0 and y: 0 to maintain the original
    // resolution of the overlay image.
    Overlay.Image overlayImage = Overlay.Image.newBuilder()
        .setUri(overlayImageUri)
        .setResolution(NormalizedCoordinate.newBuilder().setX(0).setY(0).build())
        .setAlpha(1)
        .build();

    // Create the starting animation (when the overlay starts to fade in). Use the values x: 0.5
    // and y: 0.5 to position the top-left corner of the overlay at the center of the output
    // video. The overlay starts to fade in at the 5-second mark.
    Overlay.Animation animationFadeIn = Animation.newBuilder()
        .setAnimationFade(AnimationFade.newBuilder()
            .setFadeType(FadeType.FADE_IN)
            .setXy(NormalizedCoordinate.newBuilder().setX(0.5).setY(0.5).build())
            .setStartTimeOffset(Duration.newBuilder().setSeconds(5).build())
            .setEndTimeOffset(Duration.newBuilder().setSeconds(10).build())
            .build())
        .build();
    // Create the ending animation (when the overlay starts to fade out). The overlay starts to
    // fade out at the 12-second mark in the output video.
    Overlay.Animation animationFadeOut = Animation.newBuilder()
        .setAnimationFade(AnimationFade.newBuilder()
            .setFadeType(FadeType.FADE_OUT)
            .setXy(NormalizedCoordinate.newBuilder().setX(0.5).setY(0.5).build())
            .setStartTimeOffset(Duration.newBuilder().setSeconds(12).build())
            .setEndTimeOffset(Duration.newBuilder().setSeconds(15).build())
            .build())
        .build();
    // Create the overlay and add the image and animations to it.
    Overlay overlay = Overlay.newBuilder()
        .setImage(overlayImage)
        .addAnimations(animationFadeIn)
        .addAnimations(animationFadeOut)
        .build();

    JobConfig config = JobConfig.newBuilder()
        .addInputs(Input.newBuilder().setKey("input0").setUri(inputUri))
        .setOutput(Output.newBuilder().setUri(outputUri))
        .addElementaryStreams(
            ElementaryStream.newBuilder().setKey("video_stream0").setVideoStream(videoStream0))
        .addElementaryStreams(
            ElementaryStream.newBuilder().setKey("audio_stream0").setAudioStream(audioStream0))
        .addMuxStreams(MuxStream.newBuilder()
            .setKey("sd").setContainer("mp4")
            .addElementaryStreams("video_stream0")
            .addElementaryStreams("audio_stream0")
            .build())
        // Add the overlay to the job config.
        .addOverlays(overlay)
        .build();

    var createJobRequest = CreateJobRequest.newBuilder()
        .setJob(Job.newBuilder()
            .setInputUri(inputUri).setOutputUri(outputUri).setConfig(config).build())
        .setParent(LocationName.of(projectId, location).toString())
        .build();

    // Send the job creation request and process the response.
    Job job = transcoderServiceClient.createJob(createJobRequest);
    System.out.println("Job: " + job.getName());
  }
}
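A minimal sketch of a possible invocation; the project ID, location, and gs:// URIs are placeholders, not values from the original sample:
public static void main(String[] args) throws IOException {
  // Hypothetical values; the overlay must be a JPEG stored in Cloud Storage.
  createJobWithAnimatedOverlay(
      "my-project-id", "us-central1",
      "gs://my-bucket/my-input-video.mp4",
      "gs://my-bucket/overlay-image.jpg",
      "gs://my-bucket/output-folder/");
}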
Use of com.google.cloud.video.transcoder.v1.AudioStream in project java-docs-samples by GoogleCloudPlatform.
The class CreateJobWithConcatenatedInputs, method createJobWithConcatenatedInputs:
// Creates a job from an ad-hoc configuration that concatenates two input videos.
public static void createJobWithConcatenatedInputs(String projectId, String location,
    String inputUri1, Duration startTimeInput1, Duration endTimeInput1,
    String inputUri2, Duration startTimeInput2, Duration endTimeInput2,
    String outputUri) throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (TranscoderServiceClient transcoderServiceClient = TranscoderServiceClient.create()) {
    VideoStream videoStream0 = VideoStream.newBuilder()
        .setH264(VideoStream.H264CodecSettings.newBuilder()
            .setBitrateBps(550000).setFrameRate(60).setHeightPixels(360).setWidthPixels(640))
        .build();
    AudioStream audioStream0 =
        AudioStream.newBuilder().setCodec("aac").setBitrateBps(64000).build();

    JobConfig config = JobConfig.newBuilder()
        .addInputs(Input.newBuilder().setKey("input1").setUri(inputUri1))
        .addInputs(Input.newBuilder().setKey("input2").setUri(inputUri2))
        .setOutput(Output.newBuilder().setUri(outputUri))
        .addElementaryStreams(
            ElementaryStream.newBuilder().setKey("video_stream0").setVideoStream(videoStream0))
        .addElementaryStreams(
            ElementaryStream.newBuilder().setKey("audio_stream0").setAudioStream(audioStream0))
        .addMuxStreams(MuxStream.newBuilder()
            .setKey("sd").setContainer("mp4")
            .addElementaryStreams("video_stream0")
            .addElementaryStreams("audio_stream0")
            .build())
        // Add the two trimmed inputs to the edit list; the first argument is the index in the
        // edit list, and the atoms are concatenated in index order.
        .addEditList(0, EditAtom.newBuilder()
            .setKey("atom1").addInputs("input1")
            .setStartTimeOffset(startTimeInput1).setEndTimeOffset(endTimeInput1)
            .build())
        .addEditList(1, EditAtom.newBuilder()
            .setKey("atom2").addInputs("input2")
            .setStartTimeOffset(startTimeInput2).setEndTimeOffset(endTimeInput2)
            .build())
        .build();

    var createJobRequest = CreateJobRequest.newBuilder()
        .setJob(Job.newBuilder().setOutputUri(outputUri).setConfig(config).build())
        .setParent(LocationName.of(projectId, location).toString())
        .build();

    // Send the job creation request and process the response.
    Job job = transcoderServiceClient.createJob(createJobRequest);
    System.out.println("Job: " + job.getName());
  }
}
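A minimal sketch of a possible invocation; the project ID, location, gs:// URIs, and trim offsets below are placeholder values chosen for illustration (Duration here is com.google.protobuf.Duration):
public static void main(String[] args) throws IOException {
  // Hypothetical values: use the first 8.1 seconds of video1 and seconds 3.5-15 of video2.
  createJobWithConcatenatedInputs(
      "my-project-id", "us-central1",
      "gs://my-bucket/video1.mp4",
      Duration.newBuilder().setSeconds(0).build(),
      Duration.newBuilder().setSeconds(8).setNanos(100_000_000).build(),
      "gs://my-bucket/video2.mp4",
      Duration.newBuilder().setSeconds(3).setNanos(500_000_000).build(),
      Duration.newBuilder().setSeconds(15).build(),
      "gs://my-bucket/output-folder/");
}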