Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project java-docs-samples by GoogleCloudPlatform.
The class Detect, method detectProperties.
/**
 * Detects image properties such as color frequency from the specified local image.
 *
 * @param filePath The path to the local file on which to detect properties.
 * @param out A {@link PrintStream} to write the results to.
 * @throws Exception on errors while closing the client.
 * @throws IOException on Input/Output errors.
 */
public static void detectProperties(String filePath, PrintStream out) throws Exception, IOException {
  List<AnnotateImageRequest> requests = new ArrayList<>();

  ByteString imgBytes = ByteString.readFrom(new FileInputStream(filePath));

  Image img = Image.newBuilder().setContent(imgBytes).build();
  Feature feat = Feature.newBuilder().setType(Type.IMAGE_PROPERTIES).build();
  AnnotateImageRequest request =
      AnnotateImageRequest.newBuilder().addFeatures(feat).setImage(img).build();
  requests.add(request);

  try (ImageAnnotatorClient client = ImageAnnotatorClient.create()) {
    BatchAnnotateImagesResponse response = client.batchAnnotateImages(requests);
    List<AnnotateImageResponse> responses = response.getResponsesList();

    for (AnnotateImageResponse res : responses) {
      if (res.hasError()) {
        out.printf("Error: %s\n", res.getError().getMessage());
        return;
      }

      // For the full list of available annotations, see http://g.co/cloud/vision/docs
      DominantColorsAnnotation colors = res.getImagePropertiesAnnotation().getDominantColors();
      for (ColorInfo color : colors.getColorsList()) {
        out.printf(
            "fraction: %f\nr: %f, g: %f, b: %f\n",
            color.getPixelFraction(),
            color.getColor().getRed(),
            color.getColor().getGreen(),
            color.getColor().getBlue());
      }
    }
  }
}
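For reference, a minimal caller sketch (not part of the sample itself); the image path below is a placeholder, and it assumes the Detect class is on the classpath with default Cloud Vision credentials configured:

public class DetectPropertiesExample {
  public static void main(String[] args) throws Exception {
    // Placeholder path to a local image; replace with a real file.
    String filePath = "./resources/wakeupcat.jpg";
    // System.out is a PrintStream, so the results are printed to the console.
    Detect.detectProperties(filePath, System.out);
  }
}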
Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project java-docs-samples by GoogleCloudPlatform.
The class Detect, method detectCropHints.
// [END vision_web_entities_include_geo_results_uri]
/**
 * Suggests a region to crop to for a local file.
 *
 * @param filePath The path to the local file used for crop hints detection.
 * @param out A {@link PrintStream} to write the results to.
 * @throws Exception on errors while closing the client.
 * @throws IOException on Input/Output errors.
 */
public static void detectCropHints(String filePath, PrintStream out) throws Exception, IOException {
  List<AnnotateImageRequest> requests = new ArrayList<>();

  ByteString imgBytes = ByteString.readFrom(new FileInputStream(filePath));

  Image img = Image.newBuilder().setContent(imgBytes).build();
  Feature feat = Feature.newBuilder().setType(Type.CROP_HINTS).build();
  AnnotateImageRequest request =
      AnnotateImageRequest.newBuilder().addFeatures(feat).setImage(img).build();
  requests.add(request);

  try (ImageAnnotatorClient client = ImageAnnotatorClient.create()) {
    BatchAnnotateImagesResponse response = client.batchAnnotateImages(requests);
    List<AnnotateImageResponse> responses = response.getResponsesList();

    for (AnnotateImageResponse res : responses) {
      if (res.hasError()) {
        out.printf("Error: %s\n", res.getError().getMessage());
        return;
      }

      // For the full list of available annotations, see http://g.co/cloud/vision/docs
      CropHintsAnnotation annotation = res.getCropHintsAnnotation();
      for (CropHint hint : annotation.getCropHintsList()) {
        out.println(hint.getBoundingPoly());
      }
    }
  }
}
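Similarly, a hedged usage sketch for the crop hints method; the path is again a placeholder:

public class DetectCropHintsExample {
  public static void main(String[] args) throws Exception {
    // Placeholder path to a local image; replace with a real file.
    Detect.detectCropHints("./resources/landmark.jpg", System.out);
  }
}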
Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project java-docs-samples by GoogleCloudPlatform.
The class Detect, method detectWebEntitiesIncludeGeoResults.
// [START vision_web_entities_include_geo_results]
/**
 * Find web entities given a local image.
 *
 * @param filePath The path of the image to detect.
 * @param out A {@link PrintStream} to write the results to.
 * @throws Exception on errors while closing the client.
 * @throws IOException on Input/Output errors.
 */
public static void detectWebEntitiesIncludeGeoResults(String filePath, PrintStream out)
    throws Exception, IOException {
  // Instantiates a client
  try (ImageAnnotatorClient client = ImageAnnotatorClient.create()) {
    // Read in the local image
    ByteString contents = ByteString.readFrom(new FileInputStream(filePath));

    // Build the image
    Image image = Image.newBuilder().setContent(contents).build();

    // Enable `IncludeGeoResults`
    WebDetectionParams webDetectionParams =
        WebDetectionParams.newBuilder().setIncludeGeoResults(true).build();

    // Set the parameters for the image
    ImageContext imageContext =
        ImageContext.newBuilder().setWebDetectionParams(webDetectionParams).build();

    // Create the request with the image, imageContext, and the specified feature: web detection
    AnnotateImageRequest request =
        AnnotateImageRequest.newBuilder()
            .addFeatures(Feature.newBuilder().setType(Type.WEB_DETECTION))
            .setImage(image)
            .setImageContext(imageContext)
            .build();

    // Perform the request
    BatchAnnotateImagesResponse response = client.batchAnnotateImages(Arrays.asList(request));

    // Display the results
    response.getResponsesList().stream()
        .forEach(
            r ->
                r.getWebDetection().getWebEntitiesList().stream()
                    .forEach(
                        entity -> {
                          out.format("Description: %s\n", entity.getDescription());
                          out.format("Score: %f\n", entity.getScore());
                        }));
  }
}
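A possible invocation, assuming a placeholder image path and configured Cloud Vision credentials:

public class DetectWebEntitiesGeoExample {
  public static void main(String[] args) throws Exception {
    // Placeholder path to a local image; replace with a real file.
    Detect.detectWebEntitiesIncludeGeoResults("./resources/city.jpg", System.out);
  }
}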
Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project java-docs-samples by GoogleCloudPlatform.
The class SynthesizeText, method synthesizeSsml.
// [END tts_synthesize_text]
// [START tts_synthesize_ssml]
/**
 * Demonstrates using the Text-to-Speech client to synthesize text or SSML.
 *
 * Note: the SSML must be well-formed according to https://www.w3.org/TR/speech-synthesis/
 * Example: {@code <speak>Hello there.</speak>}
 *
 * @param ssml the SSML document to be synthesized (e.g., "<?xml...")
 * @throws Exception on TextToSpeechClient errors.
 */
public static void synthesizeSsml(String ssml) throws Exception {
  // Instantiates a client
  try (TextToSpeechClient textToSpeechClient = TextToSpeechClient.create()) {
    // Set the SSML input to be synthesized
    SynthesisInput input = SynthesisInput.newBuilder().setSsml(ssml).build();

    // Build the voice request: languageCode = "en-US", ssmlVoiceGender = SsmlVoiceGender.FEMALE
    VoiceSelectionParams voice =
        VoiceSelectionParams.newBuilder()
            .setLanguageCode("en-US")
            .setSsmlGender(SsmlVoiceGender.FEMALE)
            .build();

    // Select the type of audio file you want returned: MP3 audio
    AudioConfig audioConfig =
        AudioConfig.newBuilder().setAudioEncoding(AudioEncoding.MP3).build();

    // Perform the text-to-speech request
    SynthesizeSpeechResponse response =
        textToSpeechClient.synthesizeSpeech(input, voice, audioConfig);

    // Get the audio contents from the response
    ByteString audioContents = response.getAudioContent();

    // Write the response to the output file
    try (OutputStream out = new FileOutputStream("output.mp3")) {
      out.write(audioContents.toByteArray());
      System.out.println("Audio content written to file \"output.mp3\"");
    }
  }
}
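A minimal caller sketch, assuming the SynthesizeText class is on the classpath and Text-to-Speech credentials are configured; the SSML string is just an illustrative example:

public class SynthesizeSsmlExample {
  public static void main(String[] args) throws Exception {
    // Illustrative, well-formed SSML document.
    String ssml = "<speak>Hello there.</speak>";
    // Writes the synthesized audio to output.mp3 in the working directory.
    SynthesizeText.synthesizeSsml(ssml);
  }
}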
Use of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString in project java-docs-samples by GoogleCloudPlatform.
The class SynthesizeFile, method synthesizeSsmlFile.
// [END tts_synthesize_text_file]
// [START tts_synthesize_ssml_file]
/**
 * Demonstrates using the Text-to-Speech client to synthesize a text file or SSML file.
 *
 * @param ssmlFile the SSML file to be synthesized (e.g., hello.ssml)
 * @throws Exception on TextToSpeechClient errors.
 */
public static void synthesizeSsmlFile(String ssmlFile) throws Exception {
  // Instantiates a client
  try (TextToSpeechClient textToSpeechClient = TextToSpeechClient.create()) {
    // Read the file's contents
    String contents = new String(Files.readAllBytes(Paths.get(ssmlFile)));

    // Set the SSML input to be synthesized
    SynthesisInput input = SynthesisInput.newBuilder().setSsml(contents).build();

    // Build the voice request: languageCode = "en-US", ssmlVoiceGender = SsmlVoiceGender.FEMALE
    VoiceSelectionParams voice =
        VoiceSelectionParams.newBuilder()
            .setLanguageCode("en-US")
            .setSsmlGender(SsmlVoiceGender.FEMALE)
            .build();

    // Select the type of audio file you want returned: MP3 audio
    AudioConfig audioConfig =
        AudioConfig.newBuilder().setAudioEncoding(AudioEncoding.MP3).build();

    // Perform the text-to-speech request
    SynthesizeSpeechResponse response =
        textToSpeechClient.synthesizeSpeech(input, voice, audioConfig);

    // Get the audio contents from the response
    ByteString audioContents = response.getAudioContent();

    // Write the response to the output file
    try (OutputStream out = new FileOutputStream("output.mp3")) {
      out.write(audioContents.toByteArray());
      System.out.println("Audio content written to file \"output.mp3\"");
    }
  }
}
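And a corresponding sketch for the file-based variant; hello.ssml is a hypothetical input file:

public class SynthesizeSsmlFileExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical SSML file; replace with a real path.
    SynthesizeFile.synthesizeSsmlFile("resources/hello.ssml");
  }
}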