Use of com.google.cloud.documentai.v1beta2.OperationMetadata in project spring-cloud-gcp by GoogleCloudPlatform.
The class DocumentOcrTemplate, method runOcrForDocument.
/**
* Runs OCR processing for a specified {@code document} and generates OCR output files under the
* path specified by {@code outputFilePathPrefix}.
*
* <p>For example, if you specify an {@code outputFilePathPrefix} of
* "gs://bucket_name/ocr_results/myDoc_", all the output files of OCR processing will be saved
* under that prefix, such as:
*
* <ul>
* <li>gs://bucket_name/ocr_results/myDoc_output-1-to-5.json
* <li>gs://bucket_name/ocr_results/myDoc_output-6-to-10.json
* <li>gs://bucket_name/ocr_results/myDoc_output-11-to-15.json
* </ul>
*
* <p>Note: OCR processing operations may take several minutes to complete, so it may not be
* advisable to block on the completion of the operation. One may use the returned {@link
* ListenableFuture} to register callbacks or track the status of the operation.
*
* @param document The {@link GoogleStorageLocation} of the document to run OCR processing on
* @param outputFilePathPrefix The {@link GoogleStorageLocation} of a file, folder, or bucket
* describing the path under which all output files shall be saved
* @return A {@link ListenableFuture} allowing you to register callbacks or wait for the
* completion of the operation.
*/
public ListenableFuture<DocumentOcrResultSet> runOcrForDocument(GoogleStorageLocation document, GoogleStorageLocation outputFilePathPrefix) {
Assert.isTrue(document.isFile(), "Provided document location is not a valid file location: " + document);
GcsSource gcsSource = GcsSource.newBuilder().setUri(document.uriString()).build();
String contentType = extractContentType(document);
InputConfig inputConfig =
    InputConfig.newBuilder().setMimeType(contentType).setGcsSource(gcsSource).build();
GcsDestination gcsDestination =
    GcsDestination.newBuilder().setUri(outputFilePathPrefix.uriString()).build();
OutputConfig outputConfig = OutputConfig.newBuilder()
    .setGcsDestination(gcsDestination).setBatchSize(this.jsonOutputBatchSize).build();
AsyncAnnotateFileRequest request = AsyncAnnotateFileRequest.newBuilder()
    .addFeatures(DOCUMENT_OCR_FEATURE).setInputConfig(inputConfig).setOutputConfig(outputConfig).build();
OperationFuture<AsyncBatchAnnotateFilesResponse, OperationMetadata> result =
    imageAnnotatorClient.asyncBatchAnnotateFilesAsync(Collections.singletonList(request));
return extractOcrResultFuture(result);
}
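As the Javadoc notes, OCR can take minutes, so a caller typically registers callbacks on the returned future rather than blocking. Below is a minimal sketch of such a caller; the ocrTemplate instance, bucket name, and object paths are assumptions, not part of the original class.

// Minimal sketch of a non-blocking caller; ocrTemplate and the storage paths are placeholders.
ListenableFuture<DocumentOcrResultSet> ocrFuture = ocrTemplate.runOcrForDocument(
    GoogleStorageLocation.forFile("bucket_name", "documents/myDoc.pdf"),
    GoogleStorageLocation.forFile("bucket_name", "ocr_results/myDoc_"));
ocrFuture.addCallback(new ListenableFutureCallback<DocumentOcrResultSet>() {
  @Override
  public void onSuccess(DocumentOcrResultSet resultSet) {
    // Runs once the long-running OCR operation completes successfully.
    System.out.println("OCR output written under gs://bucket_name/ocr_results/myDoc_");
  }

  @Override
  public void onFailure(Throwable throwable) {
    System.err.println("OCR processing failed: " + throwable.getMessage());
  }
});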
Use of com.google.cloud.documentai.v1beta2.OperationMetadata in project java-automl by googleapis.
The class LanguageTextClassificationCreateDataset, method createDataset.
// Create a dataset
static void createDataset(String projectId, String displayName) throws IOException, ExecutionException, InterruptedException {
// the "close" method on the client to safely clean up any remaining background resources.
try (AutoMlClient client = AutoMlClient.create()) {
// A resource that represents a Google Cloud Platform location.
LocationName projectLocation = LocationName.of(projectId, "us-central1");
// Specify the classification type
// Types:
// MultiLabel: Multiple labels are allowed for one example.
// MultiClass: At most one label is allowed per example.
ClassificationType classificationType = ClassificationType.MULTILABEL;
// Specify the text classification type for the dataset.
TextClassificationDatasetMetadata metadata =
    TextClassificationDatasetMetadata.newBuilder().setClassificationType(classificationType).build();
Dataset dataset = Dataset.newBuilder()
    .setDisplayName(displayName).setTextClassificationDatasetMetadata(metadata).build();
OperationFuture<Dataset, OperationMetadata> future = client.createDatasetAsync(projectLocation, dataset);
Dataset createdDataset = future.get();
// Display the dataset information.
System.out.format("Dataset name: %s\n", createdDataset.getName());
// To get the dataset ID, parse it out of the `name` field, since dataset IDs are
// required for other methods.
// Name Form: `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`
String[] names = createdDataset.getName().split("/");
String datasetId = names[names.length - 1];
System.out.format("Dataset id: %s\n", datasetId);
}
}
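The parsed dataset ID is what later AutoML calls expect. The sketch below is not part of the original sample; it shows one way the ID might be reused, here with getDataset, and assumes the dataset lives in us-central1.

// Sketch: rebuilding the full resource name from a dataset ID for a follow-up call.
static void printDatasetDisplayName(String projectId, String datasetId) throws IOException {
  try (AutoMlClient client = AutoMlClient.create()) {
    DatasetName datasetFullId = DatasetName.of(projectId, "us-central1", datasetId);
    Dataset dataset = client.getDataset(datasetFullId);
    System.out.format("Dataset display name: %s%n", dataset.getDisplayName());
  }
}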
Use of com.google.cloud.documentai.v1beta2.OperationMetadata in project java-automl by googleapis.
The class UndeployModel, method undeployModel.
// Undeploy a model from prediction
static void undeployModel(String projectId, String modelId) throws IOException, ExecutionException, InterruptedException {
// the "close" method on the client to safely clean up any remaining background resources.
try (AutoMlClient client = AutoMlClient.create()) {
// Get the full path of the model.
ModelName modelFullId = ModelName.of(projectId, "us-central1", modelId);
UndeployModelRequest request = UndeployModelRequest.newBuilder().setName(modelFullId.toString()).build();
OperationFuture<Empty, OperationMetadata> future = client.undeployModelAsync(request);
future.get();
System.out.println("Model undeployment finished");
}
}
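future.get() above blocks indefinitely. A bounded wait is a common variant; the sketch below mirrors the sample but adds a timeout, and the 30-minute value is an arbitrary assumption.

// Sketch: same undeploy call, but with a bounded wait instead of an unbounded future.get().
static void undeployModelWithTimeout(String projectId, String modelId)
    throws IOException, ExecutionException, InterruptedException {
  try (AutoMlClient client = AutoMlClient.create()) {
    ModelName modelFullId = ModelName.of(projectId, "us-central1", modelId);
    UndeployModelRequest request =
        UndeployModelRequest.newBuilder().setName(modelFullId.toString()).build();
    OperationFuture<Empty, OperationMetadata> future = client.undeployModelAsync(request);
    try {
      future.get(30, TimeUnit.MINUTES); // the timeout value is an arbitrary choice
      System.out.println("Model undeployment finished");
    } catch (TimeoutException e) {
      System.err.println("Undeploy operation did not complete within 30 minutes");
    }
  }
}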
Use of com.google.cloud.documentai.v1beta2.OperationMetadata in project java-automl by googleapis.
The class VisionClassificationCreateModel, method createModel.
// Create a model
static void createModel(String projectId, String datasetId, String displayName) throws IOException, ExecutionException, InterruptedException {
// the "close" method on the client to safely clean up any remaining background resources.
try (AutoMlClient client = AutoMlClient.create()) {
// A resource that represents a Google Cloud Platform location.
LocationName projectLocation = LocationName.of(projectId, "us-central1");
// Set model metadata.
ImageClassificationModelMetadata metadata =
    ImageClassificationModelMetadata.newBuilder().setTrainBudgetMilliNodeHours(24000).build();
Model model = Model.newBuilder().setDisplayName(displayName).setDatasetId(datasetId)
    .setImageClassificationModelMetadata(metadata).build();
// Create a model with the model metadata in the region.
OperationFuture<Model, OperationMetadata> future = client.createModelAsync(projectLocation, model);
// OperationFuture.get() will block until the model is created, which may take several hours.
// You can use OperationFuture.getInitialFuture to get a future representing the initial
// response to the request, which contains information while the operation is in progress.
System.out.format("Training operation name: %s\n", future.getInitialFuture().get().getName());
System.out.println("Training started...");
}
}
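The training operation name printed above can be used later to check whether training has finished, without keeping the original future around. A sketch of that follow-up, using the standard long-running-operations client (Operation here is com.google.longrunning.Operation; the operation name value comes from the output of createModel):

// Sketch: polling a training operation by the name printed by createModel.
static void getOperationStatus(String operationFullId) throws IOException {
  try (AutoMlClient client = AutoMlClient.create()) {
    Operation operation = client.getOperationsClient().getOperation(operationFullId);
    System.out.format("Operation name: %s%n", operation.getName());
    System.out.format("Done: %s%n", operation.getDone());
  }
}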
Use of com.google.cloud.documentai.v1beta2.OperationMetadata in project java-automl by googleapis.
The class ClassificationDeployModelNodeCount, method classificationDeployModelNodeCount.
// Deploy a model with a specified node count
static void classificationDeployModelNodeCount(String projectId, String modelId) throws IOException, ExecutionException, InterruptedException {
// the "close" method on the client to safely clean up any remaining background resources.
try (AutoMlClient client = AutoMlClient.create()) {
// Get the full path of the model.
ModelName modelFullId = ModelName.of(projectId, "us-central1", modelId);
// Set how many nodes the model is deployed on
ImageClassificationModelDeploymentMetadata deploymentMetadata =
    ImageClassificationModelDeploymentMetadata.newBuilder().setNodeCount(2).build();
DeployModelRequest request = DeployModelRequest.newBuilder().setName(modelFullId.toString())
    .setImageClassificationModelDeploymentMetadata(deploymentMetadata).build();
// Deploy the model
OperationFuture<Empty, OperationMetadata> future = client.deployModelAsync(request);
future.get();
System.out.println("Model deployment on 2 nodes finished");
}
}
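A follow-up sketch, not part of the original sample, that verifies the result by reading the model's deployment state after the operation completes (the project and model IDs are whatever was passed above, and us-central1 is assumed):

// Sketch: confirming that the model reports a deployed state after deployment finishes.
static void printDeploymentState(String projectId, String modelId) throws IOException {
  try (AutoMlClient client = AutoMlClient.create()) {
    ModelName modelFullId = ModelName.of(projectId, "us-central1", modelId);
    Model model = client.getModel(modelFullId);
    System.out.format("Model deployment state: %s%n", model.getDeploymentState());
  }
}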