Use of com.google.cloud.bigquery.connection.v1.LocationName in project java-aiplatform by googleapis.
From the class CreateTrainingPipelineTextSentimentAnalysisSample, the method createTrainingPipelineTextSentimentAnalysisSample:
static void createTrainingPipelineTextSentimentAnalysisSample(String project, String trainingPipelineDisplayName, String datasetId, String modelDisplayName) throws IOException {
PipelineServiceSettings pipelineServiceSettings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
// Initialize client that will be used to send requests. This client only needs to be created once, and can be reused for multiple requests. After completing all of your requests, call the "close" method on the client to safely clean up any remaining background resources.
try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create(pipelineServiceSettings)) {
String location = "us-central1";
String trainingTaskDefinition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/" + "automl_text_sentiment_1.0.0.yaml";
LocationName locationName = LocationName.of(project, location);
AutoMlTextSentimentInputs trainingTaskInputs = AutoMlTextSentimentInputs.newBuilder().setSentimentMax(4).build();
InputDataConfig trainingInputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).build();
Model model = Model.newBuilder().setDisplayName(modelDisplayName).build();
TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().setDisplayName(trainingPipelineDisplayName).setTrainingTaskDefinition(trainingTaskDefinition).setTrainingTaskInputs(ValueConverter.toValue(trainingTaskInputs)).setInputDataConfig(trainingInputDataConfig).setModelToUpload(model).build();
TrainingPipeline trainingPipelineResponse = pipelineServiceClient.createTrainingPipeline(locationName, trainingPipeline);
System.out.println("Create Training Pipeline Text Sentiment Analysis Response");
System.out.format("\tName: %s\n", trainingPipelineResponse.getName());
System.out.format("\tDisplay Name: %s\n", trainingPipelineResponse.getDisplayName());
System.out.format("\tTraining Task Definition: %s\n", trainingPipelineResponse.getTrainingTaskDefinition());
System.out.format("\tTraining Task Inputs: %s\n", trainingPipelineResponse.getTrainingTaskInputs());
System.out.format("\tTraining Task Metadata: %s\n", trainingPipelineResponse.getTrainingTaskMetadata());
System.out.format("\tState: %s\n", trainingPipelineResponse.getState());
System.out.format("\tCreate Time: %s\n", trainingPipelineResponse.getCreateTime());
System.out.format("\tStart Time: %s\n", trainingPipelineResponse.getStartTime());
System.out.format("\tEnd Time: %s\n", trainingPipelineResponse.getEndTime());
System.out.format("\tUpdate Time: %s\n", trainingPipelineResponse.getUpdateTime());
System.out.format("\tLabels: %s\n", trainingPipelineResponse.getLabelsMap());
InputDataConfig inputDataConfig = trainingPipelineResponse.getInputDataConfig();
System.out.println("\tInput Data Config");
System.out.format("\t\tDataset Id: %s\n", inputDataConfig.getDatasetId());
System.out.format("\t\tAnnotations Filter: %s\n", inputDataConfig.getAnnotationsFilter());
FractionSplit fractionSplit = inputDataConfig.getFractionSplit();
System.out.println("\t\tFraction Split");
System.out.format("\t\t\tTraining Fraction: %s\n", fractionSplit.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", fractionSplit.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", fractionSplit.getTestFraction());
FilterSplit filterSplit = inputDataConfig.getFilterSplit();
System.out.println("\t\tFilter Split");
System.out.format("\t\t\tTraining Filter: %s\n", filterSplit.getTrainingFilter());
System.out.format("\t\t\tValidation Filter: %s\n", filterSplit.getValidationFilter());
System.out.format("\t\t\tTest Filter: %s\n", filterSplit.getTestFilter());
PredefinedSplit predefinedSplit = inputDataConfig.getPredefinedSplit();
System.out.println("\t\tPredefined Split");
System.out.format("\t\t\tKey: %s\n", predefinedSplit.getKey());
TimestampSplit timestampSplit = inputDataConfig.getTimestampSplit();
System.out.println("\t\tTimestamp Split");
System.out.format("\t\t\tTraining Fraction: %s\n", timestampSplit.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", timestampSplit.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", timestampSplit.getTestFraction());
System.out.format("\t\t\tKey: %s\n", timestampSplit.getKey());
Model modelResponse = trainingPipelineResponse.getModelToUpload();
System.out.println("\tModel To Upload");
System.out.format("\t\tName: %s\n", modelResponse.getName());
System.out.format("\t\tDisplay Name: %s\n", modelResponse.getDisplayName());
System.out.format("\t\tDescription: %s\n", modelResponse.getDescription());
System.out.format("\t\tMetadata Schema Uri: %s\n", modelResponse.getMetadataSchemaUri());
System.out.format("\t\tMetadata: %s\n", modelResponse.getMetadata());
System.out.format("\t\tTraining Pipeline: %s\n", modelResponse.getTrainingPipeline());
System.out.format("\t\tArtifact Uri: %s\n", modelResponse.getArtifactUri());
System.out.format("\t\tSupported Deployment Resources Types: %s\n", modelResponse.getSupportedDeploymentResourcesTypesList());
System.out.format("\t\tSupported Input Storage Formats: %s\n", modelResponse.getSupportedInputStorageFormatsList());
System.out.format("\t\tSupported Output Storage Formats: %s\n", modelResponse.getSupportedOutputStorageFormatsList());
System.out.format("\t\tCreate Time: %s\n", modelResponse.getCreateTime());
System.out.format("\t\tUpdate Time: %s\n", modelResponse.getUpdateTime());
System.out.format("\t\tLabels: %s\n", modelResponse.getLabelsMap());
PredictSchemata predictSchemata = modelResponse.getPredictSchemata();
System.out.println("\t\tPredict Schemata");
System.out.format("\t\t\tInstance Schema Uri: %s\n", predictSchemata.getInstanceSchemaUri());
System.out.format("\t\t\tParameters Schema Uri: %s\n", predictSchemata.getParametersSchemaUri());
System.out.format("\t\t\tPrediction Schema Uri: %s\n", predictSchemata.getPredictionSchemaUri());
for (ExportFormat exportFormat : modelResponse.getSupportedExportFormatsList()) {
System.out.println("\t\tSupported Export Format");
System.out.format("\t\t\tId: %s\n", exportFormat.getId());
}
ModelContainerSpec modelContainerSpec = modelResponse.getContainerSpec();
System.out.println("\t\tContainer Spec");
System.out.format("\t\t\tImage Uri: %s\n", modelContainerSpec.getImageUri());
System.out.format("\t\t\tCommand: %s\n", modelContainerSpec.getCommandList());
System.out.format("\t\t\tArgs: %s\n", modelContainerSpec.getArgsList());
System.out.format("\t\t\tPredict Route: %s\n", modelContainerSpec.getPredictRoute());
System.out.format("\t\t\tHealth Route: %s\n", modelContainerSpec.getHealthRoute());
for (EnvVar envVar : modelContainerSpec.getEnvList()) {
System.out.println("\t\t\tEnv");
System.out.format("\t\t\t\tName: %s\n", envVar.getName());
System.out.format("\t\t\t\tValue: %s\n", envVar.getValue());
}
for (Port port : modelContainerSpec.getPortsList()) {
System.out.println("\t\t\tPort");
System.out.format("\t\t\t\tContainer Port: %s\n", port.getContainerPort());
}
for (DeployedModelRef deployedModelRef : modelResponse.getDeployedModelsList()) {
System.out.println("\t\tDeployed Model");
System.out.format("\t\t\tEndpoint: %s\n", deployedModelRef.getEndpoint());
System.out.format("\t\t\tDeployed Model Id: %s\n", deployedModelRef.getDeployedModelId());
}
Status status = trainingPipelineResponse.getError();
System.out.println("\tError");
System.out.format("\t\tCode: %s\n", status.getCode());
System.out.format("\t\tMessage: %s\n", status.getMessage());
}
}
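A minimal sketch of how this sample might be invoked; the placeholder values below are hypothetical and stand in for your own project ID, display names, and dataset ID.
public static void main(String[] args) throws IOException {
  // TODO(developer): Replace these hypothetical placeholders with real values.
  String project = "YOUR_PROJECT_ID";
  String trainingPipelineDisplayName = "YOUR_TRAINING_PIPELINE_DISPLAY_NAME";
  String datasetId = "YOUR_DATASET_ID";
  String modelDisplayName = "YOUR_MODEL_DISPLAY_NAME";
  createTrainingPipelineTextSentimentAnalysisSample(
      project, trainingPipelineDisplayName, datasetId, modelDisplayName);
}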
Use of com.google.cloud.bigquery.connection.v1.LocationName in project java-aiplatform by googleapis.
From the class CreateTrainingPipelineVideoActionRecognitionSample, the method createTrainingPipelineVideoActionRecognitionSample:
static void createTrainingPipelineVideoActionRecognitionSample(String project, String displayName, String datasetId, String modelDisplayName) throws IOException {
PipelineServiceSettings settings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
String location = "us-central1";
// Initialize client that will be used to send requests. This client only needs to be created once, and can be reused for multiple requests. After completing all of your requests, call the "close" method on the client to safely clean up any remaining background resources.
try (PipelineServiceClient client = PipelineServiceClient.create(settings)) {
AutoMlVideoActionRecognitionInputs trainingTaskInputs = AutoMlVideoActionRecognitionInputs.newBuilder().setModelType(ModelType.CLOUD).build();
InputDataConfig inputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).build();
Model modelToUpload = Model.newBuilder().setDisplayName(modelDisplayName).build();
TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().setDisplayName(displayName).setTrainingTaskDefinition("gs://google-cloud-aiplatform/schema/trainingjob/definition/" + "automl_video_action_recognition_1.0.0.yaml").setTrainingTaskInputs(ValueConverter.toValue(trainingTaskInputs)).setInputDataConfig(inputDataConfig).setModelToUpload(modelToUpload).build();
LocationName parent = LocationName.of(project, location);
TrainingPipeline response = client.createTrainingPipeline(parent, trainingPipeline);
System.out.format("response: %s\n", response);
System.out.format("Name: %s\n", response.getName());
}
}
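Training pipelines run asynchronously, so the returned TrainingPipeline is usually still queued or running when the sample prints it. Below is a hedged sketch of one way to poll for completion using the same Vertex AI PipelineServiceClient shown above; the helper name and the one-minute polling interval are illustrative assumptions, not part of the original sample.
static void waitForTrainingPipeline(PipelineServiceClient client, String pipelineName) throws InterruptedException {
  // Poll until the pipeline reaches a terminal state (SUCCEEDED, FAILED, or CANCELLED).
  TrainingPipeline pipeline = client.getTrainingPipeline(pipelineName);
  while (pipeline.getState() == PipelineState.PIPELINE_STATE_QUEUED
      || pipeline.getState() == PipelineState.PIPELINE_STATE_PENDING
      || pipeline.getState() == PipelineState.PIPELINE_STATE_RUNNING) {
    Thread.sleep(60_000); // wait one minute between polls
    pipeline = client.getTrainingPipeline(pipelineName);
  }
  System.out.format("Final state: %s\n", pipeline.getState());
}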
Use of com.google.cloud.bigquery.connection.v1.LocationName in project java-aiplatform by googleapis.
From the class CreateBatchPredictionJobTextClassificationSample, the method createBatchPredictionJobTextClassificationSample:
static void createBatchPredictionJobTextClassificationSample(String project, String location, String displayName, String modelId, String gcsSourceUri, String gcsDestinationOutputUriPrefix) throws IOException {
// The AI Platform services require regional API endpoints.
JobServiceSettings settings = JobServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
// Initialize client that will be used to send requests. This client only needs to be created once, and can be reused for multiple requests. After completing all of your requests, call the "close" method on the client to safely clean up any remaining background resources.
try (JobServiceClient client = JobServiceClient.create(settings)) {
try {
String modelName = ModelName.of(project, location, modelId).toString();
GcsSource gcsSource = GcsSource.newBuilder().addUris(gcsSourceUri).build();
BatchPredictionJob.InputConfig inputConfig = BatchPredictionJob.InputConfig.newBuilder().setInstancesFormat("jsonl").setGcsSource(gcsSource).build();
GcsDestination gcsDestination = GcsDestination.newBuilder().setOutputUriPrefix(gcsDestinationOutputUriPrefix).build();
BatchPredictionJob.OutputConfig outputConfig = BatchPredictionJob.OutputConfig.newBuilder().setPredictionsFormat("jsonl").setGcsDestination(gcsDestination).build();
BatchPredictionJob batchPredictionJob = BatchPredictionJob.newBuilder().setDisplayName(displayName).setModel(modelName).setInputConfig(inputConfig).setOutputConfig(outputConfig).build();
LocationName parent = LocationName.of(project, location);
BatchPredictionJob response = client.createBatchPredictionJob(parent, batchPredictionJob);
System.out.format("response: %s\n", response);
} catch (ApiException ex) {
System.out.format("Exception: %s\n", ex.getLocalizedMessage());
}
}
}
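A minimal sketch of an invocation, using hypothetical placeholder values; the Cloud Storage URIs would need to point to an existing JSONL input file and a writable output prefix.
public static void main(String[] args) throws IOException {
  // TODO(developer): Replace these hypothetical placeholders with real values.
  String project = "YOUR_PROJECT_ID";
  String location = "us-central1";
  String displayName = "YOUR_JOB_DISPLAY_NAME";
  String modelId = "YOUR_MODEL_ID";
  String gcsSourceUri = "gs://YOUR_BUCKET/input.jsonl";
  String gcsDestinationOutputUriPrefix = "gs://YOUR_BUCKET/output/";
  createBatchPredictionJobTextClassificationSample(
      project, location, displayName, modelId, gcsSourceUri, gcsDestinationOutputUriPrefix);
}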
Use of com.google.cloud.bigquery.connection.v1.LocationName in project java-aiplatform by googleapis.
From the class CreateBatchPredictionJobTextSentimentAnalysisSample, the method createBatchPredictionJobTextSentimentAnalysisSample:
static void createBatchPredictionJobTextSentimentAnalysisSample(String project, String location, String displayName, String modelId, String gcsSourceUri, String gcsDestinationOutputUriPrefix) throws IOException {
// The AI Platform services require regional API endpoints.
JobServiceSettings settings = JobServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
// Initialize client that will be used to send requests. This client only needs to be created once, and can be reused for multiple requests. After completing all of your requests, call the "close" method on the client to safely clean up any remaining background resources.
try (JobServiceClient client = JobServiceClient.create(settings)) {
try {
String modelName = ModelName.of(project, location, modelId).toString();
GcsSource gcsSource = GcsSource.newBuilder().addUris(gcsSourceUri).build();
BatchPredictionJob.InputConfig inputConfig = BatchPredictionJob.InputConfig.newBuilder().setInstancesFormat("jsonl").setGcsSource(gcsSource).build();
GcsDestination gcsDestination = GcsDestination.newBuilder().setOutputUriPrefix(gcsDestinationOutputUriPrefix).build();
BatchPredictionJob.OutputConfig outputConfig = BatchPredictionJob.OutputConfig.newBuilder().setPredictionsFormat("jsonl").setGcsDestination(gcsDestination).build();
BatchPredictionJob batchPredictionJob = BatchPredictionJob.newBuilder().setDisplayName(displayName).setModel(modelName).setInputConfig(inputConfig).setOutputConfig(outputConfig).build();
LocationName parent = LocationName.of(project, location);
BatchPredictionJob response = client.createBatchPredictionJob(parent, batchPredictionJob);
System.out.format("response: %s\n", response);
} catch (ApiException ex) {
System.out.format("Exception: %s\n", ex.getLocalizedMessage());
}
}
}
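Batch prediction jobs also run asynchronously; the response printed above typically shows the job in a queued or pending state. Below is a hedged sketch of polling for completion with the same JobServiceClient; the helper name and polling interval are illustrative assumptions.
static void waitForBatchPredictionJob(JobServiceClient client, String jobName) throws InterruptedException {
  // Poll until the job reaches a terminal state (SUCCEEDED, FAILED, CANCELLED, or EXPIRED).
  BatchPredictionJob job = client.getBatchPredictionJob(jobName);
  while (job.getState() == JobState.JOB_STATE_QUEUED
      || job.getState() == JobState.JOB_STATE_PENDING
      || job.getState() == JobState.JOB_STATE_RUNNING) {
    Thread.sleep(60_000); // wait one minute between polls
    job = client.getBatchPredictionJob(jobName);
  }
  System.out.format("Final state: %s\n", job.getState());
}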
Use of com.google.cloud.bigquery.connection.v1.LocationName in project java-aiplatform by googleapis.
From the class CreateBatchPredictionJobVideoClassificationSample, the method createBatchPredictionJobVideoClassification:
static void createBatchPredictionJobVideoClassification(String batchPredictionDisplayName, String modelId, String gcsSourceUri, String gcsDestinationOutputUriPrefix, String project) throws IOException {
JobServiceSettings jobServiceSettings = JobServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
// Initialize client that will be used to send requests. This client only needs to be created once, and can be reused for multiple requests. After completing all of your requests, call the "close" method on the client to safely clean up any remaining background resources.
try (JobServiceClient jobServiceClient = JobServiceClient.create(jobServiceSettings)) {
String location = "us-central1";
LocationName locationName = LocationName.of(project, location);
VideoClassificationPredictionParams modelParamsObj = VideoClassificationPredictionParams.newBuilder().setConfidenceThreshold(((float) 0.5)).setMaxPredictions(10000).setSegmentClassification(true).setShotClassification(true).setOneSecIntervalClassification(true).build();
Value modelParameters = ValueConverter.toValue(modelParamsObj);
ModelName modelName = ModelName.of(project, location, modelId);
GcsSource.Builder gcsSource = GcsSource.newBuilder();
gcsSource.addUris(gcsSourceUri);
InputConfig inputConfig = InputConfig.newBuilder().setInstancesFormat("jsonl").setGcsSource(gcsSource).build();
GcsDestination gcsDestination = GcsDestination.newBuilder().setOutputUriPrefix(gcsDestinationOutputUriPrefix).build();
OutputConfig outputConfig = OutputConfig.newBuilder().setPredictionsFormat("jsonl").setGcsDestination(gcsDestination).build();
BatchPredictionJob batchPredictionJob = BatchPredictionJob.newBuilder().setDisplayName(batchPredictionDisplayName).setModel(modelName.toString()).setModelParameters(modelParameters).setInputConfig(inputConfig).setOutputConfig(outputConfig).build();
BatchPredictionJob batchPredictionJobResponse = jobServiceClient.createBatchPredictionJob(locationName, batchPredictionJob);
System.out.println("Create Batch Prediction Job Video Classification Response");
System.out.format("\tName: %s\n", batchPredictionJobResponse.getName());
System.out.format("\tDisplay Name: %s\n", batchPredictionJobResponse.getDisplayName());
System.out.format("\tModel: %s\n", batchPredictionJobResponse.getModel());
System.out.format("\tModel Parameters: %s\n", batchPredictionJobResponse.getModelParameters());
System.out.format("\tState: %s\n", batchPredictionJobResponse.getState());
System.out.format("\tCreate Time: %s\n", batchPredictionJobResponse.getCreateTime());
System.out.format("\tStart Time: %s\n", batchPredictionJobResponse.getStartTime());
System.out.format("\tEnd Time: %s\n", batchPredictionJobResponse.getEndTime());
System.out.format("\tUpdate Time: %s\n", batchPredictionJobResponse.getUpdateTime());
System.out.format("\tLabels: %s\n", batchPredictionJobResponse.getLabelsMap());
InputConfig inputConfigResponse = batchPredictionJobResponse.getInputConfig();
System.out.println("\tInput Config");
System.out.format("\t\tInstances Format: %s\n", inputConfigResponse.getInstancesFormat());
GcsSource gcsSourceResponse = inputConfigResponse.getGcsSource();
System.out.println("\t\tGcs Source");
System.out.format("\t\t\tUris: %s\n", gcsSourceResponse.getUrisList());
BigQuerySource bigQuerySource = inputConfigResponse.getBigquerySource();
System.out.println("\t\tBigquery Source");
System.out.format("\t\t\tInput Uri: %s\n", bigQuerySource.getInputUri());
OutputConfig outputConfigResponse = batchPredictionJobResponse.getOutputConfig();
System.out.println("\tOutput Config");
System.out.format("\t\tPredictions Format: %s\n", outputConfigResponse.getPredictionsFormat());
GcsDestination gcsDestinationResponse = outputConfigResponse.getGcsDestination();
System.out.println("\t\tGcs Destination");
System.out.format("\t\t\tOutput Uri Prefix: %s\n", gcsDestinationResponse.getOutputUriPrefix());
BigQueryDestination bigQueryDestination = outputConfigResponse.getBigqueryDestination();
System.out.println("\t\tBig Query Destination");
System.out.format("\t\t\tOutput Uri: %s\n", bigQueryDestination.getOutputUri());
BatchDedicatedResources batchDedicatedResources = batchPredictionJobResponse.getDedicatedResources();
System.out.println("\tBatch Dedicated Resources");
System.out.format("\t\tStarting Replica Count: %s\n", batchDedicatedResources.getStartingReplicaCount());
System.out.format("\t\tMax Replica Count: %s\n", batchDedicatedResources.getMaxReplicaCount());
MachineSpec machineSpec = batchDedicatedResources.getMachineSpec();
System.out.println("\t\tMachine Spec");
System.out.format("\t\t\tMachine Type: %s\n", machineSpec.getMachineType());
System.out.format("\t\t\tAccelerator Type: %s\n", machineSpec.getAcceleratorType());
System.out.format("\t\t\tAccelerator Count: %s\n", machineSpec.getAcceleratorCount());
ManualBatchTuningParameters manualBatchTuningParameters = batchPredictionJobResponse.getManualBatchTuningParameters();
System.out.println("\tManual Batch Tuning Parameters");
System.out.format("\t\tBatch Size: %s\n", manualBatchTuningParameters.getBatchSize());
OutputInfo outputInfo = batchPredictionJobResponse.getOutputInfo();
System.out.println("\tOutput Info");
System.out.format("\t\tGcs Output Directory: %s\n", outputInfo.getGcsOutputDirectory());
System.out.format("\t\tBigquery Output Dataset: %s\n", outputInfo.getBigqueryOutputDataset());
Status status = batchPredictionJobResponse.getError();
System.out.println("\tError");
System.out.format("\t\tCode: %s\n", status.getCode());
System.out.format("\t\tMessage: %s\n", status.getMessage());
List<Any> details = status.getDetailsList();
for (Status partialFailure : batchPredictionJobResponse.getPartialFailuresList()) {
System.out.println("\tPartial Failure");
System.out.format("\t\tCode: %s\n", partialFailure.getCode());
System.out.format("\t\tMessage: %s\n", partialFailure.getMessage());
List<Any> partialFailureDetailsList = partialFailure.getDetailsList();
}
ResourcesConsumed resourcesConsumed = batchPredictionJobResponse.getResourcesConsumed();
System.out.println("\tResources Consumed");
System.out.format("\t\tReplica Hours: %s\n", resourcesConsumed.getReplicaHours());
CompletionStats completionStats = batchPredictionJobResponse.getCompletionStats();
System.out.println("\tCompletion Stats");
System.out.format("\t\tSuccessful Count: %s\n", completionStats.getSuccessfulCount());
System.out.format("\t\tFailed Count: %s\n", completionStats.getFailedCount());
System.out.format("\t\tIncomplete Count: %s\n", completionStats.getIncompleteCount());
}
}
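A minimal sketch of invoking this sample; note that this method takes the project ID as its last parameter. All values below are hypothetical placeholders.
public static void main(String[] args) throws IOException {
  // TODO(developer): Replace these hypothetical placeholders with real values.
  String batchPredictionDisplayName = "YOUR_JOB_DISPLAY_NAME";
  String modelId = "YOUR_MODEL_ID";
  String gcsSourceUri = "gs://YOUR_BUCKET/input.jsonl";
  String gcsDestinationOutputUriPrefix = "gs://YOUR_BUCKET/output/";
  String project = "YOUR_PROJECT_ID";
  createBatchPredictionJobVideoClassification(
      batchPredictionDisplayName, modelId, gcsSourceUri, gcsDestinationOutputUriPrefix, project);
}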