Use of com.google.api.services.cloudresourcemanager.v3.model in project java-aiplatform by googleapis.
The class CreateTrainingPipelineTabularClassificationSample, method createTrainingPipelineTableClassification:
static void createTrainingPipelineTableClassification(String project, String modelDisplayName, String datasetId, String targetColumn) throws IOException {
PipelineServiceSettings pipelineServiceSettings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests. After completing all of your requests, call
// the "close" method on the client to safely clean up any remaining background resources.
try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create(pipelineServiceSettings)) {
String location = "us-central1";
LocationName locationName = LocationName.of(project, location);
String trainingTaskDefinition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/automl_tables_1.0.0.yaml";
// Set the columns used for training and their data types
Transformation transformation1 = Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("sepal_width").build()).build();
Transformation transformation2 = Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("sepal_length").build()).build();
Transformation transformation3 = Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("petal_length").build()).build();
Transformation transformation4 = Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("petal_width").build()).build();
ArrayList<Transformation> transformationArrayList = new ArrayList<>();
transformationArrayList.add(transformation1);
transformationArrayList.add(transformation2);
transformationArrayList.add(transformation3);
transformationArrayList.add(transformation4);
AutoMlTablesInputs autoMlTablesInputs =
    AutoMlTablesInputs.newBuilder()
        .setTargetColumn(targetColumn)
        .setPredictionType("classification")
        .addAllTransformations(transformationArrayList)
        .setTrainBudgetMilliNodeHours(8000)
        .build();
FractionSplit fractionSplit = FractionSplit.newBuilder().setTrainingFraction(0.8).setValidationFraction(0.1).setTestFraction(0.1).build();
InputDataConfig inputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).setFractionSplit(fractionSplit).build();
Model modelToUpload = Model.newBuilder().setDisplayName(modelDisplayName).build();
TrainingPipeline trainingPipeline =
    TrainingPipeline.newBuilder()
        .setDisplayName(modelDisplayName)
        .setTrainingTaskDefinition(trainingTaskDefinition)
        .setTrainingTaskInputs(ValueConverter.toValue(autoMlTablesInputs))
        .setInputDataConfig(inputDataConfig)
        .setModelToUpload(modelToUpload)
        .build();
TrainingPipeline trainingPipelineResponse = pipelineServiceClient.createTrainingPipeline(locationName, trainingPipeline);
System.out.println("Create Training Pipeline Tabular Classification Response");
System.out.format("\tName: %s\n", trainingPipelineResponse.getName());
System.out.format("\tDisplay Name: %s\n", trainingPipelineResponse.getDisplayName());
System.out.format("\tTraining Task Definition: %s\n", trainingPipelineResponse.getTrainingTaskDefinition());
System.out.format("\tTraining Task Inputs: %s\n", trainingPipelineResponse.getTrainingTaskInputs());
System.out.format("\tTraining Task Metadata: %s\n", trainingPipelineResponse.getTrainingTaskMetadata());
System.out.format("\tState: %s\n", trainingPipelineResponse.getState());
System.out.format("\tCreate Time: %s\n", trainingPipelineResponse.getCreateTime());
System.out.format("\tStart Time: %s\n", trainingPipelineResponse.getStartTime());
System.out.format("\tEnd Time: %s\n", trainingPipelineResponse.getEndTime());
System.out.format("\tUpdate Time: %s\n", trainingPipelineResponse.getUpdateTime());
System.out.format("\tLabels: %s\n", trainingPipelineResponse.getLabelsMap());
InputDataConfig inputDataConfigResponse = trainingPipelineResponse.getInputDataConfig();
System.out.println("\tInput Data Config");
System.out.format("\t\tDataset Id: %s\n", inputDataConfigResponse.getDatasetId());
System.out.format("\t\tAnnotations Filter: %s\n", inputDataConfigResponse.getAnnotationsFilter());
FractionSplit fractionSplitResponse = inputDataConfigResponse.getFractionSplit();
System.out.println("\t\tFraction Split");
System.out.format("\t\t\tTraining Fraction: %s\n", fractionSplitResponse.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", fractionSplitResponse.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", fractionSplitResponse.getTestFraction());
FilterSplit filterSplit = inputDataConfigResponse.getFilterSplit();
System.out.println("\t\tFilter Split");
System.out.format("\t\t\tTraining Filter: %s\n", filterSplit.getTrainingFilter());
System.out.format("\t\t\tValidation Filter: %s\n", filterSplit.getValidationFilter());
System.out.format("\t\t\tTest Filter: %s\n", filterSplit.getTestFilter());
PredefinedSplit predefinedSplit = inputDataConfigResponse.getPredefinedSplit();
System.out.println("\t\tPredefined Split");
System.out.format("\t\t\tKey: %s\n", predefinedSplit.getKey());
TimestampSplit timestampSplit = inputDataConfigResponse.getTimestampSplit();
System.out.println("\t\tTimestamp Split");
System.out.format("\t\t\tTraining Fraction: %s\n", timestampSplit.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", timestampSplit.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", timestampSplit.getTestFraction());
System.out.format("\t\t\tKey: %s\n", timestampSplit.getKey());
Model modelResponse = trainingPipelineResponse.getModelToUpload();
System.out.println("\tModel To Upload");
System.out.format("\t\tName: %s\n", modelResponse.getName());
System.out.format("\t\tDisplay Name: %s\n", modelResponse.getDisplayName());
System.out.format("\t\tDescription: %s\n", modelResponse.getDescription());
System.out.format("\t\tMetadata Schema Uri: %s\n", modelResponse.getMetadataSchemaUri());
System.out.format("\t\tMetadata: %s\n", modelResponse.getMetadata());
System.out.format("\t\tTraining Pipeline: %s\n", modelResponse.getTrainingPipeline());
System.out.format("\t\tArtifact Uri: %s\n", modelResponse.getArtifactUri());
System.out.format("\t\tSupported Deployment Resources Types: %s\n", modelResponse.getSupportedDeploymentResourcesTypesList().toString());
System.out.format("\t\tSupported Input Storage Formats: %s\n", modelResponse.getSupportedInputStorageFormatsList().toString());
System.out.format("\t\tSupported Output Storage Formats: %s\n", modelResponse.getSupportedOutputStorageFormatsList().toString());
System.out.format("\t\tCreate Time: %s\n", modelResponse.getCreateTime());
System.out.format("\t\tUpdate Time: %s\n", modelResponse.getUpdateTime());
System.out.format("\t\tLabels: %s\n", modelResponse.getLabelsMap());
PredictSchemata predictSchemata = modelResponse.getPredictSchemata();
System.out.println("\tPredict Schemata");
System.out.format("\t\tInstance Schema Uri: %s\n", predictSchemata.getInstanceSchemaUri());
System.out.format("\t\tParameters Schema Uri: %s\n", predictSchemata.getParametersSchemaUri());
System.out.format("\t\tPrediction Schema Uri: %s\n", predictSchemata.getPredictionSchemaUri());
for (Model.ExportFormat supportedExportFormat : modelResponse.getSupportedExportFormatsList()) {
System.out.println("\tSupported Export Format");
System.out.format("\t\tId: %s\n", supportedExportFormat.getId());
}
ModelContainerSpec containerSpec = modelResponse.getContainerSpec();
System.out.println("\tContainer Spec");
System.out.format("\t\tImage Uri: %s\n", containerSpec.getImageUri());
System.out.format("\t\tCommand: %s\n", containerSpec.getCommandList());
System.out.format("\t\tArgs: %s\n", containerSpec.getArgsList());
System.out.format("\t\tPredict Route: %s\n", containerSpec.getPredictRoute());
System.out.format("\t\tHealth Route: %s\n", containerSpec.getHealthRoute());
for (EnvVar envVar : containerSpec.getEnvList()) {
System.out.println("\t\tEnv");
System.out.format("\t\t\tName: %s\n", envVar.getName());
System.out.format("\t\t\tValue: %s\n", envVar.getValue());
}
for (Port port : containerSpec.getPortsList()) {
System.out.println("\t\tPort");
System.out.format("\t\t\tContainer Port: %s\n", port.getContainerPort());
}
for (DeployedModelRef deployedModelRef : modelResponse.getDeployedModelsList()) {
System.out.println("\tDeployed Model");
System.out.format("\t\tEndpoint: %s\n", deployedModelRef.getEndpoint());
System.out.format("\t\tDeployed Model Id: %s\n", deployedModelRef.getDeployedModelId());
}
Status status = trainingPipelineResponse.getError();
System.out.println("\tError");
System.out.format("\t\tCode: %s\n", status.getCode());
System.out.format("\t\tMessage: %s\n", status.getMessage());
}
}
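For reference, a minimal caller might look like the sketch below; the argument values are placeholders rather than real resource names.
public static void main(String[] args) throws IOException {
  // Placeholder values for illustration only; substitute your own project ID,
  // model display name, dataset ID, and target column before running.
  createTrainingPipelineTableClassification(
      "YOUR_PROJECT_ID", "YOUR_MODEL_DISPLAY_NAME", "YOUR_DATASET_ID", "YOUR_TARGET_COLUMN");
}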
Use of com.google.api.services.cloudresourcemanager.v3.model in project java-aiplatform by googleapis.
The class CreateTrainingPipelineTabularRegressionSample, method createTrainingPipelineTableRegression:
static void createTrainingPipelineTableRegression(String project, String modelDisplayName, String datasetId, String targetColumn) throws IOException {
PipelineServiceSettings pipelineServiceSettings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests. After completing all of your requests, call
// the "close" method on the client to safely clean up any remaining background resources.
try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create(pipelineServiceSettings)) {
String location = "us-central1";
LocationName locationName = LocationName.of(project, location);
String trainingTaskDefinition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/automl_tables_1.0.0.yaml";
// Set the columns used for training and their data types
ArrayList<Transformation> transformations = new ArrayList<>();
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRING_5000unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("INTEGER_5000unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("FLOAT_5000unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("FLOAT_5000unique_REPEATED")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("NUMERIC_5000unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("BOOLEAN_2unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setTimestamp(TimestampTransformation.newBuilder().setColumnName("TIMESTAMP_1unique_NULLABLE").setInvalidValuesAllowed(true)).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("DATE_1unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("TIME_1unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setTimestamp(TimestampTransformation.newBuilder().setColumnName("DATETIME_1unique_NULLABLE").setInvalidValuesAllowed(true)).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.STRING_5000unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.INTEGER_5000unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.FLOAT_5000unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.FLOAT_5000unique_REQUIRED")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.FLOAT_5000unique_REPEATED")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.NUMERIC_5000unique_NULLABLE")).build());
transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.TIMESTAMP_1unique_NULLABLE")).build());
AutoMlTablesInputs trainingTaskInputs =
    AutoMlTablesInputs.newBuilder()
        .addAllTransformations(transformations)
        .setTargetColumn(targetColumn)
        .setPredictionType("regression")
        .setTrainBudgetMilliNodeHours(8000)
        .setDisableEarlyStopping(false)
        .setOptimizationObjective("minimize-rmse")
        .build();
FractionSplit fractionSplit = FractionSplit.newBuilder().setTrainingFraction(0.8).setValidationFraction(0.1).setTestFraction(0.1).build();
InputDataConfig inputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).setFractionSplit(fractionSplit).build();
Model modelToUpload = Model.newBuilder().setDisplayName(modelDisplayName).build();
TrainingPipeline trainingPipeline =
    TrainingPipeline.newBuilder()
        .setDisplayName(modelDisplayName)
        .setTrainingTaskDefinition(trainingTaskDefinition)
        .setTrainingTaskInputs(ValueConverter.toValue(trainingTaskInputs))
        .setInputDataConfig(inputDataConfig)
        .setModelToUpload(modelToUpload)
        .build();
TrainingPipeline trainingPipelineResponse = pipelineServiceClient.createTrainingPipeline(locationName, trainingPipeline);
System.out.println("Create Training Pipeline Tabular Regression Response");
System.out.format("\tName: %s\n", trainingPipelineResponse.getName());
System.out.format("\tDisplay Name: %s\n", trainingPipelineResponse.getDisplayName());
System.out.format("\tTraining Task Definition: %s\n", trainingPipelineResponse.getTrainingTaskDefinition());
System.out.format("\tTraining Task Inputs: %s\n", trainingPipelineResponse.getTrainingTaskInputs());
System.out.format("\tTraining Task Metadata: %s\n", trainingPipelineResponse.getTrainingTaskMetadata());
System.out.format("\tState: %s\n", trainingPipelineResponse.getState());
System.out.format("\tCreate Time: %s\n", trainingPipelineResponse.getCreateTime());
System.out.format("\tStart Time: %s\n", trainingPipelineResponse.getStartTime());
System.out.format("\tEnd Time: %s\n", trainingPipelineResponse.getEndTime());
System.out.format("\tUpdate Time: %s\n", trainingPipelineResponse.getUpdateTime());
System.out.format("\tLabels: %s\n", trainingPipelineResponse.getLabelsMap());
InputDataConfig inputDataConfigResponse = trainingPipelineResponse.getInputDataConfig();
System.out.println("\tInput Data Config");
System.out.format("\t\tDataset Id: %s\n", inputDataConfigResponse.getDatasetId());
System.out.format("\t\tAnnotations Filter: %s\n", inputDataConfigResponse.getAnnotationsFilter());
FractionSplit fractionSplitResponse = inputDataConfigResponse.getFractionSplit();
System.out.println("\t\tFraction Split");
System.out.format("\t\t\tTraining Fraction: %s\n", fractionSplitResponse.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", fractionSplitResponse.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", fractionSplitResponse.getTestFraction());
FilterSplit filterSplit = inputDataConfigResponse.getFilterSplit();
System.out.println("\t\tFilter Split");
System.out.format("\t\t\tTraining Filter: %s\n", filterSplit.getTrainingFilter());
System.out.format("\t\t\tValidation Filter: %s\n", filterSplit.getValidationFilter());
System.out.format("\t\t\tTest Filter: %s\n", filterSplit.getTestFilter());
PredefinedSplit predefinedSplit = inputDataConfigResponse.getPredefinedSplit();
System.out.println("\t\tPredefined Split");
System.out.format("\t\t\tKey: %s\n", predefinedSplit.getKey());
TimestampSplit timestampSplit = inputDataConfigResponse.getTimestampSplit();
System.out.println("\t\tTimestamp Split");
System.out.format("\t\t\tTraining Fraction: %s\n", timestampSplit.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", timestampSplit.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", timestampSplit.getTestFraction());
System.out.format("\t\t\tKey: %s\n", timestampSplit.getKey());
Model modelResponse = trainingPipelineResponse.getModelToUpload();
System.out.println("\tModel To Upload");
System.out.format("\t\tName: %s\n", modelResponse.getName());
System.out.format("\t\tDisplay Name: %s\n", modelResponse.getDisplayName());
System.out.format("\t\tDescription: %s\n", modelResponse.getDescription());
System.out.format("\t\tMetadata Schema Uri: %s\n", modelResponse.getMetadataSchemaUri());
System.out.format("\t\tMetadata: %s\n", modelResponse.getMetadata());
System.out.format("\t\tTraining Pipeline: %s\n", modelResponse.getTrainingPipeline());
System.out.format("\t\tArtifact Uri: %s\n", modelResponse.getArtifactUri());
System.out.format("\t\tSupported Deployment Resources Types: %s\n", modelResponse.getSupportedDeploymentResourcesTypesList().toString());
System.out.format("\t\tSupported Input Storage Formats: %s\n", modelResponse.getSupportedInputStorageFormatsList().toString());
System.out.format("\t\tSupported Output Storage Formats: %s\n", modelResponse.getSupportedOutputStorageFormatsList().toString());
System.out.format("\t\tCreate Time: %s\n", modelResponse.getCreateTime());
System.out.format("\t\tUpdate Time: %s\n", modelResponse.getUpdateTime());
System.out.format("\t\tLabels: %s\n", modelResponse.getLabelsMap());
PredictSchemata predictSchemata = modelResponse.getPredictSchemata();
System.out.println("\tPredict Schemata");
System.out.format("\t\tInstance Schema Uri: %s\n", predictSchemata.getInstanceSchemaUri());
System.out.format("\t\tParameters Schema Uri: %s\n", predictSchemata.getParametersSchemaUri());
System.out.format("\t\tPrediction Schema Uri: %s\n", predictSchemata.getPredictionSchemaUri());
for (Model.ExportFormat supportedExportFormat : modelResponse.getSupportedExportFormatsList()) {
System.out.println("\tSupported Export Format");
System.out.format("\t\tId: %s\n", supportedExportFormat.getId());
}
ModelContainerSpec containerSpec = modelResponse.getContainerSpec();
System.out.println("\tContainer Spec");
System.out.format("\t\tImage Uri: %s\n", containerSpec.getImageUri());
System.out.format("\t\tCommand: %s\n", containerSpec.getCommandList());
System.out.format("\t\tArgs: %s\n", containerSpec.getArgsList());
System.out.format("\t\tPredict Route: %s\n", containerSpec.getPredictRoute());
System.out.format("\t\tHealth Route: %s\n", containerSpec.getHealthRoute());
for (EnvVar envVar : containerSpec.getEnvList()) {
System.out.println("\t\tEnv");
System.out.format("\t\t\tName: %s\n", envVar.getName());
System.out.format("\t\t\tValue: %s\n", envVar.getValue());
}
for (Port port : containerSpec.getPortsList()) {
System.out.println("\t\tPort");
System.out.format("\t\t\tContainer Port: %s\n", port.getContainerPort());
}
for (DeployedModelRef deployedModelRef : modelResponse.getDeployedModelsList()) {
System.out.println("\tDeployed Model");
System.out.format("\t\tEndpoint: %s\n", deployedModelRef.getEndpoint());
System.out.format("\t\tDeployed Model Id: %s\n", deployedModelRef.getDeployedModelId());
}
Status status = trainingPipelineResponse.getError();
System.out.println("\tError");
System.out.format("\t\tCode: %s\n", status.getCode());
System.out.format("\t\tMessage: %s\n", status.getMessage());
}
}
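Pipeline creation returns immediately while training runs asynchronously. A minimal polling sketch, using a hypothetical helper and assuming com.google.cloud.aiplatform.v1.PipelineState is imported:
// Hypothetical helper (not part of the original sample): re-fetch the pipeline by its
// resource name until it leaves the queued/pending/running states, then print the result.
static void waitForTrainingPipeline(PipelineServiceClient client, String pipelineName)
    throws InterruptedException {
  TrainingPipeline pipeline = client.getTrainingPipeline(pipelineName);
  while (pipeline.getState() == PipelineState.PIPELINE_STATE_QUEUED
      || pipeline.getState() == PipelineState.PIPELINE_STATE_PENDING
      || pipeline.getState() == PipelineState.PIPELINE_STATE_RUNNING) {
    Thread.sleep(60_000); // poll once per minute; production code should also add a timeout
    pipeline = client.getTrainingPipeline(pipelineName);
  }
  System.out.format("Final state: %s\n", pipeline.getState());
}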
Use of com.google.api.services.cloudresourcemanager.v3.model in project java-aiplatform by googleapis.
The class CreateTrainingPipelineTextSentimentAnalysisSample, method createTrainingPipelineTextSentimentAnalysisSample:
static void createTrainingPipelineTextSentimentAnalysisSample(String project, String trainingPipelineDisplayName, String datasetId, String modelDisplayName) throws IOException {
PipelineServiceSettings pipelineServiceSettings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests. After completing all of your requests, call
// the "close" method on the client to safely clean up any remaining background resources.
try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create(pipelineServiceSettings)) {
String location = "us-central1";
String trainingTaskDefinition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/" + "automl_text_sentiment_1.0.0.yaml";
LocationName locationName = LocationName.of(project, location);
AutoMlTextSentimentInputs trainingTaskInputs = AutoMlTextSentimentInputs.newBuilder().setSentimentMax(4).build();
InputDataConfig trainingInputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).build();
Model model = Model.newBuilder().setDisplayName(modelDisplayName).build();
TrainingPipeline trainingPipeline =
    TrainingPipeline.newBuilder()
        .setDisplayName(trainingPipelineDisplayName)
        .setTrainingTaskDefinition(trainingTaskDefinition)
        .setTrainingTaskInputs(ValueConverter.toValue(trainingTaskInputs))
        .setInputDataConfig(trainingInputDataConfig)
        .setModelToUpload(model)
        .build();
TrainingPipeline trainingPipelineResponse = pipelineServiceClient.createTrainingPipeline(locationName, trainingPipeline);
System.out.println("Create Training Pipeline Text Sentiment Analysis Response");
System.out.format("\tName: %s\n", trainingPipelineResponse.getName());
System.out.format("\tDisplay Name: %s\n", trainingPipelineResponse.getDisplayName());
System.out.format("\tTraining Task Definition: %s\n", trainingPipelineResponse.getTrainingTaskDefinition());
System.out.format("\tTraining Task Inputs: %s\n", trainingPipelineResponse.getTrainingTaskInputs());
System.out.format("\tTraining Task Metadata: %s\n", trainingPipelineResponse.getTrainingTaskMetadata());
System.out.format("\tState: %s\n", trainingPipelineResponse.getState());
System.out.format("\tCreate Time: %s\n", trainingPipelineResponse.getCreateTime());
System.out.format("\tStart Time: %s\n", trainingPipelineResponse.getStartTime());
System.out.format("\tEnd Time: %s\n", trainingPipelineResponse.getEndTime());
System.out.format("\tUpdate Time: %s\n", trainingPipelineResponse.getUpdateTime());
System.out.format("\tLabels: %s\n", trainingPipelineResponse.getLabelsMap());
InputDataConfig inputDataConfig = trainingPipelineResponse.getInputDataConfig();
System.out.println("\tInput Data Config");
System.out.format("\t\tDataset Id: %s\n", inputDataConfig.getDatasetId());
System.out.format("\t\tAnnotations Filter: %s\n", inputDataConfig.getAnnotationsFilter());
FractionSplit fractionSplit = inputDataConfig.getFractionSplit();
System.out.println("\t\tFraction Split");
System.out.format("\t\t\tTraining Fraction: %s\n", fractionSplit.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", fractionSplit.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", fractionSplit.getTestFraction());
FilterSplit filterSplit = inputDataConfig.getFilterSplit();
System.out.println("\t\tFilter Split");
System.out.format("\t\t\tTraining Filter: %s\n", filterSplit.getTrainingFilter());
System.out.format("\t\t\tValidation Filter: %s\n", filterSplit.getValidationFilter());
System.out.format("\t\t\tTest Filter: %s\n", filterSplit.getTestFilter());
PredefinedSplit predefinedSplit = inputDataConfig.getPredefinedSplit();
System.out.println("\t\tPredefined Split");
System.out.format("\t\t\tKey: %s\n", predefinedSplit.getKey());
TimestampSplit timestampSplit = inputDataConfig.getTimestampSplit();
System.out.println("\t\tTimestamp Split");
System.out.format("\t\t\tTraining Fraction: %s\n", timestampSplit.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", timestampSplit.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", timestampSplit.getTestFraction());
System.out.format("\t\t\tKey: %s\n", timestampSplit.getKey());
Model modelResponse = trainingPipelineResponse.getModelToUpload();
System.out.println("\tModel To Upload");
System.out.format("\t\tName: %s\n", modelResponse.getName());
System.out.format("\t\tDisplay Name: %s\n", modelResponse.getDisplayName());
System.out.format("\t\tDescription: %s\n", modelResponse.getDescription());
System.out.format("\t\tMetadata Schema Uri: %s\n", modelResponse.getMetadataSchemaUri());
System.out.format("\t\tMetadata: %s\n", modelResponse.getMetadata());
System.out.format("\t\tTraining Pipeline: %s\n", modelResponse.getTrainingPipeline());
System.out.format("\t\tArtifact Uri: %s\n", modelResponse.getArtifactUri());
System.out.format("\t\tSupported Deployment Resources Types: %s\n", modelResponse.getSupportedDeploymentResourcesTypesList());
System.out.format("\t\tSupported Input Storage Formats: %s\n", modelResponse.getSupportedInputStorageFormatsList());
System.out.format("\t\tSupported Output Storage Formats: %s\n", modelResponse.getSupportedOutputStorageFormatsList());
System.out.format("\t\tCreate Time: %s\n", modelResponse.getCreateTime());
System.out.format("\t\tUpdate Time: %s\n", modelResponse.getUpdateTime());
System.out.format("\t\tLabels: %s\n", modelResponse.getLabelsMap());
PredictSchemata predictSchemata = modelResponse.getPredictSchemata();
System.out.println("\t\tPredict Schemata");
System.out.format("\t\t\tInstance Schema Uri: %s\n", predictSchemata.getInstanceSchemaUri());
System.out.format("\t\t\tParameters Schema Uri: %s\n", predictSchemata.getParametersSchemaUri());
System.out.format("\t\t\tPrediction Schema Uri: %s\n", predictSchemata.getPredictionSchemaUri());
for (ExportFormat exportFormat : modelResponse.getSupportedExportFormatsList()) {
System.out.println("\t\tSupported Export Format");
System.out.format("\t\t\tId: %s\n", exportFormat.getId());
}
ModelContainerSpec modelContainerSpec = modelResponse.getContainerSpec();
System.out.println("\t\tContainer Spec");
System.out.format("\t\t\tImage Uri: %s\n", modelContainerSpec.getImageUri());
System.out.format("\t\t\tCommand: %s\n", modelContainerSpec.getCommandList());
System.out.format("\t\t\tArgs: %s\n", modelContainerSpec.getArgsList());
System.out.format("\t\t\tPredict Route: %s\n", modelContainerSpec.getPredictRoute());
System.out.format("\t\t\tHealth Route: %s\n", modelContainerSpec.getHealthRoute());
for (EnvVar envVar : modelContainerSpec.getEnvList()) {
System.out.println("\t\t\tEnv");
System.out.format("\t\t\t\tName: %s\n", envVar.getName());
System.out.format("\t\t\t\tValue: %s\n", envVar.getValue());
}
for (Port port : modelContainerSpec.getPortsList()) {
System.out.println("\t\t\tPort");
System.out.format("\t\t\t\tContainer Port: %s\n", port.getContainerPort());
}
for (DeployedModelRef deployedModelRef : modelResponse.getDeployedModelsList()) {
System.out.println("\t\tDeployed Model");
System.out.format("\t\t\tEndpoint: %s\n", deployedModelRef.getEndpoint());
System.out.format("\t\t\tDeployed Model Id: %s\n", deployedModelRef.getDeployedModelId());
}
Status status = trainingPipelineResponse.getError();
System.out.println("\tError");
System.out.format("\t\tCode: %s\n", status.getCode());
System.out.format("\t\tMessage: %s\n", status.getMessage());
}
}
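Note that setSentimentMax(4) means the dataset's sentiment labels are expected to be integer ordinals from 0 to 4, where higher values indicate more positive sentiment. A minimal caller sketch with placeholder arguments:
public static void main(String[] args) throws IOException {
  // Placeholder values for illustration only.
  createTrainingPipelineTextSentimentAnalysisSample(
      "YOUR_PROJECT_ID",
      "YOUR_PIPELINE_DISPLAY_NAME",
      "YOUR_DATASET_ID",
      "YOUR_MODEL_DISPLAY_NAME");
}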
Use of com.google.api.services.cloudresourcemanager.v3.model in project java-aiplatform by googleapis.
The class CreateTrainingPipelineVideoActionRecognitionSample, method createTrainingPipelineVideoActionRecognitionSample:
static void createTrainingPipelineVideoActionRecognitionSample(String project, String displayName, String datasetId, String modelDisplayName) throws IOException {
PipelineServiceSettings settings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
String location = "us-central1";
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests. After completing all of your requests, call
// the "close" method on the client to safely clean up any remaining background resources.
try (PipelineServiceClient client = PipelineServiceClient.create(settings)) {
AutoMlVideoActionRecognitionInputs trainingTaskInputs = AutoMlVideoActionRecognitionInputs.newBuilder().setModelType(ModelType.CLOUD).build();
InputDataConfig inputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).build();
Model modelToUpload = Model.newBuilder().setDisplayName(modelDisplayName).build();
TrainingPipeline trainingPipeline =
    TrainingPipeline.newBuilder()
        .setDisplayName(displayName)
        .setTrainingTaskDefinition(
            "gs://google-cloud-aiplatform/schema/trainingjob/definition/"
                + "automl_video_action_recognition_1.0.0.yaml")
        .setTrainingTaskInputs(ValueConverter.toValue(trainingTaskInputs))
        .setInputDataConfig(inputDataConfig)
        .setModelToUpload(modelToUpload)
        .build();
LocationName parent = LocationName.of(project, location);
TrainingPipeline response = client.createTrainingPipeline(parent, trainingPipeline);
System.out.format("response: %s\n", response);
System.out.format("Name: %s\n", response.getName());
}
}
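The returned name is a full resource path of the form projects/*/locations/*/trainingPipelines/*. If only the trailing pipeline ID is needed, a short sketch assuming com.google.cloud.aiplatform.v1.TrainingPipelineName is imported:
// Parse the resource name returned by the service and extract its pipeline ID.
TrainingPipelineName pipelineName = TrainingPipelineName.parse(response.getName());
System.out.format("Training pipeline ID: %s\n", pipelineName.getTrainingPipeline());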
Use of com.google.api.services.cloudresourcemanager.v3.model in project org.hl7.fhir.core by hapifhir.
The class TerminologyClientR5, method read:
@Override
public CanonicalResource read(String type, String id) {
Class<Resource> t;
try {
// todo: do we have to deal with any resource renaming? Use cases are limited...
t = (Class<Resource>) Class.forName("org.hl7.fhir.r5.model." + type);
} catch (ClassNotFoundException e) {
throw new FHIRException("Unable to fetch resources of type " + type + " in R5");
}
org.hl7.fhir.r5.model.Resource r5 = client.read(t, id);
if (r5 == null) {
throw new FHIRException("Unable to convert resource " + Utilities.pathURL(getAddress(), type, id) + " to R5 (internal representation)");
}
if (!(r5 instanceof CanonicalResource)) {
throw new FHIRException("Unable to convert resource " + Utilities.pathURL(getAddress(), type, id) + " to R5 canonical resource (internal representation)");
}
return (CanonicalResource) r5;
}
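A hedged usage sketch; terminologyClient stands for an already-configured TerminologyClientR5 instance, and the type and id values are illustrative:
// Illustrative only: fetch a CodeSystem by id through the terminology client
// and print its canonical URL.
CanonicalResource resource = terminologyClient.read("CodeSystem", "example-codesystem-id");
System.out.println(resource.getUrl());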