Example 6 with LocationName

use of com.google.cloud.aiplatform.v1.LocationName in project java-aiplatform by googleapis.

the class CreateHyperparameterTuningJobSample method createHyperparameterTuningJobSample.

static void createHyperparameterTuningJobSample(String project, String displayName, String containerImageUri) throws IOException {
    JobServiceSettings settings = JobServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
    String location = "us-central1";
    // the "close" method on the client to safely clean up any remaining background resources.
    try (JobServiceClient client = JobServiceClient.create(settings)) {
        StudySpec.MetricSpec metric0 = StudySpec.MetricSpec.newBuilder().setMetricId("accuracy").setGoal(StudySpec.MetricSpec.GoalType.MAXIMIZE).build();
        StudySpec.ParameterSpec.DoubleValueSpec doubleValueSpec = StudySpec.ParameterSpec.DoubleValueSpec.newBuilder().setMinValue(0.001).setMaxValue(0.1).build();
        StudySpec.ParameterSpec parameter0 = StudySpec.ParameterSpec.newBuilder().setParameterId("lr").setDoubleValueSpec(doubleValueSpec).build();
        StudySpec studySpec = StudySpec.newBuilder().addMetrics(metric0).addParameters(parameter0).build();
        MachineSpec machineSpec = MachineSpec.newBuilder().setMachineType("n1-standard-4").setAcceleratorType(AcceleratorType.NVIDIA_TESLA_K80).setAcceleratorCount(1).build();
        ContainerSpec containerSpec = ContainerSpec.newBuilder().setImageUri(containerImageUri).build();
        WorkerPoolSpec workerPoolSpec0 = WorkerPoolSpec.newBuilder().setMachineSpec(machineSpec).setReplicaCount(1).setContainerSpec(containerSpec).build();
        CustomJobSpec trialJobSpec = CustomJobSpec.newBuilder().addWorkerPoolSpecs(workerPoolSpec0).build();
        HyperparameterTuningJob hyperparameterTuningJob = HyperparameterTuningJob.newBuilder().setDisplayName(displayName).setMaxTrialCount(2).setParallelTrialCount(1).setMaxFailedTrialCount(1).setStudySpec(studySpec).setTrialJobSpec(trialJobSpec).build();
        LocationName parent = LocationName.of(project, location);
        HyperparameterTuningJob response = client.createHyperparameterTuningJob(parent, hyperparameterTuningJob);
        System.out.format("response: %s\n", response);
        System.out.format("Name: %s\n", response.getName());
    }
}
Also used : JobServiceSettings(com.google.cloud.aiplatform.v1.JobServiceSettings) StudySpec(com.google.cloud.aiplatform.v1.StudySpec) ContainerSpec(com.google.cloud.aiplatform.v1.ContainerSpec) JobServiceClient(com.google.cloud.aiplatform.v1.JobServiceClient) CustomJobSpec(com.google.cloud.aiplatform.v1.CustomJobSpec) MachineSpec(com.google.cloud.aiplatform.v1.MachineSpec) LocationName(com.google.cloud.aiplatform.v1.LocationName) HyperparameterTuningJob(com.google.cloud.aiplatform.v1.HyperparameterTuningJob) WorkerPoolSpec(com.google.cloud.aiplatform.v1.WorkerPoolSpec)
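createHyperparameterTuningJob only submits the job; the tuning trials run asynchronously. Below is a minimal polling sketch, not part of the original sample: the jobName argument (the full resource name returned in response.getName()) and the one-minute interval are illustrative, while getHyperparameterTuningJob and the JobState enum come from com.google.cloud.aiplatform.v1.

static void waitForHyperparameterTuningJob(String jobName) throws IOException, InterruptedException {
    // Reuse the same regional endpoint as the sample above.
    JobServiceSettings settings = JobServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
    try (JobServiceClient client = JobServiceClient.create(settings)) {
        while (true) {
            HyperparameterTuningJob job = client.getHyperparameterTuningJob(jobName);
            System.out.format("State: %s\n", job.getState());
            if (job.getState() == JobState.JOB_STATE_SUCCEEDED || job.getState() == JobState.JOB_STATE_FAILED || job.getState() == JobState.JOB_STATE_CANCELLED) {
                break;
            }
            // Poll once per minute; the interval is arbitrary.
            Thread.sleep(60_000);
        }
    }
}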

Example 7 with LocationName

use of com.google.cloud.aiplatform.v1.LocationName in project java-aiplatform by googleapis.

the class CreateTrainingPipelineCustomTrainingManagedDatasetSample method createTrainingPipelineCustomTrainingManagedDatasetSample.

static void createTrainingPipelineCustomTrainingManagedDatasetSample(String project, String displayName, String modelDisplayName, String datasetId, String annotationSchemaUri, String trainingContainerSpecImageUri, String modelContainerSpecImageUri, String baseOutputUriPrefix) throws IOException {
    PipelineServiceSettings settings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
    String location = "us-central1";
    // the "close" method on the client to safely clean up any remaining background resources.
    try (PipelineServiceClient client = PipelineServiceClient.create(settings)) {
        JsonArray jsonArgs = new JsonArray();
        jsonArgs.add("--model-dir=$(AIP_MODEL_DIR)");
        // training_task_inputs
        JsonObject jsonTrainingContainerSpec = new JsonObject();
        jsonTrainingContainerSpec.addProperty("imageUri", trainingContainerSpecImageUri);
        // AIP_MODEL_DIR is set by the service according to baseOutputDirectory.
        jsonTrainingContainerSpec.add("args", jsonArgs);
        JsonObject jsonMachineSpec = new JsonObject();
        jsonMachineSpec.addProperty("machineType", "n1-standard-8");
        JsonObject jsonTrainingWorkerPoolSpec = new JsonObject();
        jsonTrainingWorkerPoolSpec.addProperty("replicaCount", 1);
        jsonTrainingWorkerPoolSpec.add("machineSpec", jsonMachineSpec);
        jsonTrainingWorkerPoolSpec.add("containerSpec", jsonTrainingContainerSpec);
        JsonArray jsonWorkerPoolSpecs = new JsonArray();
        jsonWorkerPoolSpecs.add(jsonTrainingWorkerPoolSpec);
        JsonObject jsonBaseOutputDirectory = new JsonObject();
        jsonBaseOutputDirectory.addProperty("outputUriPrefix", baseOutputUriPrefix);
        JsonObject jsonTrainingTaskInputs = new JsonObject();
        jsonTrainingTaskInputs.add("workerPoolSpecs", jsonWorkerPoolSpecs);
        jsonTrainingTaskInputs.add("baseOutputDirectory", jsonBaseOutputDirectory);
        Value.Builder trainingTaskInputsBuilder = Value.newBuilder();
        JsonFormat.parser().merge(jsonTrainingTaskInputs.toString(), trainingTaskInputsBuilder);
        Value trainingTaskInputs = trainingTaskInputsBuilder.build();
        // model_to_upload
        ModelContainerSpec modelContainerSpec = ModelContainerSpec.newBuilder().setImageUri(modelContainerSpecImageUri).build();
        Model model = Model.newBuilder().setDisplayName(modelDisplayName).setContainerSpec(modelContainerSpec).build();
        GcsDestination gcsDestination = GcsDestination.newBuilder().setOutputUriPrefix(baseOutputUriPrefix).build();
        // input_data_config
        InputDataConfig inputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).setAnnotationSchemaUri(annotationSchemaUri).setGcsDestination(gcsDestination).build();
        // training_task_definition
        String customTaskDefinition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/custom_task_1.0.0.yaml";
        TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().setDisplayName(displayName).setInputDataConfig(inputDataConfig).setTrainingTaskDefinition(customTaskDefinition).setTrainingTaskInputs(trainingTaskInputs).setModelToUpload(model).build();
        LocationName parent = LocationName.of(project, location);
        TrainingPipeline response = client.createTrainingPipeline(parent, trainingPipeline);
        System.out.format("response: %s\n", response);
        System.out.format("Name: %s\n", response.getName());
    }
}
Also used : TrainingPipeline(com.google.cloud.aiplatform.v1.TrainingPipeline) JsonObject(com.google.gson.JsonObject) InputDataConfig(com.google.cloud.aiplatform.v1.InputDataConfig) LocationName(com.google.cloud.aiplatform.v1.LocationName) JsonArray(com.google.gson.JsonArray) ModelContainerSpec(com.google.cloud.aiplatform.v1.ModelContainerSpec) Value(com.google.protobuf.Value) Model(com.google.cloud.aiplatform.v1.Model) PipelineServiceSettings(com.google.cloud.aiplatform.v1.PipelineServiceSettings) PipelineServiceClient(com.google.cloud.aiplatform.v1.PipelineServiceClient) GcsDestination(com.google.cloud.aiplatform.v1.GcsDestination)
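The sample above assembles the worker pool spec as raw JSON with gson and parses it into a protobuf Value. A hedged alternative sketch, assuming the custom task schema accepts the camelCase field names that JsonFormat emits for CustomJobSpec, builds the typed message and round-trips it through JsonFormat; the helper name customTrainingTaskInputs is illustrative, and the types are the same com.google.cloud.aiplatform.v1 and com.google.protobuf classes used elsewhere on this page.

static Value customTrainingTaskInputs(String trainingContainerSpecImageUri, String baseOutputUriPrefix) throws IOException {
    // Same structure as the hand-written JSON above, expressed with typed builders.
    MachineSpec machineSpec = MachineSpec.newBuilder().setMachineType("n1-standard-8").build();
    ContainerSpec containerSpec = ContainerSpec.newBuilder().setImageUri(trainingContainerSpecImageUri).addArgs("--model-dir=$(AIP_MODEL_DIR)").build();
    WorkerPoolSpec workerPoolSpec = WorkerPoolSpec.newBuilder().setMachineSpec(machineSpec).setReplicaCount(1).setContainerSpec(containerSpec).build();
    CustomJobSpec customJobSpec = CustomJobSpec.newBuilder().addWorkerPoolSpecs(workerPoolSpec).setBaseOutputDirectory(GcsDestination.newBuilder().setOutputUriPrefix(baseOutputUriPrefix)).build();
    // Round-trip through JSON so the result is the generic Value expected by setTrainingTaskInputs.
    Value.Builder builder = Value.newBuilder();
    JsonFormat.parser().merge(JsonFormat.printer().print(customJobSpec), builder);
    return builder.build();
}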

Example 8 with LocationName

use of com.google.cloud.aiplatform.v1.LocationName in project java-aiplatform by googleapis.

the class CreateTrainingPipelineImageObjectDetectionSample method createTrainingPipelineImageObjectDetectionSample.

static void createTrainingPipelineImageObjectDetectionSample(String project, String trainingPipelineDisplayName, String datasetId, String modelDisplayName) throws IOException {
    PipelineServiceSettings pipelineServiceSettings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
    // the "close" method on the client to safely clean up any remaining background resources.
    try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create(pipelineServiceSettings)) {
        String location = "us-central1";
        String trainingTaskDefinition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/" + "automl_image_object_detection_1.0.0.yaml";
        LocationName locationName = LocationName.of(project, location);
        AutoMlImageObjectDetectionInputs autoMlImageObjectDetectionInputs = AutoMlImageObjectDetectionInputs.newBuilder().setModelType(ModelType.CLOUD_HIGH_ACCURACY_1).setBudgetMilliNodeHours(20000).setDisableEarlyStopping(false).build();
        InputDataConfig trainingInputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).build();
        Model model = Model.newBuilder().setDisplayName(modelDisplayName).build();
        TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().setDisplayName(trainingPipelineDisplayName).setTrainingTaskDefinition(trainingTaskDefinition).setTrainingTaskInputs(ValueConverter.toValue(autoMlImageObjectDetectionInputs)).setInputDataConfig(trainingInputDataConfig).setModelToUpload(model).build();
        TrainingPipeline trainingPipelineResponse = pipelineServiceClient.createTrainingPipeline(locationName, trainingPipeline);
        System.out.println("Create Training Pipeline Image Object Detection Response");
        System.out.format("Name: %s\n", trainingPipelineResponse.getName());
        System.out.format("Display Name: %s\n", trainingPipelineResponse.getDisplayName());
        System.out.format("Training Task Definition %s\n", trainingPipelineResponse.getTrainingTaskDefinition());
        System.out.format("Training Task Inputs: %s\n", trainingPipelineResponse.getTrainingTaskInputs());
        System.out.format("Training Task Metadata: %s\n", trainingPipelineResponse.getTrainingTaskMetadata());
        System.out.format("State: %s\n", trainingPipelineResponse.getState());
        System.out.format("Create Time: %s\n", trainingPipelineResponse.getCreateTime());
        System.out.format("StartTime %s\n", trainingPipelineResponse.getStartTime());
        System.out.format("End Time: %s\n", trainingPipelineResponse.getEndTime());
        System.out.format("Update Time: %s\n", trainingPipelineResponse.getUpdateTime());
        System.out.format("Labels: %s\n", trainingPipelineResponse.getLabelsMap());
        InputDataConfig inputDataConfig = trainingPipelineResponse.getInputDataConfig();
        System.out.println("Input Data Config");
        System.out.format("Dataset Id: %s", inputDataConfig.getDatasetId());
        System.out.format("Annotations Filter: %s\n", inputDataConfig.getAnnotationsFilter());
        FractionSplit fractionSplit = inputDataConfig.getFractionSplit();
        System.out.println("Fraction Split");
        System.out.format("Training Fraction: %s\n", fractionSplit.getTrainingFraction());
        System.out.format("Validation Fraction: %s\n", fractionSplit.getValidationFraction());
        System.out.format("Test Fraction: %s\n", fractionSplit.getTestFraction());
        FilterSplit filterSplit = inputDataConfig.getFilterSplit();
        System.out.println("Filter Split");
        System.out.format("Training Filter: %s\n", filterSplit.getTrainingFilter());
        System.out.format("Validation Filter: %s\n", filterSplit.getValidationFilter());
        System.out.format("Test Filter: %s\n", filterSplit.getTestFilter());
        PredefinedSplit predefinedSplit = inputDataConfig.getPredefinedSplit();
        System.out.println("Predefined Split");
        System.out.format("Key: %s\n", predefinedSplit.getKey());
        TimestampSplit timestampSplit = inputDataConfig.getTimestampSplit();
        System.out.println("Timestamp Split");
        System.out.format("Training Fraction: %s\n", timestampSplit.getTrainingFraction());
        System.out.format("Validation Fraction: %s\n", timestampSplit.getValidationFraction());
        System.out.format("Test Fraction: %s\n", timestampSplit.getTestFraction());
        System.out.format("Key: %s\n", timestampSplit.getKey());
        Model modelResponse = trainingPipelineResponse.getModelToUpload();
        System.out.println("Model To Upload");
        System.out.format("Name: %s\n", modelResponse.getName());
        System.out.format("Display Name: %s\n", modelResponse.getDisplayName());
        System.out.format("Description: %s\n", modelResponse.getDescription());
        System.out.format("Metadata Schema Uri: %s\n", modelResponse.getMetadataSchemaUri());
        System.out.format("Metadata: %s\n", modelResponse.getMetadata());
        System.out.format("Training Pipeline: %s\n", modelResponse.getTrainingPipeline());
        System.out.format("Artifact Uri: %s\n", modelResponse.getArtifactUri());
        System.out.format("Supported Deployment Resources Types: %s\n", modelResponse.getSupportedDeploymentResourcesTypesList());
        System.out.format("Supported Input Storage Formats: %s\n", modelResponse.getSupportedInputStorageFormatsList());
        System.out.format("Supported Output Storage Formats: %s\n", modelResponse.getSupportedOutputStorageFormatsList());
        System.out.format("Create Time: %s\n", modelResponse.getCreateTime());
        System.out.format("Update Time: %s\n", modelResponse.getUpdateTime());
        System.out.format("Labels: %sn\n", modelResponse.getLabelsMap());
        PredictSchemata predictSchemata = modelResponse.getPredictSchemata();
        System.out.println("Predict Schemata");
        System.out.format("Instance Schema Uri: %s\n", predictSchemata.getInstanceSchemaUri());
        System.out.format("Parameters Schema Uri: %s\n", predictSchemata.getParametersSchemaUri());
        System.out.format("Prediction Schema Uri: %s\n", predictSchemata.getPredictionSchemaUri());
        for (ExportFormat exportFormat : modelResponse.getSupportedExportFormatsList()) {
            System.out.println("Supported Export Format");
            System.out.format("Id: %s\n", exportFormat.getId());
        }
        ModelContainerSpec modelContainerSpec = modelResponse.getContainerSpec();
        System.out.println("Container Spec");
        System.out.format("Image Uri: %s\n", modelContainerSpec.getImageUri());
        System.out.format("Command: %s\n", modelContainerSpec.getCommandList());
        System.out.format("Args: %s\n", modelContainerSpec.getArgsList());
        System.out.format("Predict Route: %s\n", modelContainerSpec.getPredictRoute());
        System.out.format("Health Route: %s\n", modelContainerSpec.getHealthRoute());
        for (EnvVar envVar : modelContainerSpec.getEnvList()) {
            System.out.println("Env");
            System.out.format("Name: %s\n", envVar.getName());
            System.out.format("Value: %s\n", envVar.getValue());
        }
        for (Port port : modelContainerSpec.getPortsList()) {
            System.out.println("Port");
            System.out.format("Container Port: %s\n", port.getContainerPort());
        }
        for (DeployedModelRef deployedModelRef : modelResponse.getDeployedModelsList()) {
            System.out.println("Deployed Model");
            System.out.format("Endpoint: %s\n", deployedModelRef.getEndpoint());
            System.out.format("Deployed Model Id: %s\n", deployedModelRef.getDeployedModelId());
        }
        Status status = trainingPipelineResponse.getError();
        System.out.println("Error");
        System.out.format("Code: %s\n", status.getCode());
        System.out.format("Message: %s\n", status.getMessage());
    }
}
Also used : Status(com.google.rpc.Status) PredictSchemata(com.google.cloud.aiplatform.v1.PredictSchemata) TrainingPipeline(com.google.cloud.aiplatform.v1.TrainingPipeline) TimestampSplit(com.google.cloud.aiplatform.v1.TimestampSplit) Port(com.google.cloud.aiplatform.v1.Port) AutoMlImageObjectDetectionInputs(com.google.cloud.aiplatform.v1beta1.schema.trainingjob.definition.AutoMlImageObjectDetectionInputs) ExportFormat(com.google.cloud.aiplatform.v1.Model.ExportFormat) InputDataConfig(com.google.cloud.aiplatform.v1.InputDataConfig) LocationName(com.google.cloud.aiplatform.v1.LocationName) PredefinedSplit(com.google.cloud.aiplatform.v1.PredefinedSplit) FilterSplit(com.google.cloud.aiplatform.v1.FilterSplit) FractionSplit(com.google.cloud.aiplatform.v1.FractionSplit) ModelContainerSpec(com.google.cloud.aiplatform.v1.ModelContainerSpec) DeployedModelRef(com.google.cloud.aiplatform.v1.DeployedModelRef) Model(com.google.cloud.aiplatform.v1.Model) PipelineServiceSettings(com.google.cloud.aiplatform.v1.PipelineServiceSettings) EnvVar(com.google.cloud.aiplatform.v1.EnvVar) PipelineServiceClient(com.google.cloud.aiplatform.v1.PipelineServiceClient)
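createTrainingPipeline returns while training is still in progress, so the state printed above reflects the pipeline shortly after creation rather than its final result. A minimal polling sketch, not part of the original sample; pipelineName (the value of response.getName()) and the five-minute interval are illustrative, while getTrainingPipeline and PipelineState come from com.google.cloud.aiplatform.v1.

static void waitForTrainingPipeline(String pipelineName) throws IOException, InterruptedException {
    PipelineServiceSettings settings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
    try (PipelineServiceClient client = PipelineServiceClient.create(settings)) {
        while (true) {
            TrainingPipeline pipeline = client.getTrainingPipeline(pipelineName);
            System.out.format("State: %s\n", pipeline.getState());
            if (pipeline.getState() == PipelineState.PIPELINE_STATE_SUCCEEDED || pipeline.getState() == PipelineState.PIPELINE_STATE_FAILED || pipeline.getState() == PipelineState.PIPELINE_STATE_CANCELLED) {
                break;
            }
            // Training pipelines can run for hours; the polling interval is arbitrary.
            Thread.sleep(300_000);
        }
    }
}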

Example 9 with LocationName

use of com.google.cloud.aiplatform.v1.LocationName in project java-aiplatform by googleapis.

the class CreateTrainingPipelineTabularClassificationSample method createTrainingPipelineTableClassification.

static void createTrainingPipelineTableClassification(String project, String modelDisplayName, String datasetId, String targetColumn) throws IOException {
    PipelineServiceSettings pipelineServiceSettings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
    // the "close" method on the client to safely clean up any remaining background resources.
    try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create(pipelineServiceSettings)) {
        String location = "us-central1";
        LocationName locationName = LocationName.of(project, location);
        String trainingTaskDefinition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/automl_tables_1.0.0.yaml";
        // Set the columns used for training and their data types
        Transformation transformation1 = Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("sepal_width").build()).build();
        Transformation transformation2 = Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("sepal_length").build()).build();
        Transformation transformation3 = Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("petal_length").build()).build();
        Transformation transformation4 = Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("petal_width").build()).build();
        ArrayList<Transformation> transformationArrayList = new ArrayList<>();
        transformationArrayList.add(transformation1);
        transformationArrayList.add(transformation2);
        transformationArrayList.add(transformation3);
        transformationArrayList.add(transformation4);
        AutoMlTablesInputs autoMlTablesInputs = AutoMlTablesInputs.newBuilder().setTargetColumn(targetColumn).setPredictionType("classification").addAllTransformations(transformationArrayList).setTrainBudgetMilliNodeHours(8000).build();
        FractionSplit fractionSplit = FractionSplit.newBuilder().setTrainingFraction(0.8).setValidationFraction(0.1).setTestFraction(0.1).build();
        InputDataConfig inputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).setFractionSplit(fractionSplit).build();
        Model modelToUpload = Model.newBuilder().setDisplayName(modelDisplayName).build();
        TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().setDisplayName(modelDisplayName).setTrainingTaskDefinition(trainingTaskDefinition).setTrainingTaskInputs(ValueConverter.toValue(autoMlTablesInputs)).setInputDataConfig(inputDataConfig).setModelToUpload(modelToUpload).build();
        TrainingPipeline trainingPipelineResponse = pipelineServiceClient.createTrainingPipeline(locationName, trainingPipeline);
        System.out.println("Create Training Pipeline Tabular Classification Response");
        System.out.format("\tName: %s\n", trainingPipelineResponse.getName());
        System.out.format("\tDisplay Name: %s\n", trainingPipelineResponse.getDisplayName());
        System.out.format("\tTraining Task Definition: %s\n", trainingPipelineResponse.getTrainingTaskDefinition());
        System.out.format("\tTraining Task Inputs: %s\n", trainingPipelineResponse.getTrainingTaskInputs());
        System.out.format("\tTraining Task Metadata: %s\n", trainingPipelineResponse.getTrainingTaskMetadata());
        System.out.format("\tState: %s\n", trainingPipelineResponse.getState());
        System.out.format("\tCreate Time: %s\n", trainingPipelineResponse.getCreateTime());
        System.out.format("\tStart Time: %s\n", trainingPipelineResponse.getStartTime());
        System.out.format("\tEnd Time: %s\n", trainingPipelineResponse.getEndTime());
        System.out.format("\tUpdate Time: %s\n", trainingPipelineResponse.getUpdateTime());
        System.out.format("\tLabels: %s\n", trainingPipelineResponse.getLabelsMap());
        InputDataConfig inputDataConfigResponse = trainingPipelineResponse.getInputDataConfig();
        System.out.println("\tInput Data Config");
        System.out.format("\t\tDataset Id: %s\n", inputDataConfigResponse.getDatasetId());
        System.out.format("\t\tAnnotations Filter: %s\n", inputDataConfigResponse.getAnnotationsFilter());
        FractionSplit fractionSplitResponse = inputDataConfigResponse.getFractionSplit();
        System.out.println("\t\tFraction Split");
        System.out.format("\t\t\tTraining Fraction: %s\n", fractionSplitResponse.getTrainingFraction());
        System.out.format("\t\t\tValidation Fraction: %s\n", fractionSplitResponse.getValidationFraction());
        System.out.format("\t\t\tTest Fraction: %s\n", fractionSplitResponse.getTestFraction());
        FilterSplit filterSplit = inputDataConfigResponse.getFilterSplit();
        System.out.println("\t\tFilter Split");
        System.out.format("\t\t\tTraining Fraction: %s\n", filterSplit.getTrainingFilter());
        System.out.format("\t\t\tValidation Fraction: %s\n", filterSplit.getValidationFilter());
        System.out.format("\t\t\tTest Fraction: %s\n", filterSplit.getTestFilter());
        PredefinedSplit predefinedSplit = inputDataConfigResponse.getPredefinedSplit();
        System.out.println("\t\tPredefined Split");
        System.out.format("\t\t\tKey: %s\n", predefinedSplit.getKey());
        TimestampSplit timestampSplit = inputDataConfigResponse.getTimestampSplit();
        System.out.println("\t\tTimestamp Split");
        System.out.format("\t\t\tTraining Fraction: %s\n", timestampSplit.getTrainingFraction());
        System.out.format("\t\t\tValidation Fraction: %s\n", timestampSplit.getValidationFraction());
        System.out.format("\t\t\tTest Fraction: %s\n", timestampSplit.getTestFraction());
        System.out.format("\t\t\tKey: %s\n", timestampSplit.getKey());
        Model modelResponse = trainingPipelineResponse.getModelToUpload();
        System.out.println("\tModel To Upload");
        System.out.format("\t\tName: %s\n", modelResponse.getName());
        System.out.format("\t\tDisplay Name: %s\n", modelResponse.getDisplayName());
        System.out.format("\t\tDescription: %s\n", modelResponse.getDescription());
        System.out.format("\t\tMetadata Schema Uri: %s\n", modelResponse.getMetadataSchemaUri());
        System.out.format("\t\tMeta Data: %s\n", modelResponse.getMetadata());
        System.out.format("\t\tTraining Pipeline: %s\n", modelResponse.getTrainingPipeline());
        System.out.format("\t\tArtifact Uri: %s\n", modelResponse.getArtifactUri());
        System.out.format("\t\tSupported Deployment Resources Types: %s\n", modelResponse.getSupportedDeploymentResourcesTypesList().toString());
        System.out.format("\t\tSupported Input Storage Formats: %s\n", modelResponse.getSupportedInputStorageFormatsList().toString());
        System.out.format("\t\tSupported Output Storage Formats: %s\n", modelResponse.getSupportedOutputStorageFormatsList().toString());
        System.out.format("\t\tCreate Time: %s\n", modelResponse.getCreateTime());
        System.out.format("\t\tUpdate Time: %s\n", modelResponse.getUpdateTime());
        System.out.format("\t\tLables: %s\n", modelResponse.getLabelsMap());
        PredictSchemata predictSchemata = modelResponse.getPredictSchemata();
        System.out.println("\tPredict Schemata");
        System.out.format("\t\tInstance Schema Uri: %s\n", predictSchemata.getInstanceSchemaUri());
        System.out.format("\t\tParameters Schema Uri: %s\n", predictSchemata.getParametersSchemaUri());
        System.out.format("\t\tPrediction Schema Uri: %s\n", predictSchemata.getPredictionSchemaUri());
        for (Model.ExportFormat supportedExportFormat : modelResponse.getSupportedExportFormatsList()) {
            System.out.println("\tSupported Export Format");
            System.out.format("\t\tId: %s\n", supportedExportFormat.getId());
        }
        ModelContainerSpec containerSpec = modelResponse.getContainerSpec();
        System.out.println("\tContainer Spec");
        System.out.format("\t\tImage Uri: %s\n", containerSpec.getImageUri());
        System.out.format("\t\tCommand: %s\n", containerSpec.getCommandList());
        System.out.format("\t\tArgs: %s\n", containerSpec.getArgsList());
        System.out.format("\t\tPredict Route: %s\n", containerSpec.getPredictRoute());
        System.out.format("\t\tHealth Route: %s\n", containerSpec.getHealthRoute());
        for (EnvVar envVar : containerSpec.getEnvList()) {
            System.out.println("\t\tEnv");
            System.out.format("\t\t\tName: %s\n", envVar.getName());
            System.out.format("\t\t\tValue: %s\n", envVar.getValue());
        }
        for (Port port : containerSpec.getPortsList()) {
            System.out.println("\t\tPort");
            System.out.format("\t\t\tContainer Port: %s\n", port.getContainerPort());
        }
        for (DeployedModelRef deployedModelRef : modelResponse.getDeployedModelsList()) {
            System.out.println("\tDeployed Model");
            System.out.format("\t\tEndpoint: %s\n", deployedModelRef.getEndpoint());
            System.out.format("\t\tDeployed Model Id: %s\n", deployedModelRef.getDeployedModelId());
        }
        Status status = trainingPipelineResponse.getError();
        System.out.println("\tError");
        System.out.format("\t\tCode: %s\n", status.getCode());
        System.out.format("\t\tMessage: %s\n", status.getMessage());
    }
}
Also used : Status(com.google.rpc.Status) PredictSchemata(com.google.cloud.aiplatform.v1.PredictSchemata) Transformation(com.google.cloud.aiplatform.v1.schema.trainingjob.definition.AutoMlTablesInputs.Transformation) AutoTransformation(com.google.cloud.aiplatform.v1.schema.trainingjob.definition.AutoMlTablesInputs.Transformation.AutoTransformation) TrainingPipeline(com.google.cloud.aiplatform.v1.TrainingPipeline) TimestampSplit(com.google.cloud.aiplatform.v1.TimestampSplit) Port(com.google.cloud.aiplatform.v1.Port) ArrayList(java.util.ArrayList) InputDataConfig(com.google.cloud.aiplatform.v1.InputDataConfig) LocationName(com.google.cloud.aiplatform.v1.LocationName) PredefinedSplit(com.google.cloud.aiplatform.v1.PredefinedSplit) FilterSplit(com.google.cloud.aiplatform.v1.FilterSplit) FractionSplit(com.google.cloud.aiplatform.v1.FractionSplit) ModelContainerSpec(com.google.cloud.aiplatform.v1.ModelContainerSpec) AutoMlTablesInputs(com.google.cloud.aiplatform.v1.schema.trainingjob.definition.AutoMlTablesInputs) DeployedModelRef(com.google.cloud.aiplatform.v1.DeployedModelRef) Model(com.google.cloud.aiplatform.v1.Model) PipelineServiceSettings(com.google.cloud.aiplatform.v1.PipelineServiceSettings) EnvVar(com.google.cloud.aiplatform.v1.EnvVar) PipelineServiceClient(com.google.cloud.aiplatform.v1.PipelineServiceClient)
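The four Transformation builders above differ only in the column name. Here is a small sketch of the same list built with a helper loop; the helper name autoTransformations is illustrative, while Transformation and AutoTransformation are the AutoMlTablesInputs types already used in the sample (plus java.util.List).

static List<Transformation> autoTransformations(String... columnNames) {
    List<Transformation> transformations = new ArrayList<>();
    for (String columnName : columnNames) {
        // One AUTO transformation per column, matching the builders in the sample.
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName(columnName).build()).build());
    }
    return transformations;
}

Called as autoTransformations("sepal_width", "sepal_length", "petal_length", "petal_width"), it produces the same list as transformationArrayList above.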

Example 10 with LocationName

use of com.google.cloud.aiplatform.v1.LocationName in project java-aiplatform by googleapis.

the class CreateTrainingPipelineTabularRegressionSample method createTrainingPipelineTableRegression.

static void createTrainingPipelineTableRegression(String project, String modelDisplayName, String datasetId, String targetColumn) throws IOException {
    PipelineServiceSettings pipelineServiceSettings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
    // the "close" method on the client to safely clean up any remaining background resources.
    try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create(pipelineServiceSettings)) {
        String location = "us-central1";
        LocationName locationName = LocationName.of(project, location);
        String trainingTaskDefinition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/automl_tables_1.0.0.yaml";
        // Set the columns used for training and their data types
        ArrayList<Transformation> transformations = new ArrayList<>();
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRING_5000unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("INTEGER_5000unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("FLOAT_5000unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("FLOAT_5000unique_REPEATED")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("NUMERIC_5000unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("BOOLEAN_2unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setTimestamp(TimestampTransformation.newBuilder().setColumnName("TIMESTAMP_1unique_NULLABLE").setInvalidValuesAllowed(true)).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("DATE_1unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("TIME_1unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setTimestamp(TimestampTransformation.newBuilder().setColumnName("DATETIME_1unique_NULLABLE").setInvalidValuesAllowed(true)).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.STRING_5000unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.INTEGER_5000unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.FLOAT_5000unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.FLOAT_5000unique_REQUIRED")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.FLOAT_5000unique_REPEATED")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.NUMERIC_5000unique_NULLABLE")).build());
        transformations.add(Transformation.newBuilder().setAuto(AutoTransformation.newBuilder().setColumnName("STRUCT_NULLABLE.TIMESTAMP_1unique_NULLABLE")).build());
        AutoMlTablesInputs trainingTaskInputs = AutoMlTablesInputs.newBuilder().addAllTransformations(transformations).setTargetColumn(targetColumn).setPredictionType("regression").setTrainBudgetMilliNodeHours(8000).setDisableEarlyStopping(false).setOptimizationObjective("minimize-rmse").build();
        FractionSplit fractionSplit = FractionSplit.newBuilder().setTrainingFraction(0.8).setValidationFraction(0.1).setTestFraction(0.1).build();
        InputDataConfig inputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).setFractionSplit(fractionSplit).build();
        Model modelToUpload = Model.newBuilder().setDisplayName(modelDisplayName).build();
        TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().setDisplayName(modelDisplayName).setTrainingTaskDefinition(trainingTaskDefinition).setTrainingTaskInputs(ValueConverter.toValue(trainingTaskInputs)).setInputDataConfig(inputDataConfig).setModelToUpload(modelToUpload).build();
        TrainingPipeline trainingPipelineResponse = pipelineServiceClient.createTrainingPipeline(locationName, trainingPipeline);
        System.out.println("Create Training Pipeline Tabular Regression Response");
        System.out.format("\tName: %s\n", trainingPipelineResponse.getName());
        System.out.format("\tDisplay Name: %s\n", trainingPipelineResponse.getDisplayName());
        System.out.format("\tTraining Task Definition: %s\n", trainingPipelineResponse.getTrainingTaskDefinition());
        System.out.format("\tTraining Task Inputs: %s\n", trainingPipelineResponse.getTrainingTaskInputs());
        System.out.format("\tTraining Task Metadata: %s\n", trainingPipelineResponse.getTrainingTaskMetadata());
        System.out.format("\tState: %s\n", trainingPipelineResponse.getState());
        System.out.format("\tCreate Time: %s\n", trainingPipelineResponse.getCreateTime());
        System.out.format("\tStart Time: %s\n", trainingPipelineResponse.getStartTime());
        System.out.format("\tEnd Time: %s\n", trainingPipelineResponse.getEndTime());
        System.out.format("\tUpdate Time: %s\n", trainingPipelineResponse.getUpdateTime());
        System.out.format("\tLabels: %s\n", trainingPipelineResponse.getLabelsMap());
        InputDataConfig inputDataConfigResponse = trainingPipelineResponse.getInputDataConfig();
        System.out.println("\tInput Data Config");
        System.out.format("\t\tDataset Id: %s\n", inputDataConfigResponse.getDatasetId());
        System.out.format("\t\tAnnotations Filter: %s\n", inputDataConfigResponse.getAnnotationsFilter());
        FractionSplit fractionSplitResponse = inputDataConfigResponse.getFractionSplit();
        System.out.println("\t\tFraction Split");
        System.out.format("\t\t\tTraining Fraction: %s\n", fractionSplitResponse.getTrainingFraction());
        System.out.format("\t\t\tValidation Fraction: %s\n", fractionSplitResponse.getValidationFraction());
        System.out.format("\t\t\tTest Fraction: %s\n", fractionSplitResponse.getTestFraction());
        FilterSplit filterSplit = inputDataConfigResponse.getFilterSplit();
        System.out.println("\t\tFilter Split");
        System.out.format("\t\t\tTraining Fraction: %s\n", filterSplit.getTrainingFilter());
        System.out.format("\t\t\tValidation Fraction: %s\n", filterSplit.getValidationFilter());
        System.out.format("\t\t\tTest Fraction: %s\n", filterSplit.getTestFilter());
        PredefinedSplit predefinedSplit = inputDataConfigResponse.getPredefinedSplit();
        System.out.println("\t\tPredefined Split");
        System.out.format("\t\t\tKey: %s\n", predefinedSplit.getKey());
        TimestampSplit timestampSplit = inputDataConfigResponse.getTimestampSplit();
        System.out.println("\t\tTimestamp Split");
        System.out.format("\t\t\tTraining Fraction: %s\n", timestampSplit.getTrainingFraction());
        System.out.format("\t\t\tValidation Fraction: %s\n", timestampSplit.getValidationFraction());
        System.out.format("\t\t\tTest Fraction: %s\n", timestampSplit.getTestFraction());
        System.out.format("\t\t\tKey: %s\n", timestampSplit.getKey());
        Model modelResponse = trainingPipelineResponse.getModelToUpload();
        System.out.println("\tModel To Upload");
        System.out.format("\t\tName: %s\n", modelResponse.getName());
        System.out.format("\t\tDisplay Name: %s\n", modelResponse.getDisplayName());
        System.out.format("\t\tDescription: %s\n", modelResponse.getDescription());
        System.out.format("\t\tMetadata Schema Uri: %s\n", modelResponse.getMetadataSchemaUri());
        System.out.format("\t\tMeta Data: %s\n", modelResponse.getMetadata());
        System.out.format("\t\tTraining Pipeline: %s\n", modelResponse.getTrainingPipeline());
        System.out.format("\t\tArtifact Uri: %s\n", modelResponse.getArtifactUri());
        System.out.format("\t\tSupported Deployment Resources Types: %s\n", modelResponse.getSupportedDeploymentResourcesTypesList().toString());
        System.out.format("\t\tSupported Input Storage Formats: %s\n", modelResponse.getSupportedInputStorageFormatsList().toString());
        System.out.format("\t\tSupported Output Storage Formats: %s\n", modelResponse.getSupportedOutputStorageFormatsList().toString());
        System.out.format("\t\tCreate Time: %s\n", modelResponse.getCreateTime());
        System.out.format("\t\tUpdate Time: %s\n", modelResponse.getUpdateTime());
        System.out.format("\t\tLables: %s\n", modelResponse.getLabelsMap());
        PredictSchemata predictSchemata = modelResponse.getPredictSchemata();
        System.out.println("\tPredict Schemata");
        System.out.format("\t\tInstance Schema Uri: %s\n", predictSchemata.getInstanceSchemaUri());
        System.out.format("\t\tParameters Schema Uri: %s\n", predictSchemata.getParametersSchemaUri());
        System.out.format("\t\tPrediction Schema Uri: %s\n", predictSchemata.getPredictionSchemaUri());
        for (Model.ExportFormat supportedExportFormat : modelResponse.getSupportedExportFormatsList()) {
            System.out.println("\tSupported Export Format");
            System.out.format("\t\tId: %s\n", supportedExportFormat.getId());
        }
        ModelContainerSpec containerSpec = modelResponse.getContainerSpec();
        System.out.println("\tContainer Spec");
        System.out.format("\t\tImage Uri: %s\n", containerSpec.getImageUri());
        System.out.format("\t\tCommand: %s\n", containerSpec.getCommandList());
        System.out.format("\t\tArgs: %s\n", containerSpec.getArgsList());
        System.out.format("\t\tPredict Route: %s\n", containerSpec.getPredictRoute());
        System.out.format("\t\tHealth Route: %s\n", containerSpec.getHealthRoute());
        for (EnvVar envVar : containerSpec.getEnvList()) {
            System.out.println("\t\tEnv");
            System.out.format("\t\t\tName: %s\n", envVar.getName());
            System.out.format("\t\t\tValue: %s\n", envVar.getValue());
        }
        for (Port port : containerSpec.getPortsList()) {
            System.out.println("\t\tPort");
            System.out.format("\t\t\tContainer Port: %s\n", port.getContainerPort());
        }
        for (DeployedModelRef deployedModelRef : modelResponse.getDeployedModelsList()) {
            System.out.println("\tDeployed Model");
            System.out.format("\t\tEndpoint: %s\n", deployedModelRef.getEndpoint());
            System.out.format("\t\tDeployed Model Id: %s\n", deployedModelRef.getDeployedModelId());
        }
        Status status = trainingPipelineResponse.getError();
        System.out.println("\tError");
        System.out.format("\t\tCode: %s\n", status.getCode());
        System.out.format("\t\tMessage: %s\n", status.getMessage());
    }
}
Also used : Status(com.google.rpc.Status) PredictSchemata(com.google.cloud.aiplatform.v1.PredictSchemata) AutoTransformation(com.google.cloud.aiplatform.v1beta1.schema.trainingjob.definition.AutoMlTablesInputs.Transformation.AutoTransformation) TimestampTransformation(com.google.cloud.aiplatform.v1beta1.schema.trainingjob.definition.AutoMlTablesInputs.Transformation.TimestampTransformation) Transformation(com.google.cloud.aiplatform.v1beta1.schema.trainingjob.definition.AutoMlTablesInputs.Transformation) TrainingPipeline(com.google.cloud.aiplatform.v1.TrainingPipeline) TimestampSplit(com.google.cloud.aiplatform.v1.TimestampSplit) Port(com.google.cloud.aiplatform.v1.Port) ArrayList(java.util.ArrayList) InputDataConfig(com.google.cloud.aiplatform.v1.InputDataConfig) LocationName(com.google.cloud.aiplatform.v1.LocationName) PredefinedSplit(com.google.cloud.aiplatform.v1.PredefinedSplit) FilterSplit(com.google.cloud.aiplatform.v1.FilterSplit) FractionSplit(com.google.cloud.aiplatform.v1.FractionSplit) ModelContainerSpec(com.google.cloud.aiplatform.v1.ModelContainerSpec) AutoMlTablesInputs(com.google.cloud.aiplatform.v1beta1.schema.trainingjob.definition.AutoMlTablesInputs) DeployedModelRef(com.google.cloud.aiplatform.v1.DeployedModelRef) Model(com.google.cloud.aiplatform.v1.Model) PipelineServiceSettings(com.google.cloud.aiplatform.v1.PipelineServiceSettings) EnvVar(com.google.cloud.aiplatform.v1.EnvVar) PipelineServiceClient(com.google.cloud.aiplatform.v1.PipelineServiceClient)
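A hedged invocation sketch for the regression sample; every argument value below is a placeholder rather than a value taken from the original code.

public static void main(String[] args) throws IOException {
    // Replace these placeholders with real values before running.
    String project = "your-project-id";
    String modelDisplayName = "tabular-regression-model";
    String datasetId = "1234567890123456789";
    String targetColumn = "your-target-column";
    createTrainingPipelineTableRegression(project, modelDisplayName, datasetId, targetColumn);
}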

Aggregations

LocationName (com.google.cloud.aiplatform.v1.LocationName): 36
Test (org.junit.Test): 34
LocationName (com.google.privacy.dlp.v2.LocationName): 22
OrganizationLocationName (com.google.privacy.dlp.v2.OrganizationLocationName): 22
LocationName (com.google.cloud.translate.v3beta1.LocationName): 18
TranslationServiceClient (com.google.cloud.translate.v3beta1.TranslationServiceClient): 18
AutoMlClient (com.google.cloud.automl.v1.AutoMlClient): 17
LocationName (com.google.cloud.automl.v1.LocationName): 17
AbstractMessage (com.google.protobuf.AbstractMessage): 17
JobServiceClient (com.google.cloud.aiplatform.v1.JobServiceClient): 15
JobServiceSettings (com.google.cloud.aiplatform.v1.JobServiceSettings): 15
Value (com.google.protobuf.Value): 15
InvalidArgumentException (com.google.api.gax.rpc.InvalidArgumentException): 14
Model (com.google.cloud.aiplatform.v1.Model): 14
StatusRuntimeException (io.grpc.StatusRuntimeException): 14
PipelineServiceClient (com.google.cloud.aiplatform.v1.PipelineServiceClient): 13
PipelineServiceSettings (com.google.cloud.aiplatform.v1.PipelineServiceSettings): 13
TrainingPipeline (com.google.cloud.aiplatform.v1.TrainingPipeline): 13
LocationName (com.google.cloud.translate.v3.LocationName): 13
TranslationServiceClient (com.google.cloud.translate.v3.TranslationServiceClient): 13