Use of BigQuerySource in the googleapis project java-automl: class TablesImportDataset, method importDataset. (NOTE: the aggregation labels this com.google.cloud.aiplatform.v1.BigQuerySource, but java-automl samples normally use com.google.cloud.automl.v1.BigQuerySource — verify the fully qualified name.)
// Import a dataset into AutoML Tables from either BigQuery or Google Cloud Storage.
static void importDataset(String projectId, String datasetId, String path) throws IOException, ExecutionException, InterruptedException {
// Initialize the client; try-with-resources invokes "close" so any remaining
// background resources are cleaned up when the import completes.
try (AutoMlClient client = AutoMlClient.create()) {
// Build the fully qualified resource name of the target dataset.
DatasetName fullDatasetName = DatasetName.of(projectId, "us-central1", datasetId);
// Pick the input source from the path's scheme: a "bq" prefix means BigQuery,
// anything else is treated as one or more comma-separated GCS URIs.
InputConfig.Builder config = InputConfig.newBuilder();
if (!path.startsWith("bq")) {
// Collect each Cloud Storage URI listed in the comma-separated path.
GcsSource.Builder gcsSource = GcsSource.newBuilder();
for (String uri : path.split(",")) {
gcsSource.addInputUris(uri);
}
config.setGcsSource(gcsSource);
} else {
// Training data comes from a single BigQuery table URI.
config.setBigquerySource(BigQuerySource.newBuilder().setInputUri(path));
}
// Start the long-running import and block until it finishes.
System.out.println("Processing import...");
Empty response = client.importDataAsync(fullDatasetName, config.build()).get();
System.out.format("Dataset imported. %s%n", response);
}
}
Use of BigQuerySource in the googleapis project java-retail: class ImportProductsBigQueryTable, method getImportProductsBigQueryRequest. (NOTE: labeled com.google.cloud.aiplatform.v1.BigQuerySource by the aggregator, but java-retail samples normally use com.google.cloud.retail.v2.BigQuerySource — verify.)
/**
 * Builds an ImportProductsRequest that reads product rows from a BigQuery table.
 *
 * @param reconciliationMode how imported products are reconciled with existing ones
 * @param projectId GCP project owning the BigQuery dataset
 * @param datasetId BigQuery dataset containing the product table
 * @param tableId BigQuery table to read products from
 * @param dataSchema schema name the Retail API should use to interpret the rows
 * @param branchName full resource name of the catalog branch to import into
 * @return the assembled request, also printed to stdout for inspection
 */
public static ImportProductsRequest getImportProductsBigQueryRequest(ReconciliationMode reconciliationMode, String projectId, String datasetId, String tableId, String dataSchema, String branchName) {
// Describe the BigQuery table that holds the product rows.
BigQuerySource.Builder source = BigQuerySource.newBuilder()
    .setProjectId(projectId)
    .setDatasetId(datasetId)
    .setTableId(tableId)
    .setDataSchema(dataSchema);
// Wrap the source in the input-config message the request expects.
ProductInputConfig.Builder input = ProductInputConfig.newBuilder().setBigQuerySource(source);
ImportProductsRequest request = ImportProductsRequest.newBuilder()
    .setParent(branchName)
    .setReconciliationMode(reconciliationMode)
    .setInputConfig(input)
    .build();
System.out.printf("Import products from big query table request: %s%n", request);
return request;
}
Use of BigQuerySource in the googleapis project java-retail: class ImportUserEventsBigQuery, method importUserEventsFromBigQuery. (NOTE: labeled com.google.cloud.aiplatform.v1.BigQuerySource by the aggregator, but this snippet likely uses com.google.cloud.retail.v2.BigQuerySource — verify.)
/**
 * Imports user events for the given catalog from a BigQuery table, waits for the
 * long-running import operation to finish, then prints success/failure counts.
 *
 * @param projectId GCP project that owns the BigQuery dataset
 * @param defaultCatalog full resource name of the Retail catalog to import into
 * @param datasetId BigQuery dataset holding the user-event table
 * @param tableId BigQuery table to read events from
 * @throws IOException if the client cannot be created or a response cannot be unpacked
 * @throws InterruptedException if the polling sleep is interrupted
 */
public static void importUserEventsFromBigQuery(String projectId, String defaultCatalog, String datasetId, String tableId) throws IOException, InterruptedException {
try {
// The Retail API expects the table to follow the "user_event" schema.
String dataSchema = "user_event";
BigQuerySource bigQuerySource = BigQuerySource.newBuilder().setProjectId(projectId).setDatasetId(datasetId).setTableId(tableId).setDataSchema(dataSchema).build();
UserEventInputConfig inputConfig = UserEventInputConfig.newBuilder().setBigQuerySource(bigQuerySource).build();
ImportUserEventsRequest importRequest = ImportUserEventsRequest.newBuilder().setParent(defaultCatalog).setInputConfig(inputConfig).build();
System.out.printf("Import user events from BigQuery source request: %s%n", importRequest);
// try-with-resources calls "close" on the client to clean up background resources.
try (UserEventServiceClient serviceClient = UserEventServiceClient.create()) {
String operationName = serviceClient.importUserEventsCallable().call(importRequest).getName();
System.out.printf("OperationName = %s\n", operationName);
Operation operation = awaitOperationDone(serviceClient.getOperationsClient(), operationName);
if (operation.hasMetadata()) {
ImportMetadata metadata = operation.getMetadata().unpack(ImportMetadata.class);
System.out.printf("Number of successfully imported events: %s\n", metadata.getSuccessCount());
System.out.printf("Number of failures during the importing: %s\n", metadata.getFailureCount());
}
if (operation.hasResponse()) {
ImportUserEventsResponse response = operation.getResponse().unpack(ImportUserEventsResponse.class);
System.out.printf("Operation result: %s%n", response);
}
}
} catch (BigQueryException e) {
// Fix: the original printed only e.getMessage() to stdout, silently discarding
// the stack trace and cause. Report on stderr and keep the full trace.
System.err.printf("BigQuery import failed: %s%n", e.getMessage());
e.printStackTrace();
}
}

/** Polls the named long-running operation every 30 seconds until it reports done. */
private static Operation awaitOperationDone(OperationsClient operationsClient, String operationName) throws InterruptedException {
// Polling interval; matches the original sample's 30000 ms await duration.
int pollIntervalMillis = 30_000;
Operation operation = operationsClient.getOperation(operationName);
while (!operation.getDone()) {
Thread.sleep(pollIntervalMillis);
operation = operationsClient.getOperation(operationName);
}
return operation;
}
Use of com.google.cloud.aiplatform.v1.BigQuerySource in the googleapis project java-aiplatform: class CreateBatchPredictionJobBigquerySample, method createBatchPredictionJobBigquerySample.
// Create a Vertex AI batch prediction job whose input and output both live in BigQuery.
static void createBatchPredictionJobBigquerySample(String project, String displayName, String model, String instancesFormat, String bigquerySourceInputUri, String predictionsFormat, String bigqueryDestinationOutputUri) throws IOException {
String location = "us-central1";
JobServiceSettings settings = JobServiceSettings.newBuilder()
    .setEndpoint("us-central1-aiplatform.googleapis.com:443")
    .build();
// try-with-resources calls "close" on the client to release background resources.
try (JobServiceClient client = JobServiceClient.create(settings)) {
// No model parameters are supplied; parse an empty JSON object into a protobuf Value.
Value.Builder paramsBuilder = Value.newBuilder();
JsonFormat.parser().merge(new JsonObject().toString(), paramsBuilder);
// Input: instances read from the given BigQuery table URI.
BatchPredictionJob.InputConfig inputConfig = BatchPredictionJob.InputConfig.newBuilder()
    .setInstancesFormat(instancesFormat)
    .setBigquerySource(BigQuerySource.newBuilder().setInputUri(bigquerySourceInputUri))
    .build();
// Output: predictions written to the given BigQuery destination URI.
BatchPredictionJob.OutputConfig outputConfig = BatchPredictionJob.OutputConfig.newBuilder()
    .setPredictionsFormat(predictionsFormat)
    .setBigqueryDestination(BigQueryDestination.newBuilder().setOutputUri(bigqueryDestinationOutputUri))
    .build();
BatchPredictionJob job = BatchPredictionJob.newBuilder()
    .setDisplayName(displayName)
    .setModel(ModelName.of(project, location, model).toString())
    .setModelParameters(paramsBuilder.build())
    .setInputConfig(inputConfig)
    .setOutputConfig(outputConfig)
    .build();
BatchPredictionJob response = client.createBatchPredictionJob(LocationName.of(project, location), job);
System.out.format("response: %s\n", response);
System.out.format("\tName: %s\n", response.getName());
}
}
Aggregations