Use of com.google.cloud.bigquery.datatransfer.v1.TransferConfig in project java-bigquerydatatransfer by googleapis.
The class CreateAmazonS3Transfer, method createAmazonS3Transfer.
public static void createAmazonS3Transfer(String projectId, TransferConfig transferConfig)
    throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be
  // created once, and can be reused for multiple requests.
  try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
    ProjectName parent = ProjectName.of(projectId);
    CreateTransferConfigRequest request =
        CreateTransferConfigRequest.newBuilder()
            .setParent(parent.toString())
            .setTransferConfig(transferConfig)
            .build();
    TransferConfig config = client.createTransferConfig(request);
    System.out.println("Amazon S3 transfer created successfully: " + config.getName());
  } catch (ApiException ex) {
    System.out.println("Amazon S3 transfer was not created: " + ex.toString());
  }
}
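For reference, the snippets on this page assume roughly the following imports. This is a minimal list inferred from the code itself, not copied from the source file:

import com.google.api.gax.rpc.ApiException;
import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ProjectName;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;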
Use of com.google.cloud.bigquery.datatransfer.v1.TransferConfig in project java-bigquerydatatransfer by googleapis.
The class CreateAmazonS3Transfer, method main.
public static void main(String[] args) throws IOException {
  // TODO(developer): Replace these variables before running the sample.
  final String projectId = "MY_PROJECT_ID";
  String datasetId = "MY_DATASET_ID";
  String tableId = "MY_TABLE_ID";
  // Amazon S3 bucket URI with read permission
  String sourceUri = "s3://your-bucket-name/*";
  String awsAccessKeyId = "MY_AWS_ACCESS_KEY_ID";
  String awsSecretAccessId = "MY_AWS_SECRET_ACCESS_ID";
  String sourceFormat = "CSV";
  String fieldDelimiter = ",";
  String skipLeadingRows = "1";
  // Transfer parameters are passed as a proto Struct keyed by parameter name.
  Map<String, Value> params = new HashMap<>();
  params.put(
      "destination_table_name_template", Value.newBuilder().setStringValue(tableId).build());
  params.put("data_path", Value.newBuilder().setStringValue(sourceUri).build());
  params.put("access_key_id", Value.newBuilder().setStringValue(awsAccessKeyId).build());
  params.put("secret_access_key", Value.newBuilder().setStringValue(awsSecretAccessId).build());
  params.put("source_format", Value.newBuilder().setStringValue(sourceFormat).build());
  params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build());
  params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build());
  TransferConfig transferConfig =
      TransferConfig.newBuilder()
          .setDestinationDatasetId(datasetId)
          .setDisplayName("Your Aws S3 Config Name")
          .setDataSourceId("amazon_s3")
          .setParams(Struct.newBuilder().putAllFields(params).build())
          .setSchedule("every 24 hours")
          .build();
  createAmazonS3Transfer(projectId, transferConfig);
}
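Creating the config only registers the recurring schedule ("every 24 hours"). To trigger a run immediately, the client also exposes startManualTransferRuns. The following is a minimal sketch, not part of the sample above: the config name is a placeholder, and the requested run time is simply "now" (imports for com.google.protobuf.Timestamp and java.time.Instant assumed):

// Sketch: request one immediate run of an existing transfer config.
try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
  // Placeholder; use the name returned by createTransferConfig.
  String configName = "projects/MY_PROJECT_ID/transferConfigs/MY_CONFIG_ID";
  StartManualTransferRunsRequest runRequest =
      StartManualTransferRunsRequest.newBuilder()
          .setParent(configName)
          .setRequestedRunTime(
              Timestamp.newBuilder().setSeconds(Instant.now().getEpochSecond()).build())
          .build();
  StartManualTransferRunsResponse runResponse = client.startManualTransferRuns(runRequest);
  System.out.println("Started " + runResponse.getRunsCount() + " manual run(s).");
}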
Use of com.google.cloud.bigquery.datatransfer.v1.TransferConfig in project java-bigquerydatatransfer by googleapis.
The class CreateCloudStorageTransfer, method createCloudStorageTransfer.
public static void createCloudStorageTransfer(String projectId, TransferConfig transferConfig)
    throws IOException {
  // Initialize client that will be used to send requests. This client only needs to be
  // created once, and can be reused for multiple requests.
  try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
    ProjectName parent = ProjectName.of(projectId);
    CreateTransferConfigRequest request =
        CreateTransferConfigRequest.newBuilder()
            .setParent(parent.toString())
            .setTransferConfig(transferConfig)
            .build();
    TransferConfig config = client.createTransferConfig(request);
    System.out.println("Cloud Storage transfer created successfully: " + config.getName());
  } catch (ApiException ex) {
    System.out.println("Cloud Storage transfer was not created: " + ex.toString());
  }
}
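After creation, a config can be fetched back by its resource name to verify its settings. A minimal sketch, assuming a placeholder config name:

// Sketch: fetch an existing transfer config by its resource name.
try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
  // Placeholder; use the name returned at creation time.
  String name = "projects/MY_PROJECT_ID/transferConfigs/MY_CONFIG_ID";
  TransferConfig config = client.getTransferConfig(name);
  System.out.println("Fetched config: " + config.getDisplayName());
}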
Use of com.google.cloud.bigquery.datatransfer.v1.TransferConfig in project java-bigquerydatatransfer by googleapis.
The class CreateCloudStorageTransfer, method main.
public static void main(String[] args) throws IOException {
  // TODO(developer): Replace these variables before running the sample.
  final String projectId = "MY_PROJECT_ID";
  String datasetId = "MY_DATASET_ID";
  String tableId = "MY_TABLE_ID";
  // Cloud Storage URI
  String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv";
  String fileFormat = "CSV";
  String fieldDelimiter = ",";
  String skipLeadingRows = "1";
  Map<String, Value> params = new HashMap<>();
  params.put(
      "destination_table_name_template", Value.newBuilder().setStringValue(tableId).build());
  params.put("data_path_template", Value.newBuilder().setStringValue(sourceUri).build());
  params.put("write_disposition", Value.newBuilder().setStringValue("APPEND").build());
  params.put("file_format", Value.newBuilder().setStringValue(fileFormat).build());
  params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build());
  params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build());
  TransferConfig transferConfig =
      TransferConfig.newBuilder()
          .setDestinationDatasetId(datasetId)
          .setDisplayName("Your Google Cloud Storage Config Name")
          .setDataSourceId("google_cloud_storage")
          .setParams(Struct.newBuilder().putAllFields(params).build())
          .setSchedule("every 24 hours")
          .build();
  createCloudStorageTransfer(projectId, transferConfig);
}
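For cleanup after experimenting with these samples, a transfer config can be deleted by resource name. A minimal sketch with a placeholder name:

// Sketch: delete a transfer config by its resource name (placeholder below).
try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
  String name = "projects/MY_PROJECT_ID/transferConfigs/MY_CONFIG_ID";
  client.deleteTransferConfig(name);
  System.out.println("Transfer config deleted: " + name);
}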
Use of com.google.cloud.bigquery.datatransfer.v1.TransferConfig in project java-bigquerydatatransfer by googleapis.
The class CreateRedshiftTransfer, method main.
public static void main(String[] args) throws IOException {
  // TODO(developer): Replace these variables before running the sample.
  final String projectId = "MY_PROJECT_ID";
  String datasetId = "MY_DATASET_ID";
  String datasetRegion = "US";
  String jdbcUrl = "MY_JDBC_URL_CONNECTION_REDSHIFT";
  String dbUserName = "MY_USERNAME";
  String dbPassword = "MY_PASSWORD";
  String accessKeyId = "MY_AWS_ACCESS_KEY_ID";
  String secretAccessId = "MY_AWS_SECRET_ACCESS_ID";
  String s3Bucket = "MY_S3_BUCKET_URI";
  String redShiftSchema = "MY_REDSHIFT_SCHEMA";
  String tableNamePatterns = "*";
  String vpcAndReserveIpRange = "MY_VPC_AND_IP_RANGE";
  Map<String, Value> params = new HashMap<>();
  params.put("jdbc_url", Value.newBuilder().setStringValue(jdbcUrl).build());
  params.put("database_username", Value.newBuilder().setStringValue(dbUserName).build());
  params.put("database_password", Value.newBuilder().setStringValue(dbPassword).build());
  params.put("access_key_id", Value.newBuilder().setStringValue(accessKeyId).build());
  params.put("secret_access_key", Value.newBuilder().setStringValue(secretAccessId).build());
  params.put("s3_bucket", Value.newBuilder().setStringValue(s3Bucket).build());
  params.put("redshift_schema", Value.newBuilder().setStringValue(redShiftSchema).build());
  params.put(
      "table_name_patterns", Value.newBuilder().setStringValue(tableNamePatterns).build());
  params.put(
      "migration_infra_cidr", Value.newBuilder().setStringValue(vpcAndReserveIpRange).build());
  TransferConfig transferConfig =
      TransferConfig.newBuilder()
          .setDestinationDatasetId(datasetId)
          .setDatasetRegion(datasetRegion)
          .setDisplayName("Your Redshift Config Name")
          .setDataSourceId("redshift")
          .setParams(Struct.newBuilder().putAllFields(params).build())
          .setSchedule("every 24 hours")
          .build();
  createRedshiftTransfer(projectId, transferConfig);
}
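The createRedshiftTransfer helper called above is not shown on this page. A sketch under the assumption that it mirrors the Amazon S3 and Cloud Storage helpers shown earlier:

// Sketch: create the Redshift transfer config, following the same pattern
// as createAmazonS3Transfer and createCloudStorageTransfer above.
public static void createRedshiftTransfer(String projectId, TransferConfig transferConfig)
    throws IOException {
  try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
    ProjectName parent = ProjectName.of(projectId);
    CreateTransferConfigRequest request =
        CreateTransferConfigRequest.newBuilder()
            .setParent(parent.toString())
            .setTransferConfig(transferConfig)
            .build();
    TransferConfig config = client.createTransferConfig(request);
    System.out.println("Redshift transfer created successfully: " + config.getName());
  } catch (ApiException ex) {
    System.out.println("Redshift transfer was not created: " + ex.toString());
  }
}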