Use of com.google.cloud.automl.v1.Dataset in project java-automl by googleapis.
The class DeleteDataset, method deleteDataset.
// Delete a dataset
static void deleteDataset(String projectId, String datasetId)
    throws IOException, ExecutionException, InterruptedException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (AutoMlClient client = AutoMlClient.create()) {
    // Get the full path of the dataset.
    DatasetName datasetFullId = DatasetName.of(projectId, "us-central1", datasetId);
    Empty response = client.deleteDatasetAsync(datasetFullId).get();
    System.out.format("Dataset deleted. %s\n", response);
  }
}
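For context, a caller might invoke this snippet as sketched below; this is a minimal sketch, and the project and dataset ids are placeholders rather than values taken from the sample.

// Hypothetical invocation of the sample above; both ids are placeholders.
String projectId = "YOUR_PROJECT_ID";
String datasetId = "YOUR_DATASET_ID";
deleteDataset(projectId, datasetId);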
Use of com.google.cloud.automl.v1.Dataset in project java-automl by googleapis.
The class ImportDataset, method importDataset.
// Import a dataset
static void importDataset(String projectId, String datasetId, String path)
    throws IOException, ExecutionException, InterruptedException, TimeoutException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (AutoMlClient client = AutoMlClient.create()) {
    // Get the complete path of the dataset.
    DatasetName datasetFullId = DatasetName.of(projectId, "us-central1", datasetId);
    // Get multiple Google Cloud Storage URIs to import data from
    GcsSource gcsSource =
        GcsSource.newBuilder().addAllInputUris(Arrays.asList(path.split(","))).build();
    // Import data from the input URI
    InputConfig inputConfig = InputConfig.newBuilder().setGcsSource(gcsSource).build();
    System.out.println("Processing import...");
    // Start the import job
    OperationFuture<Empty, OperationMetadata> operation =
        client.importDataAsync(datasetFullId, inputConfig);
    System.out.format("Operation name: %s%n", operation.getName());
    // If you want to wait for the operation to finish, adjust the timeout appropriately. The
    // operation will still run if you choose not to wait for it to complete. You can check the
    // status of your operation using the operation's name.
    Empty response = operation.get(45, TimeUnit.MINUTES);
    System.out.format("Dataset imported. %s%n", response);
  } catch (TimeoutException e) {
    System.out.println("The operation's polling period was not long enough.");
    System.out.println("You can use the Operation's name to get the current status.");
    System.out.println("The import job is still running and will complete as expected.");
    throw e;
  }
}
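The sample notes that the operation keeps running even if you stop waiting, and that its name can be used to check status later. Below is a minimal sketch of such a check, assuming the same AutoMlClient and the Operation type from com.google.longrunning; the helper and the operation-name value are not part of the original sample.

// Hypothetical helper: look up a long-running AutoML operation by its name.
static void checkOperationStatus(String operationName) throws IOException {
  try (AutoMlClient client = AutoMlClient.create()) {
    // The operations client exposes the generic long-running operations API.
    Operation operation = client.getOperationsClient().getOperation(operationName);
    System.out.format("Done: %s%n", operation.getDone());
    if (operation.hasError()) {
      System.out.format("Error: %s%n", operation.getError().getMessage());
    }
  }
}

Here operationName would be the value printed by the import sample, of the form projects/{project_id}/locations/{location_id}/operations/{operation_id}.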
Use of DataSet in project openhab1-addons by openhab.
The class Meter, method read.
/**
 * Reads data from the meter.
 *
 * @return a map of DataSet objects with the OBIS code as key.
 */
public Map<String, DataSet> read() {
  // the frequently executed code (polling) goes here ...
  Map<String, DataSet> dataSetMap = new HashMap<String, DataSet>();
  Connection connection = new Connection(config.getSerialPort(), config.getInitMessage(),
      config.getEchoHandling(), config.getBaudRateChangeDelay());
  try {
    try {
      connection.open();
    } catch (IOException e) {
      logger.error("Failed to open serial port {}: {}", config.getSerialPort(), e.getMessage());
      return dataSetMap;
    }
    List<DataSet> dataSets = null;
    try {
      dataSets = connection.read();
      for (DataSet dataSet : dataSets) {
        logger.debug("DataSet: {};{};{}", dataSet.getId(), dataSet.getValue(), dataSet.getUnit());
        dataSetMap.put(dataSet.getId(), dataSet);
      }
    } catch (IOException e) {
      logger.error("IOException while trying to read: {}", e.getMessage());
    } catch (TimeoutException e) {
      logger.error("Read attempt timed out");
    }
  } finally {
    connection.close();
  }
  return dataSetMap;
}
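A minimal usage sketch follows, assuming an already configured Meter instance named meter; the OBIS code used as the lookup key is a hypothetical example, not taken from the binding.

// Hypothetical caller: poll the meter and look up one reading by OBIS code.
Map<String, DataSet> values = meter.read();
DataSet reading = values.get("1.8.0"); // placeholder OBIS code
if (reading != null) {
  System.out.println(reading.getValue() + " " + reading.getUnit());
}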
Use of com.google.cloud.automl.v1.Dataset in project java-automl by googleapis.
The class LanguageTextClassificationCreateDataset, method createDataset.
// Create a dataset
static void createDataset(String projectId, String displayName)
    throws IOException, ExecutionException, InterruptedException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (AutoMlClient client = AutoMlClient.create()) {
    // A resource that represents Google Cloud Platform location.
    LocationName projectLocation = LocationName.of(projectId, "us-central1");
    // Specify the classification type
    // Types:
    // MultiLabel: Multiple labels are allowed for one example.
    // MultiClass: At most one label is allowed per example.
    ClassificationType classificationType = ClassificationType.MULTILABEL;
    // Specify the text classification type for the dataset.
    TextClassificationDatasetMetadata metadata =
        TextClassificationDatasetMetadata.newBuilder()
            .setClassificationType(classificationType)
            .build();
    Dataset dataset =
        Dataset.newBuilder()
            .setDisplayName(displayName)
            .setTextClassificationDatasetMetadata(metadata)
            .build();
    OperationFuture<Dataset, OperationMetadata> future =
        client.createDatasetAsync(projectLocation, dataset);
    Dataset createdDataset = future.get();
    // Display the dataset information.
    System.out.format("Dataset name: %s\n", createdDataset.getName());
    // To get the dataset id, you have to parse it out of the `name` field, as dataset ids are
    // required for other methods.
    // Name form: `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`
    String[] names = createdDataset.getName().split("/");
    String datasetId = names[names.length - 1];
    System.out.format("Dataset id: %s\n", datasetId);
  }
}
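A hypothetical invocation is sketched below; both arguments are placeholders. To allow at most one label per example, ClassificationType.MULTICLASS could be set in place of MULTILABEL in the snippet above.

// Hypothetical invocation of the sample above; both arguments are placeholders.
createDataset("YOUR_PROJECT_ID", "my_text_classification_dataset");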
Use of com.google.cloud.automl.v1.Dataset in project java-automl by googleapis.
The class DatasetApi, method importData.
// [START automl_translate_import_data]
/**
 * Import sentence pairs to the dataset.
 *
 * @param projectId the Google Cloud Project ID.
 * @param computeRegion the region name (e.g., "us-central1").
 * @param datasetId the id of the dataset.
 * @param path the remote path of the training data CSV file.
 */
public static void importData(
    String projectId, String computeRegion, String datasetId, String path)
    throws IOException, InterruptedException, ExecutionException {
  // Instantiates a client
  try (AutoMlClient client = AutoMlClient.create()) {
    // Get the complete path of the dataset.
    DatasetName datasetFullId = DatasetName.of(projectId, computeRegion, datasetId);
    GcsSource.Builder gcsSource = GcsSource.newBuilder();
    // Get multiple Google Cloud Storage URIs to import data from
    String[] inputUris = path.split(",");
    for (String inputUri : inputUris) {
      gcsSource.addInputUris(inputUri);
    }
    // Import data from the input URI
    InputConfig inputConfig = InputConfig.newBuilder().setGcsSource(gcsSource).build();
    System.out.println("Processing import...");
    Empty response = client.importDataAsync(datasetFullId, inputConfig).get();
    System.out.println(String.format("Dataset imported. %s", response));
  }
}
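Because the method splits path on commas, several Google Cloud Storage URIs can be passed in a single string. A minimal sketch follows, with placeholder project, region, dataset id, and bucket paths.

// Hypothetical invocation; every argument below is a placeholder.
importData(
    "YOUR_PROJECT_ID",
    "us-central1",
    "YOUR_DATASET_ID",
    "gs://your-bucket/train_part1.csv,gs://your-bucket/train_part2.csv");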