use of org.knime.cloud.aws.util.AmazonConnectionInformationPortObject in project knime-cloud by knime.
the class BaseComprehendNodeModel method execute.
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    if (inObjects == null || inObjects.length != 2) {
        throw new InvalidSettingsException("Invalid input data. Expected two inputs.");
    }
    final CloudConnectionInformation cxnInfo =
        ((AmazonConnectionInformationPortObject) inObjects[CNX_PORT_IDX]).getConnectionInformation();
    LOGGER.info("Using region: " + cxnInfo.getHost());
    // Create computation object for this operation.
    final BufferedDataTable table = (BufferedDataTable) inObjects[DATA_PORT_IDX];
    final DataTableSpec inputTableSpec = table.getDataTableSpec();
    final ComprehendOperation op = getOperationInstance(cxnInfo,
        generateOutputTableSpec(inputTableSpec), m_textColumnName.getStringValue());
    // Run the operation over the entire input.
    return new BufferedDataTable[] { op.compute(exec, table) };
}
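The cast to AmazonConnectionInformationPortObject only works because the node declares the connection port at index CNX_PORT_IDX and the data table at DATA_PORT_IDX. A minimal sketch of how such a port layout might be declared in the node model constructor is shown below; the class name, the index values, and the assumption that the port object exposes a TYPE constant are illustrative, not copied from the project.

import org.knime.cloud.aws.util.AmazonConnectionInformationPortObject;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.NodeModel;
import org.knime.core.node.port.PortType;

// Sketch only, not the actual BaseComprehendNodeModel.
public abstract class ComprehendNodeModelSketch extends NodeModel {

    /** Index of the Amazon connection information input port (assumed). */
    static final int CNX_PORT_IDX = 0;

    /** Index of the data table input port (assumed). */
    static final int DATA_PORT_IDX = 1;

    protected ComprehendNodeModelSketch() {
        // One AWS connection port plus one data table in, one data table out.
        super(new PortType[]{AmazonConnectionInformationPortObject.TYPE, BufferedDataTable.TYPE},
            new PortType[]{BufferedDataTable.TYPE});
    }
}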
use of org.knime.cloud.aws.util.AmazonConnectionInformationPortObject in project knime-cloud by knime.
the class AmazonPersonalizeCreateCampaignNodeModel method execute.
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    final CloudConnectionInformation cxnInfo =
        ((AmazonConnectionInformationPortObject) inObjects[0]).getConnectionInformation();
    try (final AmazonPersonalizeConnection personalizeConnection = new AmazonPersonalizeConnection(cxnInfo)) {
        final AmazonPersonalize personalizeClient = personalizeConnection.getClient();
        final CreateCampaignRequest createCampaignRequest = new CreateCampaignRequest();
        final CreateCampaignResult campaign = personalizeClient.createCampaign(createCampaignRequest
            .withName(m_settings.getCampaignName())
            .withSolutionVersionArn(m_settings.getSolutionVersion().getARN())
            .withMinProvisionedTPS(m_settings.getMinProvisionedTPS()));
        // TODO Test update of existing campaign
        try {
            final DescribeCampaignRequest describeCampaignRequest =
                new DescribeCampaignRequest().withCampaignArn(campaign.getCampaignArn());
            AmazonPersonalizeUtils.waitUntilActive(() -> {
                final DescribeCampaignResult campaignDescription =
                    personalizeClient.describeCampaign(describeCampaignRequest);
                final String status = campaignDescription.getCampaign().getStatus();
                exec.setMessage("Creating campaign (Status: " + status + ")");
                if (status.equals(Status.CREATED_FAILED.getStatus())) {
                    personalizeClient.deleteCampaign(new DeleteCampaignRequest()
                        .withCampaignArn(campaignDescription.getCampaign().getCampaignArn()));
                    throw new IllegalStateException("No campaign has been created. Reason: "
                        + campaignDescription.getCampaign().getFailureReason());
                }
                return status.equals(Status.ACTIVE.getStatus());
            }, 1000);
        } catch (InterruptedException e) {
            // TODO
            throw e;
        }
        if (m_settings.isOutputCampaignArnAsVar()) {
            pushFlowVariableString("campaign-ARN", campaign.getCampaignArn());
        }
    }
    return null;
}
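AmazonPersonalizeUtils.waitUntilActive is used above (and again in the solution version node below) to block until Amazon Personalize reports the resource as ACTIVE, re-checking at a fixed interval and letting the lambda throw if creation failed. A rough sketch of that polling pattern, assuming the helper simply re-runs the supplied check with a sleep in between, might look like the following; it is not the project's actual utility.

import java.util.function.BooleanSupplier;

// Hypothetical polling helper; the real AmazonPersonalizeUtils may differ.
final class PollingSketch {

    private PollingSketch() {
    }

    /** Re-evaluates the check every delayMillis milliseconds until it returns true. */
    static void waitUntilActive(final BooleanSupplier isActiveCheck, final long delayMillis)
        throws InterruptedException {
        while (!isActiveCheck.getAsBoolean()) {
            Thread.sleep(delayMillis);
        }
    }
}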
use of org.knime.cloud.aws.util.AmazonConnectionInformationPortObject in project knime-cloud by knime.
the class AmazonPersonalizeCreateSolutionVersionNodeModel method execute.
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    final CloudConnectionInformation cxnInfo =
        ((AmazonConnectionInformationPortObject) inObjects[0]).getConnectionInformation();
    try (final AmazonPersonalizeConnection personalizeConnection = new AmazonPersonalizeConnection(cxnInfo)) {
        final AmazonPersonalize personalizeClient = personalizeConnection.getClient();
        // Create solution configuration or use existing one
        final String solutionArn;
        if (m_settings.isCreateNewSolution()) {
            solutionArn = createSolution(personalizeClient);
        } else {
            solutionArn = m_settings.getExistingSolution().getARN();
        }
        // Wait until the solution is active
        final DescribeSolutionRequest describeSolutionRequest =
            new DescribeSolutionRequest().withSolutionArn(solutionArn);
        AmazonPersonalizeUtils.waitUntilActive(() -> {
            DescribeSolutionResult describeSolution = personalizeClient.describeSolution(describeSolutionRequest);
            final String status = describeSolution.getSolution().getStatus();
            exec.setMessage("Creating solution configuration (Status: " + status + ")");
            return status.equals(Status.ACTIVE.getStatus());
        }, 100);
        exec.setProgress(0.5);
        // Create solution version
        final String solutionVersionArn = personalizeClient
            .createSolutionVersion(new CreateSolutionVersionRequest().withSolutionArn(solutionArn))
            .getSolutionVersionArn();
        // Wait until solution version is active (or failed)
        final DescribeSolutionVersionRequest describeSolutionVersionRequest =
            new DescribeSolutionVersionRequest().withSolutionVersionArn(solutionVersionArn);
        AmazonPersonalizeUtils.waitUntilActive(() -> {
            final DescribeSolutionVersionResult solutionVersionDescription =
                personalizeClient.describeSolutionVersion(describeSolutionVersionRequest);
            final String status = solutionVersionDescription.getSolutionVersion().getStatus();
            exec.setMessage("Creating solution version (Status: " + status + ")");
            if (status.equals(Status.CREATED_FAILED.getStatus())) {
                throw new IllegalStateException("No solution version has been created. Reason: "
                    + solutionVersionDescription.getSolutionVersion().getFailureReason());
            }
            return status.equals(Status.ACTIVE.getStatus());
        }, 2000);
        // Retrieve the recipe type to put it into the output
        final DescribeSolutionVersionResult solutionVersionDescription =
            personalizeClient.describeSolutionVersion(describeSolutionVersionRequest);
        final String recipeType = personalizeClient
            .describeRecipe(new DescribeRecipeRequest()
                .withRecipeArn(solutionVersionDescription.getSolutionVersion().getRecipeArn()))
            .getRecipe().getRecipeType();
        // final String recipeType = personalizeClient.describeRecipe(new DescribeRecipeRequest().withRecipeArn(
        //     personalizeClient.describeSolution(new DescribeSolutionRequest().withSolutionArn(solutionArn))
        //         .getSolution().getRecipeArn()))
        //     .getRecipe().getRecipeType();
        // Create output
        final Map<String, Double> metrics = personalizeClient
            .getSolutionMetrics(new GetSolutionMetricsRequest().withSolutionVersionArn(solutionVersionArn))
            .getMetrics();
        if (m_settings.isOutputSolutionVersionArnAsVar()) {
            pushFlowVariableString("solution-version-ARN", solutionVersionArn);
        }
        return new PortObject[] { createOutput(exec, solutionVersionArn, recipeType, metrics) };
    }
}
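createOutput(...) is not part of this snippet. Purely as an illustration, a version that flattens the solution version ARN, the recipe type, and the metrics map into a one-row KNIME table could look roughly like this; the column names and layout are assumptions, not the project's implementation.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.knime.core.data.DataCell;
import org.knime.core.data.DataColumnSpec;
import org.knime.core.data.DataColumnSpecCreator;
import org.knime.core.data.DataTableSpec;
import org.knime.core.data.RowKey;
import org.knime.core.data.def.DefaultRow;
import org.knime.core.data.def.DoubleCell;
import org.knime.core.data.def.StringCell;
import org.knime.core.node.BufferedDataContainer;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.ExecutionContext;

// Sketch only; the real createOutput(...) in the node model is not shown on this page.
final class SolutionVersionOutputSketch {

    private SolutionVersionOutputSketch() {
    }

    static BufferedDataTable createOutput(final ExecutionContext exec, final String solutionVersionArn,
        final String recipeType, final Map<String, Double> metrics) {
        // One string column each for the ARN and recipe type, one double column per metric.
        final List<DataColumnSpec> colSpecs = new ArrayList<>();
        colSpecs.add(new DataColumnSpecCreator("Solution version ARN", StringCell.TYPE).createSpec());
        colSpecs.add(new DataColumnSpecCreator("Recipe type", StringCell.TYPE).createSpec());
        for (final String metricName : metrics.keySet()) {
            colSpecs.add(new DataColumnSpecCreator(metricName, DoubleCell.TYPE).createSpec());
        }
        final DataTableSpec spec = new DataTableSpec(colSpecs.toArray(new DataColumnSpec[0]));
        final BufferedDataContainer container = exec.createDataContainer(spec);
        final List<DataCell> cells = new ArrayList<>();
        cells.add(new StringCell(solutionVersionArn));
        cells.add(new StringCell(recipeType));
        for (final String metricName : metrics.keySet()) {
            cells.add(new DoubleCell(metrics.get(metricName)));
        }
        container.addRowToTable(new DefaultRow(RowKey.createRowKey(0L), cells));
        container.close();
        return container.getTable();
    }
}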
use of org.knime.cloud.aws.util.AmazonConnectionInformationPortObject in project knime-cloud by knime.
the class TranslateNodeModel method execute.
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    if (inObjects == null || inObjects.length != 2) {
        throw new InvalidSettingsException("Invalid input data. Expected two inputs.");
    }
    final CloudConnectionInformation cxnInfo =
        ((AmazonConnectionInformationPortObject) inObjects[CNX_PORT_IDX]).getConnectionInformation();
    LOGGER.info("Using region: " + cxnInfo.getHost());
    // Access the input data table
    final BufferedDataTable table = (BufferedDataTable) inObjects[DATA_PORT_IDX];
    // Create computation object for the translate operation.
    final TranslateOperation translateOp = new TranslateOperation(cxnInfo, m_textColumnName.getStringValue(),
        TranslateUtils.getSourceLanguageMap().getOrDefault(m_sourceLanguage.getStringValue(), "auto"),
        TranslateUtils.getSourceLanguageMap().getOrDefault(m_targetLanguage.getStringValue(), "en"),
        createNewDataTableSpec(table.getDataTableSpec()));
    // Run the operation over the entire input.
    return new BufferedDataTable[] { translateOp.compute(exec, table) };
}
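Inside TranslateOperation (not shown here) each text cell presumably ends up in a call to the Amazon Translate service. A sketch of such a per-string call with the AWS SDK for Java, using the resolved source and target language codes, might look like this; the client setup and the translate(...) method are illustrative and not taken from the project.

import com.amazonaws.services.translate.AmazonTranslate;
import com.amazonaws.services.translate.AmazonTranslateClientBuilder;
import com.amazonaws.services.translate.model.TranslateTextRequest;
import com.amazonaws.services.translate.model.TranslateTextResult;

// Sketch only; the project's TranslateOperation handles this per input row.
final class TranslateCallSketch {

    private TranslateCallSketch() {
    }

    static String translate(final String text, final String sourceLangCode, final String targetLangCode) {
        // In the node, region and credentials would come from the CloudConnectionInformation;
        // the default builder is used here only to keep the sketch short.
        final AmazonTranslate client = AmazonTranslateClientBuilder.defaultClient();
        final TranslateTextRequest request = new TranslateTextRequest()
            .withText(text)
            .withSourceLanguageCode(sourceLangCode)   // e.g. "auto" or "de"
            .withTargetLanguageCode(targetLangCode);  // e.g. "en"
        final TranslateTextResult result = client.translateText(request);
        return result.getTranslatedText();
    }
}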
use of org.knime.cloud.aws.util.AmazonConnectionInformationPortObject in project knime-cloud by knime.
the class S3ConnectorNodeModel method execute.
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    m_awsConnectionInfo = (AmazonConnectionInformationPortObject) inObjects[0];
    final CloudConnectionInformation conInfo = m_awsConnectionInfo.getConnectionInformation();
    final S3FSConnectionConfig config = m_settings.toFSConnectionConfig(conInfo, getCredentialsProvider());
    m_fsConn = new S3FSConnection(config);
    FSConnectionRegistry.getInstance().register(m_fsId, m_fsConn);
    if (conInfo.isUseAnonymous()) {
        setWarningMessage("You are using anonymous credentials. File browsing will only work inside public buckets.");
    } else {
        testFileSystemConnection(m_fsConn);
    }
    return new PortObject[] { new FileSystemPortObject(createSpec()) };
}
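testFileSystemConnection(m_fsConn) is defined elsewhere in the node. Conceptually, such a check can amount to one cheap authenticated request against S3 that fails fast on bad credentials or region settings. The sketch below shows that idea with the plain AWS SDK client; it is not the project's implementation, and in the node the client would be derived from the CloudConnectionInformation rather than the default builder.

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

// Sketch only; the actual testFileSystemConnection(...) works on the S3FSConnection.
final class S3ConnectionTestSketch {

    private S3ConnectionTestSketch() {
    }

    static void testConnection() {
        // Default builder for brevity; any failure (bad credentials, wrong region) surfaces as an exception.
        final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        s3.listBuckets();
    }
}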