Example use of Port in project osate2 by osate — the class CreateConnectionsSwitch, method doModeTransitionConnections. (Note: the com.google.cloud.aiplatform.v1.Port attribution appears to be an aggregator error; this OSATE code uses the AADL Port type.)
/**
 * As we are following connection declarations we need to check whether the
 * destination of the connection is named as a trigger in one of the mode
 * transitions of the component instance that is the destination of the
 * connection being added. For each matching trigger a mode-transition
 * connection instance is created.
 *
 * @param parentci
 *            The component instance that is the context in which the
 *            connections are declared
 * @param connInfo
 *            the ConnectionInfo describing the connection instance being
 *            created
 * @param conn
 *            connection being added to the ConnectionInstance
 * @return true if we created at least one ModeTransitionInstance connection
 */
private boolean doModeTransitionConnections(final ComponentInstance parentci, ConnectionInfo connInfo, Connection conn) {
boolean didTransition = false;
// only port and feature group connections can trigger mode transitions
if (!(conn instanceof PortConnection || conn instanceof FeatureGroupConnection)) {
return false;
}
ComponentInstance parent = null;
Context fc = conn.getAllDestinationContext();
Element connContext = null;
if (fc instanceof ComponentImplementation || fc instanceof FeatureGroup) {
// we have an outgoing connection: the transitions to check live in the
// enclosing component instance, and the trigger context to match is
// this component's subcomponent
parent = (ComponentInstance) parentci.eContainer();
connContext = parentci.getSubcomponent();
} else if (fc instanceof Subcomponent) {
// the destination is a feature of a subcomponent of parentci
parent = parentci.findSubcomponentInstance((Subcomponent) fc);
connContext = ((Subcomponent) fc).getAllClassifier();
}
if (parent == null) {
// destination context is neither pattern (or the subcomponent has no instance)
return false;
}
EList<ModeTransitionInstance> mtl = parent.getModeTransitionInstances();
Feature f = (Feature) conn.getAllDestination();
for (ModeTransitionInstance mti : mtl) {
ModeTransition mt = mti.getModeTransition();
Context co = null;
for (ModeTransitionTrigger trigger : mt.getOwnedTriggers()) {
TriggerPort tp = trigger.getTriggerPort();
if (tp instanceof Port) {
Port o = (Port) tp;
co = trigger.getContext();
NamedElement context = co;
if (context instanceof FeatureGroup) {
// when the trigger is qualified by a feature group, compare against
// the subcomponent's classifier instead of the feature group itself
context = parent.getSubcomponent().getAllClassifier();
}
// identity comparison: trigger port and context must be the very
// objects named by the connection destination
if (f == o && context == connContext) {
final ConnectionInstance mtci = addConnectionInstance(parentci.getSystemInstance(), connInfo.convertToModeTransition(), mti);
fillInModes(mtci);
fillInModeTransitions(mtci);
didTransition = true;
}
} else {
// TODO-LW: what if it's a processor port or internal event?
}
}
}
return didTransition;
}
Example use of Port in project osate2 by osate — the class CreateConnectionsSwitch, method doModeTransitionConnections (overload). (As above, the aiplatform Port attribution appears incorrect for this OSATE code.)
// ------------------------------------------------------------------------
// Methods related to mode transition connections
// ------------------------------------------------------------------------
/**
 * Handles the situation that a mode transition may name an event port in a
 * thread (or other leaf component instance) where that port is not the
 * destination of a connection instance - it is the start of a connection
 * instance.
 *
 * @param ci
 *            the ComponentInstance owning the feature
 * @param fi
 *            the FeatureInstance that may act as a transition trigger
 * @return true if we created at least one ModeTransitionInstance connection
 */
private boolean doModeTransitionConnections(ComponentInstance ci, FeatureInstance fi) {
// only event ports can trigger mode transitions here
if (fi.getCategory() != FeatureCategory.EVENT_PORT) {
return false;
}
boolean created = false;
final Subcomponent owningSub = ci.getSubcomponent();
final Feature feature = fi.getFeature();
// scan the enclosing component's transitions for triggers naming this port
for (ModeTransitionInstance mti : ci.getContainingComponentInstance().getModeTransitionInstances()) {
for (ModeTransitionTrigger trigger : mti.getModeTransition().getOwnedTriggers()) {
final TriggerPort triggerPort = trigger.getTriggerPort();
if (!(triggerPort instanceof Port)) {
// TODO-LW: what if it's a processor port or internal event?
continue;
}
final Port port = (Port) triggerPort;
// identity match: same feature object and same subcomponent context
if (feature == port && trigger.getContext() == owningSub) {
addConnectionInstance(ci.getSystemInstance(), ConnectionInfo.newModeTransition(fi), mti);
created = true;
}
}
}
return created;
}
Example use of Port in project knime-core by knime — the class NodeDescription41Proxy, method validateGroupIdentifiers. (The Port type here is the node-description schema Port, not com.google.cloud.aiplatform.v1.Port; the attribution appears to be an aggregator error.)
/**
 * Validates that the extendable port groups declared by the node factory and the
 * dynamic port group entries declared in the node description agree in count,
 * names, and order for the given port location. Any mismatch is reported via
 * {@code logger.coding} and validation stops at the first discrepancy.
 *
 * @param nodeName the node's name, used in log messages
 * @param portLocation human-readable port location label, used in log messages
 * @param extendablePortGrps the extendable port groups defined via the factory, keyed by group name
 * @param portGrps the dynamic port group entries defined via the node description
 * @param portLocationPredicate selects the factory port groups relevant to this location
 */
private static void validateGroupIdentifiers(final String nodeName, final String portLocation, final Map<String, ExtendablePortGroup> extendablePortGrps, final List<? extends DynPort> portGrps, final Predicate<ExtendablePortGroup> portLocationPredicate) {
// get all extendable port group names for the given port location predicate
final String[] extendablePortGrpNames = extendablePortGrps.entrySet().stream().filter(e -> portLocationPredicate.test(e.getValue())).map(Map.Entry::getKey).toArray(String[]::new);
// each port group entry (defined via node description) requires one extendable port group (defined via factory)
if (extendablePortGrpNames.length < portGrps.size()) {
logger.coding(String.format(SCHEMA_VIOLATION_MSG, nodeName));
logger.coding("Node description defines more (extendable) " + portLocation + " port groups than its factory");
return;
}
// each extendable port group (defined via factory) requires one port group entry (defined via node description)
if (extendablePortGrpNames.length > portGrps.size()) {
logger.coding(String.format(SCHEMA_VIOLATION_MSG, nodeName));
logger.coding("node factory defines more (extendable) " + portLocation + " port groups than its node description");
return;
}
// ensure same names
for (final DynPort portGrp : portGrps) {
// typically very small array and therefore not costly;
// noneMatch avoids the double negative of !anyMatch
if (Arrays.stream(extendablePortGrpNames).noneMatch(s -> s.equals(portGrp.getGroupIdentifier()))) {
logger.coding(String.format(SCHEMA_VIOLATION_MSG, nodeName));
logger.coding("node description and factory contain different (extendable) " + portLocation + " port group identifier");
return;
}
}
// the order has to be the same
int idx = 0;
for (final DynPort portGrp : portGrps) {
if (!portGrp.getGroupIdentifier().equals(extendablePortGrpNames[idx++])) {
logger.coding(String.format(SCHEMA_VIOLATION_MSG, nodeName));
logger.coding("The (extendable) " + portLocation + " port group ordering differs between factory and node description.");
return;
}
}
}
Example use of Port in project knime-core by knime — the class NodeDescription41Proxy, method appendPortDescriptions. (Again, this Port is the node-description schema Port, not the aiplatform one.)
/**
 * Inserts the dynamic port group descriptions into the given ports element and
 * re-indexes the already existing port descriptions so that indices remain
 * consecutive after insertion.
 *
 * @param ports the ports element to modify
 * @param getPorts accessor returning the existing port descriptions of {@code ports}
 * @param createPort factory appending a new port description to {@code ports}
 * @param grpDescriptionToAdd pairs of dynamic port group description and the number
 *            of ports in that group, ordered by their insert-before index
 */
private static void appendPortDescriptions(final Ports ports, final Function<Ports, List<? extends Port>> getPorts, final Function<Ports, Port> createPort, final List<Pair<DynPort, Integer>> grpDescriptionToAdd) {
// guard: the unconditional iterator.next() below would throw
// NoSuchElementException on an empty list, and there is nothing to insert
if (grpDescriptionToAdd.isEmpty()) {
return;
}
final List<? extends Port> portList = getPorts.apply(ports);
long pos = 0;
long offset = 0;
// maps an insert-before position to the absolute index at which the
// corresponding new descriptions start ("0L", not lowercase "0l")
final Map<Long, Long> idxOffsets = new HashMap<>();
idxOffsets.put(0L, 0L);
final Iterator<Pair<DynPort, Integer>> iterator = grpDescriptionToAdd.iterator();
Pair<DynPort, Integer> curPortGrp = iterator.next();
// update the indices of the existent port descriptions
for (final Port port : portList) {
// take care of the possibility of having several groups with the same insertion index
while (curPortGrp != null && curPortGrp.getFirst().getInsertBefore().longValue() == pos) {
offset += curPortGrp.getSecond();
curPortGrp = iterator.hasNext() ? iterator.next() : null;
}
++pos;
// record the absolute index following this (shifted) port
idxOffsets.put(pos, offset + 1);
port.setIndex(BigInteger.valueOf(offset));
++offset;
}
// no need to add the remaining curPortGrp elements as there can only be one and its insert-before index has to
// be equal to the current pos
curPortGrp = null;
// create the new port descriptions (handles having several group entries with the same insertion index)
for (final Pair<DynPort, Integer> dynPortPair : grpDescriptionToAdd) {
final DynPort dynPort = dynPortPair.getFirst();
pos = dynPort.getInsertBefore().longValue();
offset = idxOffsets.get(pos);
final long size = dynPortPair.getSecond();
for (int i = 0; i < size; i++) {
final Port port = createPort.apply(ports);
port.newCursor().setTextValue(dynPort.newCursor().getTextValue());
port.addI(DYNAMIC_PORT_SUFFIX);
port.setIndex(BigInteger.valueOf(offset++));
port.setName(dynPort.getName());
}
// in case we have several dyn ports with the same insertion index
idxOffsets.put(pos, offset);
}
}
Example use of com.google.cloud.aiplatform.v1.Port in project java-aiplatform by googleapis — the class CreateTrainingPipelineTextClassificationSample, method createTrainingPipelineTextClassificationSample.
/**
 * Creates an AutoML text classification training pipeline in Vertex AI and
 * prints the full response, including input data config, model-to-upload
 * details, container spec, and any error status.
 *
 * @param project the Google Cloud project id
 * @param trainingPipelineDisplayName display name for the new training pipeline
 * @param datasetId id of the dataset to train on
 * @param modelDisplayName display name for the model to upload
 * @throws IOException if the pipeline service client cannot be created
 */
static void createTrainingPipelineTextClassificationSample(String project, String trainingPipelineDisplayName, String datasetId, String modelDisplayName) throws IOException {
PipelineServiceSettings pipelineServiceSettings = PipelineServiceSettings.newBuilder().setEndpoint("us-central1-aiplatform.googleapis.com:443").build();
// try-with-resources invokes the "close" method on the client to safely clean up any remaining background resources.
try (PipelineServiceClient pipelineServiceClient = PipelineServiceClient.create(pipelineServiceSettings)) {
String location = "us-central1";
String trainingTaskDefinition = "gs://google-cloud-aiplatform/schema/trainingjob/definition/" + "automl_text_classification_1.0.0.yaml";
LocationName locationName = LocationName.of(project, location);
AutoMlTextClassificationInputs trainingTaskInputs = AutoMlTextClassificationInputs.newBuilder().setMultiLabel(false).build();
InputDataConfig trainingInputDataConfig = InputDataConfig.newBuilder().setDatasetId(datasetId).build();
Model model = Model.newBuilder().setDisplayName(modelDisplayName).build();
TrainingPipeline trainingPipeline = TrainingPipeline.newBuilder().setDisplayName(trainingPipelineDisplayName).setTrainingTaskDefinition(trainingTaskDefinition).setTrainingTaskInputs(ValueConverter.toValue(trainingTaskInputs)).setInputDataConfig(trainingInputDataConfig).setModelToUpload(model).build();
TrainingPipeline trainingPipelineResponse = pipelineServiceClient.createTrainingPipeline(locationName, trainingPipeline);
System.out.println("Create Training Pipeline Text Classification Response");
System.out.format("\tName: %s\n", trainingPipelineResponse.getName());
System.out.format("\tDisplay Name: %s\n", trainingPipelineResponse.getDisplayName());
// fixed: added missing colon after "Definition"
System.out.format("\tTraining Task Definition: %s\n", trainingPipelineResponse.getTrainingTaskDefinition());
System.out.format("\tTraining Task Inputs: %s\n", trainingPipelineResponse.getTrainingTaskInputs());
System.out.format("\tTraining Task Metadata: %s\n", trainingPipelineResponse.getTrainingTaskMetadata());
// fixed: added missing "\t" for consistent indentation of the output
System.out.format("\tState: %s\n", trainingPipelineResponse.getState());
System.out.format("\tCreate Time: %s\n", trainingPipelineResponse.getCreateTime());
// fixed: "StartTime %s" -> "Start Time: %s" for consistency with the other labels
System.out.format("\tStart Time: %s\n", trainingPipelineResponse.getStartTime());
System.out.format("\tEnd Time: %s\n", trainingPipelineResponse.getEndTime());
System.out.format("\tUpdate Time: %s\n", trainingPipelineResponse.getUpdateTime());
System.out.format("\tLabels: %s\n", trainingPipelineResponse.getLabelsMap());
InputDataConfig inputDataConfig = trainingPipelineResponse.getInputDataConfig();
System.out.println("\tInput Data Config");
// fixed: added missing trailing "\n"
System.out.format("\t\tDataset Id: %s\n", inputDataConfig.getDatasetId());
System.out.format("\t\tAnnotations Filter: %s\n", inputDataConfig.getAnnotationsFilter());
FractionSplit fractionSplit = inputDataConfig.getFractionSplit();
System.out.println("\t\tFraction Split");
System.out.format("\t\t\tTraining Fraction: %s\n", fractionSplit.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", fractionSplit.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", fractionSplit.getTestFraction());
FilterSplit filterSplit = inputDataConfig.getFilterSplit();
System.out.println("\t\tFilter Split");
System.out.format("\t\t\tTraining Filter: %s\n", filterSplit.getTrainingFilter());
System.out.format("\t\t\tValidation Filter: %s\n", filterSplit.getValidationFilter());
System.out.format("\t\t\tTest Filter: %s\n", filterSplit.getTestFilter());
PredefinedSplit predefinedSplit = inputDataConfig.getPredefinedSplit();
System.out.println("\t\tPredefined Split");
System.out.format("\t\t\tKey: %s\n", predefinedSplit.getKey());
TimestampSplit timestampSplit = inputDataConfig.getTimestampSplit();
System.out.println("\t\tTimestamp Split");
System.out.format("\t\t\tTraining Fraction: %s\n", timestampSplit.getTrainingFraction());
System.out.format("\t\t\tValidation Fraction: %s\n", timestampSplit.getValidationFraction());
System.out.format("\t\t\tTest Fraction: %s\n", timestampSplit.getTestFraction());
System.out.format("\t\t\tKey: %s\n", timestampSplit.getKey());
Model modelResponse = trainingPipelineResponse.getModelToUpload();
System.out.println("\tModel To Upload");
System.out.format("\t\tName: %s\n", modelResponse.getName());
System.out.format("\t\tDisplay Name: %s\n", modelResponse.getDisplayName());
System.out.format("\t\tDescription: %s\n", modelResponse.getDescription());
System.out.format("\t\tMetadata Schema Uri: %s\n", modelResponse.getMetadataSchemaUri());
System.out.format("\t\tMetadata: %s\n", modelResponse.getMetadata());
System.out.format("\t\tTraining Pipeline: %s\n", modelResponse.getTrainingPipeline());
System.out.format("\t\tArtifact Uri: %s\n", modelResponse.getArtifactUri());
System.out.format("\t\tSupported Deployment Resources Types: %s\n", modelResponse.getSupportedDeploymentResourcesTypesList());
System.out.format("\t\tSupported Input Storage Formats: %s\n", modelResponse.getSupportedInputStorageFormatsList());
System.out.format("\t\tSupported Output Storage Formats: %s\n", modelResponse.getSupportedOutputStorageFormatsList());
System.out.format("\t\tCreate Time: %s\n", modelResponse.getCreateTime());
System.out.format("\t\tUpdate Time: %s\n", modelResponse.getUpdateTime());
// fixed: format string was "%sn\n" (stray 'n'), should be "%s\n"
System.out.format("\t\tLabels: %s\n", modelResponse.getLabelsMap());
PredictSchemata predictSchemata = modelResponse.getPredictSchemata();
System.out.println("\t\tPredict Schemata");
System.out.format("\t\t\tInstance Schema Uri: %s\n", predictSchemata.getInstanceSchemaUri());
System.out.format("\t\t\tParameters Schema Uri: %s\n", predictSchemata.getParametersSchemaUri());
System.out.format("\t\t\tPrediction Schema Uri: %s\n", predictSchemata.getPredictionSchemaUri());
for (ExportFormat exportFormat : modelResponse.getSupportedExportFormatsList()) {
System.out.println("\t\tSupported Export Format");
System.out.format("\t\t\tId: %s\n", exportFormat.getId());
}
ModelContainerSpec modelContainerSpec = modelResponse.getContainerSpec();
System.out.println("\t\tContainer Spec");
System.out.format("\t\t\tImage Uri: %s\n", modelContainerSpec.getImageUri());
System.out.format("\t\t\tCommand: %s\n", modelContainerSpec.getCommandList());
System.out.format("\t\t\tArgs: %s\n", modelContainerSpec.getArgsList());
System.out.format("\t\t\tPredict Route: %s\n", modelContainerSpec.getPredictRoute());
System.out.format("\t\t\tHealth Route: %s\n", modelContainerSpec.getHealthRoute());
for (EnvVar envVar : modelContainerSpec.getEnvList()) {
System.out.println("\t\t\tEnv");
System.out.format("\t\t\t\tName: %s\n", envVar.getName());
System.out.format("\t\t\t\tValue: %s\n", envVar.getValue());
}
for (Port port : modelContainerSpec.getPortsList()) {
System.out.println("\t\t\tPort");
System.out.format("\t\t\t\tContainer Port: %s\n", port.getContainerPort());
}
for (DeployedModelRef deployedModelRef : modelResponse.getDeployedModelsList()) {
System.out.println("\t\tDeployed Model");
System.out.format("\t\t\tEndpoint: %s\n", deployedModelRef.getEndpoint());
System.out.format("\t\t\tDeployed Model Id: %s\n", deployedModelRef.getDeployedModelId());
}
Status status = trainingPipelineResponse.getError();
System.out.println("\tError");
System.out.format("\t\tCode: %s\n", status.getCode());
System.out.format("\t\tMessage: %s\n", status.getMessage());
}
}
Aggregations