Usage examples of org.finos.waltz.model.datatype.DataType in the waltz project (by khartec).
Example 1: the logInsert method of the FlowClassificationRuleService class.
// Records an audit entry for a newly created flow classification rule.
// Resolves the rule's application, data type and parent entity first; if any
// of the three lookups fails the entry is silently skipped (best-effort logging).
private void logInsert(Long ruleId, FlowClassificationRuleCreateCommand command, String username) {
    Application app = applicationDao.getById(command.applicationId());
    DataType dataType = dataTypeDao.getById(command.dataTypeId());
    String parentName = getParentEntityName(command.parentReference());

    if (app == null || dataType == null || parentName == null) {
        return; // one of the referenced entities could not be resolved; nothing to log
    }

    String msg = format(
            "Registered the flow classification rule with %s as the source app for type: %s for %s: %s",
            app.name(),
            dataType.name(),
            command.parentReference().kind().prettyName(),
            parentName);

    multiLog(username, ruleId, command.parentReference(), dataType, app, msg, Operation.ADD);
}
Example 2: the main method of the FlowSummaryWithTypesAndPhysicalsExport class in the waltz project.
// Standalone reporting utility: exports a CSV summarising app-to-app logical
// flows together with their data types and any associated physical flow rows.
// Wires DAOs from the Spring context, bulk-loads everything into memory, then
// joins the datasets in a single stream pipeline.
// NOTE(review): appears intended to run as a one-off script against a live DB
// (uses DIConfiguration + System.out progress messages) — confirm before reuse.
public static void main(String[] args) throws IOException {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
DSLContext dsl = ctx.getBean(DSLContext.class);
ApplicationIdSelectorFactory appIdSelectorFactory = new ApplicationIdSelectorFactory();
ApplicationDao applicationDao = ctx.getBean(ApplicationDao.class);
OrganisationalUnitDao organisationalUnitDao = ctx.getBean(OrganisationalUnitDao.class);
LogicalFlowDao logicalFlowDao = ctx.getBean(LogicalFlowDao.class);
LogicalFlowDecoratorDao decoratorDao = ctx.getBean(LogicalFlowDecoratorDao.class);
DataTypeDao dataTypeDao = ctx.getBean(DataTypeDao.class);
// Selectors scope the export: app selector drives both the in-scope app set
// and the logical-flow selection.
Select<Record1<Long>> appSelector = mkAppIdSelector(appIdSelectorFactory);
Select<Record1<Long>> logicalFlowSelector = mkLogicalFlowSelectorFromAppSelector(appSelector);
System.out.println("Loading apps");
Set<Application> allApps = fromCollection(applicationDao.findAll());
System.out.println("Loading in scope apps");
Set<Long> inScopeAppIds = toIds(applicationDao.findByAppIdSelector(appSelector));
System.out.println("Loading OUs");
List<OrganisationalUnit> allOUs = organisationalUnitDao.findAll();
System.out.println("Loading DTs");
List<DataType> allDataTypes = dataTypeDao.findAll();
System.out.println("Loading Logical Flows");
List<LogicalFlow> logicalFlows = logicalFlowDao.findBySelector(logicalFlowSelector);
System.out.println("Loading decorators");
List<DataTypeDecorator> decorators = decoratorDao.findByAppIdSelector(appSelector);
System.out.println("Loading phys flows");
// Tuple7 per physical flow: id plus six descriptive string columns, keyed by
// the owning logical flow id.
Map<Long, Collection<Tuple7<Long, String, String, String, String, String, String>>> physicalsByLogical = loadPhysicalsByLogical(dsl, logicalFlowSelector);
System.out.println("Indexing");
// In-memory indexes so the pipeline below can join by id without further DB hits.
Map<Optional<Long>, Application> appsById = indexByOptId(allApps);
Map<Optional<Long>, DataType> dataTypesById = indexByOptId(allDataTypes);
Map<Optional<Long>, OrganisationalUnit> ousById = indexByOptId(allOUs);
Map<Long, Collection<DataTypeDecorator>> decoratorsByLogicalFlowId = groupBy(DataTypeDecorator::dataFlowId, decorators);
System.out.println("Processing");
CsvListWriter csvWriter = setupCSVWriter();
/*
 * Pipeline builds up a growing tuple per logical flow; slot meanings:
 *   v1  logical flow                    v2  source app        v3  target app
 *   v4  source OU (nullable)            v5  target OU (nullable)
 *   v6  sorted data types for the flow  v7  source in scope?  v8  target in scope?
 *   v9..v14 physical flow columns (Tuple7 minus its leading id, via skip1();
 *           a single "-" placeholder row is used when no physicals exist).
 * NOTE(review): flows whose source/target app lookup fails are dropped by the
 * t.v2 != null && t.v3 != null filter.
 */
logicalFlows.stream().filter(lf -> lf.source().kind() == EntityKind.APPLICATION && lf.target().kind() == EntityKind.APPLICATION).map(Tuple::tuple).map(t -> t.concat(appsById.get(Optional.of(t.v1.source().id())))).map(t -> t.concat(appsById.get(Optional.of(t.v1.target().id())))).filter(t -> t.v2 != null && t.v3 != null).map(t -> t.concat(ousById.get(Optional.of(t.v2.organisationalUnitId())))).map(t -> t.concat(ousById.get(Optional.of(t.v3.organisationalUnitId())))).map(t -> t.concat(decoratorsByLogicalFlowId.getOrDefault(t.v1.id().orElse(-1L), emptyList()).stream().filter(d -> d.decoratorEntity().kind() == EntityKind.DATA_TYPE).map(d -> dataTypesById.get(Optional.of(d.decoratorEntity().id()))).sorted(Comparator.comparing(NameProvider::name)).collect(Collectors.toList()))).map(t -> t.concat(inScopeAppIds.contains(t.v2.id().get()))).map(t -> t.concat(inScopeAppIds.contains(t.v3.id().get()))).flatMap(t -> physicalsByLogical.getOrDefault(t.v1.id().orElse(-1L), newArrayList(tuple(-1L, "-", "-", "-", "-", "-", "-"))).stream().map(p -> t.concat(p.skip1()))).map(t -> newArrayList(// src
t.v2.name(), t.v2.assetCode().map(ExternalIdValue::value).orElse(""), t.v2.applicationKind().name(), t.v2.entityLifecycleStatus().name(), // src OU
Optional.ofNullable(t.v4).map(NameProvider::name).orElse("?"), t.v7.toString(), // trg
t.v3.name(), t.v3.assetCode().map(ExternalIdValue::value).orElse(""), t.v3.applicationKind().name(), t.v3.entityLifecycleStatus().name(), // trg OU
Optional.ofNullable(t.v5).map(NameProvider::name).orElse("?"), t.v8.toString(), StringUtilities.joinUsing(t.v6, NameProvider::name, ","), t.v9, t.v10, t.v11, t.v12, t.v13, t.v14)).forEach(Unchecked.consumer(csvWriter::write));
}
Example 3: the mkDataTypeChange method of the ChangeUnitGenerator class in the waltz project.
// Builds an ATTRIBUTE_CHANGE record describing a randomised data-type change
// for the given change unit: the old value is an empty JSON array and the new
// value is a JSON array of 1-5 randomly picked data type ids.
// NOTE: sample-data generator — uses a fixed "admin" user and sample provenance.
private AttributeChangeRecord mkDataTypeChange(DSLContext dsl, ChangeUnit cu, PhysicalFlow flow, String name) {
    List<DataType> dataTypes = dsl.selectFrom(DATA_TYPE).fetch(DataTypeDao.TO_DOMAIN);
    List<DataType> picked = randomPick(dataTypes, randomIntBetween(1, 5));

    // e.g. [{"dataTypeId": 1},{"dataTypeId": 7}]
    String newValueJson = "[" + joinUsing(picked, d -> String.format("{\"dataTypeId\": %s}", d.id().get()), ",") + "]";

    AttributeChangeRecord attrChange = dsl.newRecord(ATTRIBUTE_CHANGE);
    attrChange.setChangeUnitId(cu.id().get());
    attrChange.setType("json");
    attrChange.setOldValue("[]");
    attrChange.setNewValue(newValueJson);
    attrChange.setName(name);
    attrChange.setLastUpdatedAt(DateTimeUtilities.nowUtcTimestamp());
    attrChange.setLastUpdatedBy("admin");
    attrChange.setProvenance(SAMPLE_DATA_PROVENANCE);
    return attrChange;
}
Example 4: the mkAttributeChanges method of the ChangeUnitGenerator class in the waltz project.
// For every MODIFY change unit, generates one or two attribute-change records
// against the unit's subject physical flow, each for a randomly chosen
// attribute ("criticality", "frequency" or "DataType").
private List<AttributeChangeRecord> mkAttributeChanges(DSLContext dsl, List<PhysicalFlow> physicalFlows) {
    List<String> attributes = ListUtilities.asList("criticality", "frequency", "DataType");
    Map<Long, PhysicalFlow> flowsById = MapUtilities.indexBy(f -> f.id().get(), physicalFlows);

    List<ChangeUnit> modifyCUs = dsl
            .selectFrom(CHANGE_UNIT)
            .where(CHANGE_UNIT.ACTION.eq(ChangeAction.MODIFY.name()))
            .fetch(ChangeUnitDao.TO_DOMAIN_MAPPER);

    return modifyCUs
            .stream()
            .flatMap(cu -> randomlySizedIntStream(1, 2)
                    .mapToObj(idx -> randomPick(attributes))
                    .map(attribute -> mkChangeForAttribute(dsl, cu, flowsById.get(cu.subjectEntity().id()), attribute)))
            .collect(toList());
}


// Dispatches to the record builder matching the chosen attribute name.
private AttributeChangeRecord mkChangeForAttribute(DSLContext dsl, ChangeUnit cu, PhysicalFlow flow, String attribute) {
    switch (attribute) {
        case "criticality":
            return mkCriticalityChange(dsl, cu, flow, attribute);
        case "frequency":
            return mkFrequencyChange(dsl, cu, flow, attribute);
        case "DataType":
            return mkDataTypeChange(dsl, cu, flow, attribute);
        default:
            throw new UnsupportedOperationException("Attribute change not supported: " + attribute);
    }
}
Example 5: the findSuggestedByEntityRef test method of the DataTypeDecoratorServiceTest class in the waltz project.
// Verifies DataTypeDecoratorService#findSuggestedByEntityRef:
//  - throws UnsupportedOperationException for unsupported entity kinds (application);
//  - returns an empty list when the entity has no associated flows;
//  - suggests data types drawn from flows touching the upstream app, for both
//    logical flows and physical specifications.
@Test
public void findSuggestedByEntityRef() {
    String username = mkName("updateDecorators");

    // Topology: a -> b -> c
    EntityReference a = appHelper.createNewApp("a", ouIds.a);
    assertThrows(UnsupportedOperationException.class, () -> dtdSvc.findSuggestedByEntityRef(a), "Throw exception if not a logical data flow or physical spec");

    EntityReference b = appHelper.createNewApp("b", ouIds.a1);
    LogicalFlow flow = lfHelper.createLogicalFlow(a, b);
    Collection<DataType> suggestedWhenNoFlows = dtdSvc.findSuggestedByEntityRef(flow.entityReference());
    assertEquals(emptyList(), suggestedWhenNoFlows, "If no flows associated to entity should return empty list");

    // BUG FIX: app c was previously created with the copy-pasted name "b",
    // which made test diagnostics ambiguous; it now carries its own name.
    EntityReference c = appHelper.createNewApp("c", ouIds.a1);
    LogicalFlow flow2 = lfHelper.createLogicalFlow(b, c);

    Long dtId = dataTypeHelper.createDataType("updateDecorators");
    Long dtId2 = dataTypeHelper.createDataType("updateDecorators2");
    dtdSvc.updateDecorators(username, flow.entityReference(), asSet(dtId), emptySet());
    dtdSvc.updateDecorators(username, flow2.entityReference(), asSet(dtId, dtId2), emptySet());

    // flow (a -> b) only carries dtId, so that is the sole suggestion.
    Collection<DataType> suggestedWhenUpstream = dtdSvc.findSuggestedByEntityRef(flow.entityReference());
    assertEquals(asSet(dtId), map(suggestedWhenUpstream, d -> d.id().get()), "Should return suggested data types based on the upstream app");

    // flow2's upstream app (b) sees both flows, so both data types are suggested.
    Collection<DataType> suggestedWhenSrcHasUpstreamAndDownStream = dtdSvc.findSuggestedByEntityRef(flow2.entityReference());
    assertEquals(asSet(dtId, dtId2), map(suggestedWhenSrcHasUpstreamAndDownStream, d -> d.id().get()), "Should return suggested data types based on up and down stream flows on the upstream app");

    // A physical spec attached to flow (a -> b) inherits that flow's suggestions.
    Long specId = psHelper.createPhysicalSpec(a, "updateDecorators");
    pfHelper.createPhysicalFlow(flow.entityReference().id(), specId, "updateDecorators");
    Collection<DataType> suggestedForPsWhenUpstream = dtdSvc.findSuggestedByEntityRef(mkRef(EntityKind.PHYSICAL_SPECIFICATION, specId));
    assertEquals(asSet(dtId), map(suggestedForPsWhenUpstream, d -> d.id().get()), "Should return suggested data types based on the upstream app");

    // A spec with no physical flow yields no suggestions.
    Long specId2 = psHelper.createPhysicalSpec(a, "updateDecorators");
    Collection<DataType> specNotInvolvedInFlows = dtdSvc.findSuggestedByEntityRef(mkRef(EntityKind.PHYSICAL_SPECIFICATION, specId2));
    assertEquals(emptyList(), specNotInvolvedInFlows, "Spec not involved in flows should return empty list");
}
Aggregations