Use of com.khartec.waltz.service.logical_flow.LogicalFlowService in project waltz by khartec.
The class FlowGenerator, method main.
public static void main(String[] args) {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    AuthoritativeSourceDao authSourceDao = ctx.getBean(AuthoritativeSourceDao.class);
    ApplicationService applicationDao = ctx.getBean(ApplicationService.class);
    LogicalFlowService dataFlowDao = ctx.getBean(LogicalFlowService.class);
    OrganisationalUnitService orgUnitDao = ctx.getBean(OrganisationalUnitService.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);

    List<AuthoritativeSource> authSources = authSourceDao.findByEntityKind(EntityKind.ORG_UNIT);
    List<Application> apps = applicationDao.findAll();
    List<OrganisationalUnit> orgUnits = orgUnitDao.findAll();

    // flows from each authoritative source application to applications picked from the source's own org unit
    Set<LogicalFlow> expectedFlows = authSources.stream()
            .flatMap(a -> {
                long orgUnitId = a.parentReference().id();
                return IntStream.range(0, rnd.nextInt(40))
                        .mapToObj(i -> ImmutableLogicalFlow.builder()
                                .source(a.applicationReference())
                                .target(randomAppPick(apps, orgUnitId))
                                .build());
            })
            .collect(toSet());

    // flows from each authoritative source application to applications in a randomly picked org unit
    Set<LogicalFlow> probableFlows = authSources.stream()
            .flatMap(a -> IntStream.range(0, rnd.nextInt(30))
                    .mapToObj(i -> ImmutableLogicalFlow.builder()
                            .source(a.applicationReference())
                            .target(randomAppPick(apps, randomPick(orgUnits).id().get()))
                            .build()))
            .collect(toSet());

    // a handful of flows from every application to randomly picked targets
    Set<LogicalFlow> randomFlows = apps.stream()
            .flatMap(a -> IntStream.range(0, rnd.nextInt(5))
                    .mapToObj(i -> {
                        EntityReference target = randomAppPick(apps, randomPick(orgUnits).id().get());
                        return ImmutableLogicalFlow.builder()
                                .source(a.entityReference())
                                .target(target)
                                .lastUpdatedBy("admin")
                                .build();
                    }))
            .collect(toSet());

    // clear existing flows, then persist the generated set in one batch
    dsl.deleteFrom(LOGICAL_FLOW).execute();
    Set<LogicalFlow> all = new HashSet<>();
    all.addAll(randomFlows);
    all.addAll(expectedFlows);
    all.addAll(probableFlows);
    System.out.println("--- saving: " + all.size());
    Set<LogicalFlowRecord> records = SetUtilities.map(all, df -> LogicalFlowDao.TO_RECORD_MAPPER.apply(df, dsl));
    dsl.batchStore(records).execute();
    System.out.println("--- done");
}
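The generator relies on randomAppPick, randomPick and the rnd instance, which are defined elsewhere in the class and not shown in this snippet. A minimal sketch of what such helpers could look like, assuming randomAppPick prefers applications belonging to the given org unit and randomPick returns any element of a non-empty list; the bodies below are assumptions, not the project's actual implementation (imports omitted to match the snippets on this page):

    private static final Random rnd = new Random();

    // hypothetical: prefer applications in the given org unit, fall back to any application
    private static EntityReference randomAppPick(List<Application> apps, long orgUnitId) {
        List<Application> candidates = apps.stream()
                .filter(a -> a.organisationalUnitId().equals(orgUnitId))
                .collect(Collectors.toList());
        List<Application> pool = candidates.isEmpty() ? apps : candidates;
        return pool.get(rnd.nextInt(pool.size())).entityReference();
    }

    // hypothetical: pick any element from a non-empty list
    private static <T> T randomPick(List<T> xs) {
        return xs.get(rnd.nextInt(xs.size()));
    }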
Use of com.khartec.waltz.service.logical_flow.LogicalFlowService in project waltz by khartec.
The class DataFlowHarness, method main.
public static void main(String[] args) throws ParseException {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);
    LogicalFlowService service = ctx.getBean(LogicalFlowService.class);
    LogicalFlowDao dao = ctx.getBean(LogicalFlowDao.class);
    LogicalFlowIdSelectorFactory factory = ctx.getBean(LogicalFlowIdSelectorFactory.class);

    // by org unit (including children), via an explicit jOOQ selector
    IdSelectionOptions options = IdSelectionOptions.mkOpts(
            EntityReference.mkRef(EntityKind.ORG_UNIT, 5000),
            HierarchyQueryScope.CHILDREN);
    Select<Record1<Long>> selector = factory.apply(options);
    System.out.println(selector);
    List<LogicalFlow> flows = dao.findBySelector(selector);
    flows.forEach(System.out::println);

    // by data type (including children), via the service
    EntityReference dataType = EntityReference.mkRef(EntityKind.DATA_TYPE, 6000);
    IdSelectionOptions dataTypeOptions = IdSelectionOptions.mkOpts(dataType, HierarchyQueryScope.CHILDREN);
    List<LogicalFlow> byDataTypeFlows = service.findBySelector(dataTypeOptions);
    byDataTypeFlows.forEach(System.out::println);
    System.out.println(byDataTypeFlows.size());
}
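The harness exercises two lookup styles visible above: LogicalFlowDao.findBySelector takes a pre-built jOOQ sub-select of ids, while LogicalFlowService.findBySelector takes the IdSelectionOptions directly. A small sketch of a helper wrapping the option-based lookup for an arbitrary entity reference; the helper name and the printed format are illustrative only:

    // hypothetical helper: list the logical flows selected for any entity reference and hierarchy scope
    private static void printFlowsFor(LogicalFlowService service, EntityReference ref, HierarchyQueryScope scope) {
        IdSelectionOptions opts = IdSelectionOptions.mkOpts(ref, scope);
        List<LogicalFlow> flows = service.findBySelector(opts);
        System.out.println(flows.size() + " flows for " + ref.kind() + "/" + ref.id());
        flows.forEach(System.out::println);
    }

    // e.g. printFlowsFor(service, EntityReference.mkRef(EntityKind.ORG_UNIT, 5000), HierarchyQueryScope.CHILDREN);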
Use of com.khartec.waltz.service.logical_flow.LogicalFlowService in project waltz by khartec.
The class LogicalFlowStatsHarness, method main.
public static void main(String[] args) throws ParseException {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);
    LogicalFlowService service = ctx.getBean(LogicalFlowService.class);

    IdSelectionOptions options = IdSelectionOptions.mkOpts(
            EntityReference.mkRef(EntityKind.ORG_UNIT, 50L),
            HierarchyQueryScope.CHILDREN);
    service.calculateStats(options);
    System.out.println("--done");
}
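The harness discards whatever calculateStats produces and only prints a completion marker. A rough sketch that times the call across two hierarchy scopes; the timing loop and the use of the EXACT scope alongside CHILDREN are illustrative assumptions, not part of the original harness:

    // hypothetical timing sketch: compare stat calculation for exact vs children scope
    for (HierarchyQueryScope scope : new HierarchyQueryScope[] {HierarchyQueryScope.EXACT, HierarchyQueryScope.CHILDREN}) {
        IdSelectionOptions opts = IdSelectionOptions.mkOpts(EntityReference.mkRef(EntityKind.ORG_UNIT, 50L), scope);
        long start = System.currentTimeMillis();
        service.calculateStats(opts);
        System.out.println(scope + " took " + (System.currentTimeMillis() - start) + "ms");
    }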
Use of com.khartec.waltz.service.logical_flow.LogicalFlowService in project waltz by khartec.
The class AdditiveFlowGenerator, method main.
public static void main(String[] args) {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    AuthoritativeSourceDao authSourceDao = ctx.getBean(AuthoritativeSourceDao.class);
    ApplicationService applicationDao = ctx.getBean(ApplicationService.class);
    LogicalFlowService dataFlowDao = ctx.getBean(LogicalFlowService.class);
    OrganisationalUnitService orgUnitDao = ctx.getBean(OrganisationalUnitService.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);

    List<AuthoritativeSource> authSources = authSourceDao.findByEntityKind(EntityKind.ORG_UNIT);
    List<Application> apps = applicationDao.findAll();
    Application referenceApplication = applicationDao.getById(APPLICATION_ID);
    List<OrganisationalUnit> orgUnits = orgUnitDao.findAll();

    // flows into the reference application from authoritative sources declared for its own org unit
    Set<LogicalFlow> expectedFlows = authSources.stream()
            .map(a -> {
                long orgUnitId = a.parentReference().id();
                if (referenceApplication.organisationalUnitId().equals(orgUnitId)) {
                    return Optional.of(ImmutableLogicalFlow.builder()
                            .source(a.applicationReference())
                            .target(referenceApplication.entityReference())
                            .build());
                } else {
                    return Optional.<LogicalFlow>empty();
                }
            })
            .filter(o -> o.isPresent())
            .map(o -> o.get())
            .collect(toSet());

    // flows from the reference application to randomly picked targets
    Set<LogicalFlow> randomTargetFlows = IntStream.range(0, rnd.nextInt(APPROX_FLOW_GENERATION_COUNT / 2))
            .mapToObj(i -> {
                EntityReference target = randomAppPick(apps, randomPick(orgUnits).id().get());
                return ImmutableLogicalFlow.builder()
                        .source(referenceApplication.entityReference())
                        .target(target)
                        .build();
            })
            .collect(toSet());

    // flows into the reference application from randomly picked sources
    Set<LogicalFlow> randomSourceFlows = IntStream.range(0, rnd.nextInt(APPROX_FLOW_GENERATION_COUNT / 2))
            .mapToObj(i -> {
                EntityReference source = randomAppPick(apps, randomPick(orgUnits).id().get());
                return ImmutableLogicalFlow.builder()
                        .source(source)
                        .target(referenceApplication.entityReference())
                        .build();
            })
            .collect(toSet());

    // delete only the flows touching the reference application, then persist the generated set in one batch
    dsl.delete(LOGICAL_FLOW)
            .where(LOGICAL_FLOW.SOURCE_ENTITY_ID.eq(APPLICATION_ID))
            .or(LOGICAL_FLOW.TARGET_ENTITY_ID.eq(APPLICATION_ID))
            .execute();

    Set<LogicalFlow> all = new HashSet<>();
    all.addAll(randomTargetFlows);
    all.addAll(randomSourceFlows);
    all.addAll(expectedFlows);

    System.out.println("--- saving: " + all.size());
    Set<LogicalFlowRecord> records = SetUtilities.map(all, df -> LogicalFlowDao.TO_RECORD_MAPPER.apply(df, dsl));
    dsl.batchStore(records).execute();
    System.out.println("--- done");
}
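Unlike FlowGenerator, which wipes the LOGICAL_FLOW table, this generator removes only the flows touching the reference application before inserting new ones. It depends on APPLICATION_ID, APPROX_FLOW_GENERATION_COUNT and rnd, which are defined elsewhere in the class; a sketch of such declarations, with placeholder values that are assumptions rather than the project's real ones:

    // hypothetical placeholder declarations; the real values live elsewhere in the class
    private static final long APPLICATION_ID = 1L;              // id of the reference application being added to
    private static final int APPROX_FLOW_GENERATION_COUNT = 50; // rough upper bound on generated flows
    private static final Random rnd = new Random();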