Usage of com.khartec.waltz.data.entity_statistic.EntityStatisticValueDao in project waltz by khartec.
Class EntityStatisticHarness, method main.
/**
 * Harness entry point: boots the Spring context and exercises
 * {@link EntityStatisticService#calculateHistoricStatTally} with three
 * rollup modes (count, sum, pre-computed), printing each result.
 *
 * @param args ignored
 */
public static void main(String[] args) {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    try {
        EntityStatisticService service = ctx.getBean(EntityStatisticService.class);

        // Selector: org unit 70 and all of its children.
        IdSelectionOptions selectionOptions = ImmutableIdSelectionOptions.builder()
                .entityReference(EntityReference.mkRef(EntityKind.ORG_UNIT, 70))
                .scope(HierarchyQueryScope.CHILDREN)
                .build();

        // count by value
        List<TallyPack<String>> countByValueTallyPacks =
                service.calculateHistoricStatTally(10100L, RollupKind.COUNT_BY_ENTITY, selectionOptions, Duration.WEEK);
        System.out.println(countByValueTallyPacks);

        // sum by value
        List<TallyPack<String>> sumByValueTallyPacks =
                service.calculateHistoricStatTally(20010L, RollupKind.SUM_BY_VALUE, selectionOptions, Duration.YEAR);
        System.out.println(sumByValueTallyPacks);

        // pre-computed
        List<TallyPack<String>> preComputedTallyPacks =
                service.calculateHistoricStatTally(11000L, RollupKind.NONE, selectionOptions, Duration.ALL);
        System.out.println(preComputedTallyPacks);

        System.out.println("done");
    } finally {
        // Close the Spring context so singleton beans (connection pools etc.) are destroyed.
        ctx.close();
    }
}
Usage of com.khartec.waltz.data.entity_statistic.EntityStatisticValueDao in project waltz by khartec.
Class EntityStatisticGenerator, method createPreComputedStatsFor.
/**
 * Generates one random, current statistic value per organisational unit for the
 * given definition and bulk-saves the batch via the value DAO.
 *
 * @param defn     statistic definition the values belong to
 * @param orgUnits org units to generate a value for (one value each)
 * @param valueDao DAO used for the bulk insert
 */
private void createPreComputedStatsFor(EntityStatisticDefinition defn, OrganisationalUnit[] orgUnits, EntityStatisticValueDao valueDao) {
    Random random = new Random(System.currentTimeMillis());
    List<EntityStatisticValue> batch = streamOrgUnitRefs(orgUnits)
            .map(ouRef -> {
                // Random compliance outcome plus a random numeric value in [0, 5000).
                String outcome = randomPick("COMPLIANT", "PARTIALLY_COMPLIANT", "NON_COMPLIANT");
                return ImmutableEntityStatisticValue.builder()
                        .entity(ouRef)
                        .current(true)
                        .state(StatisticValueState.PROVIDED)
                        .outcome(outcome)
                        .value(String.valueOf(random.nextInt(5000)))
                        .statisticId(defn.id().get())
                        .createdAt(LocalDateTime.now())
                        .provenance(PROVENANCE)
                        .build();
            })
            .collect(Collectors.toList());
    valueDao.bulkSaveValues(batch);
}
Usage of com.khartec.waltz.data.entity_statistic.EntityStatisticValueDao in project waltz by khartec.
Class EntityStatisticGenerator, method apply.
/**
 * Regenerates demo entity-statistic data: removes previously generated
 * definitions, marks old values non-current, inserts the demo definitions,
 * creates fresh random values, and rebuilds the statistic hierarchy.
 *
 * @param context Spring context used to resolve the required DAOs/services
 * @return always {@code null} (nothing meaningful to report)
 */
@Override
public Map<String, Integer> apply(ApplicationContext context) {
    DSLContext dsl = context.getBean(DSLContext.class);
    ApplicationDao applicationDao = context.getBean(ApplicationDao.class);
    OrganisationalUnitDao organisationalUnitDao = context.getBean(OrganisationalUnitDao.class);
    EntityStatisticValueDao valueDao = context.getBean(EntityStatisticValueDao.class);
    EntityStatisticDefinitionDao definitionDao = context.getBean(EntityStatisticDefinitionDao.class);
    EntityHierarchyService entityHierarchyService = context.getBean(EntityHierarchyService.class);

    Application[] applications = applicationDao.getAll().toArray(new Application[0]);
    OrganisationalUnit[] orgUnits = organisationalUnitDao.findAll().toArray(new OrganisationalUnit[0]);

    // Use the PROVENANCE constant rather than a "DEMO" literal so the queries
    // stay in sync with the log messages below (which already report PROVENANCE).
    // NOTE(review): assumes PROVENANCE == "DEMO" — confirm against the constant's declaration.
    dsl.deleteFrom(ENTITY_STATISTIC_DEFINITION)
            .where(ENTITY_STATISTIC_DEFINITION.PROVENANCE.eq(PROVENANCE))
            .execute();
    System.out.println("deleted existing statistics (provenance: '" + PROVENANCE + "')");

    dsl.update(ENTITY_STATISTIC_VALUE)
            .set(ENTITY_STATISTIC_VALUE.CURRENT, false)
            .where(ENTITY_STATISTIC_VALUE.PROVENANCE.eq(PROVENANCE))
            .execute();
    System.out.println("marked existing statistic values as non-current (provenance: '" + PROVENANCE + "')");

    // Re-insert the demo statistic definitions.
    definitionDao.insert(SDLC);
    definitionDao.insert(SDLC_TECH);
    definitionDao.insert(SDLC_PROCESS);
    definitionDao.insert(SDLC_JIRA);
    definitionDao.insert(SDLC_SVN);
    definitionDao.insert(SDLC_WIKI);
    definitionDao.insert(AUDIT);
    definitionDao.insert(SERVER_COUNT);
    definitionDao.insert(PRE_COMPUTED);

    // Generate fresh random values for each definition.
    createAdoptionStatsFor(SDLC_TECH, applications, valueDao);
    createAdoptionStatsFor(SDLC_PROCESS, applications, valueDao);
    createAdoptionStatsFor(SDLC_JIRA, applications, valueDao);
    createAdoptionStatsFor(SDLC_SVN, applications, valueDao);
    createAdoptionStatsFor(SDLC_WIKI, applications, valueDao);
    createIntStatsFor(AUDIT, applications, valueDao, 20, failIfPositiveFn);
    createIntStatsFor(SDLC, applications, valueDao, 20, failIfPositiveFn);
    createIntStatsFor(SERVER_COUNT, applications, valueDao, 20, (x, y) -> "VIRTUAL");
    createIntStatsFor(SERVER_COUNT, applications, valueDao, 20, (x, y) -> "BARE_METAL");
    createPreComputedStatsFor(PRE_COMPUTED, orgUnits, valueDao);

    entityHierarchyService.buildFor(EntityKind.ENTITY_STATISTIC);
    System.out.println("Rebuilt entity hierarchy");

    return null;
}
Usage of com.khartec.waltz.data.entity_statistic.EntityStatisticValueDao in project waltz by khartec.
Class EntityStatisticGenerator, method createIntStatsFor.
/**
 * Generates one random integer statistic value per application for the given
 * definition and bulk-saves the batch.
 *
 * @param defn         statistic definition the values belong to
 * @param applications applications to generate a value for (one each)
 * @param valueDao     DAO used for the bulk insert
 * @param bound        exclusive upper bound for the random value
 * @param outcomeFn    maps (state, value) to the outcome string to store
 */
private void createIntStatsFor(EntityStatisticDefinition defn, Application[] applications, EntityStatisticValueDao valueDao, int bound, BiFunction<StatisticValueState, Integer, String> outcomeFn) {
    // Random() self-seeds with high-entropy data; seeding with currentTimeMillis() was strictly worse.
    Random rnd = new Random();
    List<EntityStatisticValue> values = streamAppRefs(applications).map(appRef -> {
        StatisticValueState state = randomPick(StatisticValueState.values());
        // The original computed a state-dependent value and then unconditionally
        // overwrote it (marked "// naughty"); the dead computation is removed —
        // every value is simply a random int in [0, bound), regardless of state.
        int v = rnd.nextInt(bound);
        return ImmutableEntityStatisticValue.builder()
                .entity(appRef)
                .state(state)
                .statisticId(defn.id().get())
                .current(true)
                .createdAt(LocalDateTime.now())
                .value(Integer.toString(v))
                .outcome(outcomeFn.apply(state, v))
                .provenance(PROVENANCE)
                .build();
    }).collect(toList());
    valueDao.bulkSaveValues(values);
}
Aggregations