use of org.supercsv.io.CsvListWriter in project waltz by khartec.
the class BaseDataExtractor method writeFile.
protected Object writeFile(String suggestedFilename, CSVSerializer extractor, Response response) throws Exception {
    response.type(MimeTypes.Type.TEXT_PLAIN.name());
    response.header("Content-disposition", "attachment; filename=" + suggestedFilename);
    StringWriter bodyWriter = new StringWriter();
    CsvPreference csvPreference = CsvPreference.EXCEL_PREFERENCE;
    CsvListWriter csvWriter = new CsvListWriter(bodyWriter, csvPreference);
    csvWriter.write("sep=" + Character.toString((char) csvPreference.getDelimiterChar()));
    extractor.accept(csvWriter);
    csvWriter.flush();
    return bodyWriter.toString();
}
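A sketch of how a concrete extractor might call writeFile, assuming CSVSerializer is a functional interface whose single accept(CsvListWriter) method matches the call above; the filename, column names, and row values are illustrative:
// Hypothetical caller: the lambda receives the CsvListWriter and streams out
// a header row followed by data rows (values here are placeholders).
Object body = writeFile("report.csv", csvWriter -> {
    csvWriter.write("Id", "Name");                  // header row
    csvWriter.write("1", "Example application");    // one data row
}, response);
The Excel "sep=" hint row and the final flush are handled by writeFile itself, so the callback only needs to emit rows.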
use of org.supercsv.io.CsvListWriter in project hive by apache.
the class SeparatedValuesOutputFormat method print.
@Override
public int print(Rows rows) {
    CsvPreference csvPreference = getCsvPreference();
    CsvListWriter writer = new CsvListWriter(this.buffer, csvPreference);
    int count = 0;
    Rows.Row labels = (Rows.Row) rows.next();
    if (beeLine.getOpts().getShowHeader()) {
        fillBuffer(writer, labels);
        String line = getLine(this.buffer);
        beeLine.output(line);
    }
    while (rows.hasNext()) {
        fillBuffer(writer, (Rows.Row) rows.next());
        String line = getLine(this.buffer);
        beeLine.output(line);
        count++;
    }
    return count;
}
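fillBuffer and getLine are helpers defined elsewhere in the class; the underlying pattern is to write one row through the shared CsvListWriter into an in-memory buffer, emit the buffered line, and clear the buffer before the next row. A standalone sketch of that pattern, using a plain StringWriter rather than BeeLine's actual buffer type and an illustrative 'rows' collection:
// Sketch: one CsvListWriter over a reusable StringWriter; each row is written,
// printed, and the buffer is reset so the next row starts from empty.
StringWriter buffer = new StringWriter();
CsvListWriter writer = new CsvListWriter(buffer, CsvPreference.STANDARD_PREFERENCE);
for (List<String> row : rows) {             // 'rows' is illustrative
    writer.write(row);
    writer.flush();                         // push the row into the buffer
    System.out.print(buffer.toString());    // stand-in for beeLine.output(line)
    buffer.getBuffer().setLength(0);        // clear the buffer for the next row
}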
use of org.supercsv.io.CsvListWriter in project waltz by khartec.
the class FlowSummaryWithTypesAndPhysicalsExport method setupCSVWriter.
private static CsvListWriter setupCSVWriter() throws IOException {
    CsvListWriter csvWriter = new CsvListWriter(
            new OutputStreamWriter(new FileOutputStream("/temp/flows.csv")),
            CsvPreference.EXCEL_PREFERENCE);
    csvWriter.write(
            "Source App", "Source Asset Code", "Source App Kind", "Source App Status",
            // src OU
            "Source App Org Unit", "Source In Scope",
            "Target App", "Target Asset Code", "Target App Kind", "Target App Status",
            // trg OU
            "Target App Org Unit", "Target In Scope",
            "Data Types",
            "Physical Name", "Physical ExtId", "Physical Transport", "Physical Format", "Physical Frequency",
            "Criticality");
    return csvWriter;
}
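Neither this helper nor the main method below explicitly closes the writer; a more defensive variant (a sketch, not the project's code) would hold it in a try-with-resources so the underlying file stream is flushed and closed even if a row write fails. The row values here are placeholders, one per header column:
// Sketch only: CsvListWriter is Closeable, so try-with-resources flushes and
// closes the underlying FileOutputStream automatically.
try (CsvListWriter csvWriter = setupCSVWriter()) {
    csvWriter.write("source-app", "SRC-001", "IN_HOUSE", "ACTIVE",
            "Org Unit A", "true",
            "target-app", "TRG-001", "EXTERNAL", "ACTIVE",
            "Org Unit B", "false",
            "Customer Data",
            "daily-extract", "PF-42", "FILE_TRANSPORT", "CSV", "DAILY",
            "MEDIUM");
}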
use of org.supercsv.io.CsvListWriter in project waltz by khartec.
the class FlowSummaryWithTypesAndPhysicalsExport method main.
public static void main(String[] args) throws IOException {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);
    ApplicationIdSelectorFactory appIdSelectorFactory = new ApplicationIdSelectorFactory();
    ApplicationDao applicationDao = ctx.getBean(ApplicationDao.class);
    OrganisationalUnitDao organisationalUnitDao = ctx.getBean(OrganisationalUnitDao.class);
    LogicalFlowDao logicalFlowDao = ctx.getBean(LogicalFlowDao.class);
    LogicalFlowDecoratorDao decoratorDao = ctx.getBean(LogicalFlowDecoratorDao.class);
    DataTypeDao dataTypeDao = ctx.getBean(DataTypeDao.class);
    Select<Record1<Long>> appSelector = mkAppIdSelector(appIdSelectorFactory);
    Select<Record1<Long>> logicalFlowSelector = mkLogicalFlowSelectorFromAppSelector(appSelector);
    System.out.println("Loading apps");
    Set<Application> allApps = fromCollection(applicationDao.findAll());
    System.out.println("Loading in scope apps");
    Set<Long> inScopeAppIds = toIds(applicationDao.findByAppIdSelector(appSelector));
    System.out.println("Loading OUs");
    List<OrganisationalUnit> allOUs = organisationalUnitDao.findAll();
    System.out.println("Loading DTs");
    List<DataType> allDataTypes = dataTypeDao.findAll();
    System.out.println("Loading Logical Flows");
    List<LogicalFlow> logicalFlows = logicalFlowDao.findBySelector(logicalFlowSelector);
    System.out.println("Loading decorators");
    List<DataTypeDecorator> decorators = decoratorDao.findByAppIdSelector(appSelector);
    System.out.println("Loading phys flows");
    Map<Long, Collection<Tuple7<Long, String, String, String, String, String, String>>> physicalsByLogical =
            loadPhysicalsByLogical(dsl, logicalFlowSelector);
    System.out.println("Indexing");
    Map<Optional<Long>, Application> appsById = indexByOptId(allApps);
    Map<Optional<Long>, DataType> dataTypesById = indexByOptId(allDataTypes);
    Map<Optional<Long>, OrganisationalUnit> ousById = indexByOptId(allOUs);
    Map<Long, Collection<DataTypeDecorator>> decoratorsByLogicalFlowId =
            groupBy(DataTypeDecorator::dataFlowId, decorators);
    System.out.println("Processing");
    CsvListWriter csvWriter = setupCSVWriter();
    logicalFlows
            .stream()
            .filter(lf -> lf.source().kind() == EntityKind.APPLICATION && lf.target().kind() == EntityKind.APPLICATION)
            .map(Tuple::tuple)
            .map(t -> t.concat(appsById.get(Optional.of(t.v1.source().id()))))
            .map(t -> t.concat(appsById.get(Optional.of(t.v1.target().id()))))
            .filter(t -> t.v2 != null && t.v3 != null)
            .map(t -> t.concat(ousById.get(Optional.of(t.v2.organisationalUnitId()))))
            .map(t -> t.concat(ousById.get(Optional.of(t.v3.organisationalUnitId()))))
            .map(t -> t.concat(decoratorsByLogicalFlowId
                    .getOrDefault(t.v1.id().orElse(-1L), emptyList())
                    .stream()
                    .filter(d -> d.decoratorEntity().kind() == EntityKind.DATA_TYPE)
                    .map(d -> dataTypesById.get(Optional.of(d.decoratorEntity().id())))
                    .sorted(Comparator.comparing(NameProvider::name))
                    .collect(Collectors.toList())))
            .map(t -> t.concat(inScopeAppIds.contains(t.v2.id().get())))
            .map(t -> t.concat(inScopeAppIds.contains(t.v3.id().get())))
            .flatMap(t -> physicalsByLogical
                    .getOrDefault(t.v1.id().orElse(-1L), newArrayList(tuple(-1L, "-", "-", "-", "-", "-", "-")))
                    .stream()
                    .map(p -> t.concat(p.skip1())))
            .map(t -> newArrayList(
                    // src
                    t.v2.name(), t.v2.assetCode().map(ExternalIdValue::value).orElse(""), t.v2.applicationKind().name(), t.v2.entityLifecycleStatus().name(),
                    // src OU
                    Optional.ofNullable(t.v4).map(NameProvider::name).orElse("?"), t.v7.toString(),
                    // trg
                    t.v3.name(), t.v3.assetCode().map(ExternalIdValue::value).orElse(""), t.v3.applicationKind().name(), t.v3.entityLifecycleStatus().name(),
                    // trg OU
                    Optional.ofNullable(t.v5).map(NameProvider::name).orElse("?"), t.v8.toString(),
                    StringUtilities.joinUsing(t.v6, NameProvider::name, ","),
                    t.v9, t.v10, t.v11, t.v12, t.v13, t.v14))
            .forEach(Unchecked.consumer(csvWriter::write));
}
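The pipeline above threads each logical flow through a progressively wider jOOλ tuple: v1 is the flow, v2/v3 the source and target applications, v4/v5 their org units, v6 the sorted data types, v7/v8 the in-scope flags, and v9 onwards the physical flow fields. A minimal, self-contained sketch of the same concat pattern, using plain strings in place of the Waltz domain types:
import java.util.stream.Stream;

import org.jooq.lambda.tuple.Tuple;
import org.jooq.lambda.tuple.Tuple3;

// Sketch: each map() widens the tuple by one element, so later stages can still
// see every value accumulated earlier in the pipeline.
public class TupleConcatExample {
    public static void main(String[] args) {
        Stream.of("flow-1", "flow-2")
                .map(Tuple::tuple)                    // Tuple1<String>
                .map(t -> t.concat("source-app"))     // Tuple2<String, String>
                .map(t -> t.concat("target-app"))     // Tuple3<String, String, String>
                .forEach((Tuple3<String, String, String> t) ->
                        System.out.println(t.v1 + ": " + t.v2 + " -> " + t.v3));
    }
}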
use of org.supercsv.io.CsvListWriter in project waltz by khartec.
the class AttestationExtractor method mkCSVReport.
private byte[] mkCSVReport(List<String> columnDefinitions,
                           Set<ApplicationAttestationInstanceSummary> reportRows) throws IOException {
    StringWriter writer = new StringWriter();
    CsvListWriter csvWriter = new CsvListWriter(writer, CsvPreference.EXCEL_PREFERENCE);
    csvWriter.write(columnDefinitions);
    reportRows.forEach(unchecked(r -> {
        List<Object> values = asList(
                r.appRef().id(),
                r.appRef().name().get(),
                r.appAssetCode(),
                r.appCriticality(),
                r.appLifecyclePhase(),
                r.appKind(),
                r.attestedAt(),
                r.attestedBy());
        csvWriter.write(values);
    }));
    csvWriter.flush();
    return writer.toString().getBytes();
}
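For the report to line up, columnDefinitions must arrive in the same order as the per-row values written above; a plausible header list (illustrative names, not necessarily what the extractor actually passes) would be:
// Illustrative column order matching the per-row values written in mkCSVReport.
List<String> columnDefinitions = asList(
        "Application Id", "Application Name", "Asset Code", "Criticality",
        "Lifecycle Phase", "Application Kind", "Attested At", "Attested By");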