Example usage of com.github.davidmoten.rx.slf4j.Logging in the project risky by amsa-code.
Taken from the class ByMmsiToDailyConverter, method convert:
/**
 * Reads all {@code *.track} binary fix files under {@code input} and rewrites
 * the fixes into per-UTC-day files ({@code yyyy-MM-dd.fix}) under {@code output},
 * each record written in {@link BinaryFixesFormat#WITH_MMSI} format.
 *
 * @param input  directory searched recursively for {@code *.track} files
 * @param output directory for the daily files; created if absent and cleaned first
 * @throws RuntimeException if the output directory cannot be created/cleaned or a write fails
 */
public static void convert(File input, File output) {
    // used both to group fixes by day and to name the daily output file
    DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd").withZone(ZoneOffset.UTC);
    // fix: mkdirs() result was previously ignored; fail fast if the output
    // directory cannot be created (mkdirs returns false when it already exists,
    // hence the isDirectory check)
    if (!output.mkdirs() && !output.isDirectory()) {
        throw new RuntimeException("could not create output directory " + output);
    }
    try {
        FileUtils.cleanDirectory(output);
    } catch (IOException e1) {
        throw new RuntimeException(e1);
    }
    List<File> files = Files.find(input, Pattern.compile(".*\\.track"));
    System.out.println("found " + files.size() + " files");
    // batch size for writes so each daily file is opened/closed once per 1000 fixes
    int bufferSize = 1000;
    Observable.from(files)
            // read fixes (including mmsi) from each binary track file
            .flatMap(file -> BinaryFixes.from(file, true))
            .lift(Logging.<Fix>logger().showMemory().every(1000000).showCount("recordsMillions").log())
            // group fixes by their UTC calendar day
            .groupBy(fix -> dtf.format(Instant.ofEpochMilli(fix.time())))
            .flatMap(g -> g.buffer(bufferSize))
            .doOnNext(list -> {
                // all fixes in the batch share a day, so name the file after the first
                File file = new File(output, dtf.format(Instant.ofEpochMilli(list.get(0).time())) + ".fix");
                // open in append mode: many batches contribute to the same daily file
                try (OutputStream os = new FileOutputStream(file, true)) {
                    for (Fix fix : list) {
                        BinaryFixes.write(fix, os, BinaryFixesFormat.WITH_MMSI);
                    }
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            })
            // count() + toBlocking() forces evaluation of the whole stream
            .count().toBlocking().single();
}
Example usage of com.github.davidmoten.rx.slf4j.Logging in the project risky by amsa-code.
Taken from the class DriftCandidatesMain, method main:
/**
 * Writes a tab-delimited report of drift-candidate starts (one row per
 * mmsi/day) for 2012-2014, resolving each vessel's IMO number first from AIS
 * ship static reports and then, as a fallback, from IHS data.
 *
 * @param args unused
 * @throws FileNotFoundException if target/output.txt cannot be created
 */
public static void main(String[] args) throws FileNotFoundException {
    // try-with-resources: previously the stream leaked if anything threw
    // before the explicit close() at the end of the method
    try (PrintStream out = new PrintStream("target/output.txt")) {
        out.format("%s\t%s\t%s\t%s\t%s\n", "mmsi", "imo", "date", "lat", "lon");
        DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd");
        Map<Integer, Info> ships = ShipStaticData.getMapFromResource("/ship-data-2014.txt");
        Map<String, Map<String, String>> ihs = IhsReader.fromZipAsMapByMmsi(new File("/media/an/ship-data/ihs/608750-2015-04-01.zip")).toBlocking().single();
        for (int year = 2012; year <= 2014; year++) {
            DriftCandidates.fromCsv(new File("/media/an/drift-candidates/drift-candidates-" + year + ".txt.gz"), true)
                    .lift(Logging.<DriftCandidate>logger().showCount().showMemory().every(100000).log())
                    // keep only the fix that starts a drift episode
                    .filter(c -> c.driftingSince() == c.fix().time())
                    // at most one row per vessel per UTC day
                    .distinct(c -> c.fix().mmsi() + ":" + DateTimeFormatter.ISO_DATE.format(Instant.ofEpochMilli(c.fix().time()).atZone(ZoneOffset.UTC)))
                    .doOnNext(c -> {
                        // lookup the imo from ais ship static reports
                        Optional<Info> aisInfo = Optional.ofNullable(ships.get(c.fix().mmsi()));
                        Optional<String> aisImo;
                        Optional<String> ihsImo;
                        if (aisInfo.isPresent() && aisInfo.get().imo.isPresent()) {
                            aisImo = Optional.of(aisInfo.get().imo.get());
                            ihsImo = Optional.empty();
                        } else {
                            aisImo = Optional.empty();
                            // lookup the imo from ihs data.
                            // BUG FIX: ihs is keyed by String mmsi but was queried with an
                            // int key; Map.get(Object) compiled but always returned null,
                            // so the IHS fallback never fired. Convert the key to String.
                            Optional<Map<String, String>> ihsInfo = Optional.ofNullable(ihs.get(String.valueOf(c.fix().mmsi())));
                            if (ihsInfo.isPresent()) {
                                ihsImo = Optional.ofNullable(ihsInfo.get().get(Key.LRIMOShipNo.toString()));
                            } else {
                                ihsImo = Optional.empty();
                            }
                        }
                        // prefer the AIS imo, fall back to IHS, else blank
                        String imo = aisImo.orElse(ihsImo.orElse(""));
                        out.format("%s\t%s\t%s\t%s\t%s\n", c.fix().mmsi(), imo, dtf.format(Instant.ofEpochMilli(c.fix().time()).atZone(ZoneOffset.UTC)), c.fix().lat(), c.fix().lon());
                    })
                    // force evaluation of the stream for this year
                    .count().toBlocking().single();
        }
    }
}
Example usage of com.github.davidmoten.rx.slf4j.Logging in the project risky by amsa-code.
Taken from the class ShipStaticDataCreator, method writeStaticDataToFile:
/**
 * Extracts AIS ship static messages from the given gzipped NMEA files in
 * parallel (one batch per available processor on the supplied scheduler),
 * de-duplicates by mmsi, and writes one tab-delimited row per vessel to
 * {@code outputFile}. The PrintStream lifecycle is tied to the returned
 * Observable via {@link Observable#using}.
 *
 * @param files      gzipped NMEA input files
 * @param outputFile destination for the tab-delimited static data report
 * @param scheduler  scheduler on which each batch of files is processed
 * @return the distinct ship static messages, emitted as they are written
 */
public static Observable<AisShipStatic> writeStaticDataToFile(List<File> files, File outputFile, Scheduler scheduler) {
    // output stream is opened lazily and closed when the stream terminates or is unsubscribed
    Func0<PrintStream> resourceFactory = Checked.f0(() -> new PrintStream(outputFile));
    Func1<PrintStream, Observable<AisShipStatic>> observableFactory = ps -> Observable
            .from(files)
            // split the file list into roughly one batch per processor
            .buffer(Math.max(1, files.size() / Runtime.getRuntime().availableProcessors() - 1))
            .flatMap(batch -> Observable
                    .from(batch)
                    .lift(Logging.<File>logger().showValue().showMemory().log())
                    .concatMap(file -> Streams
                            .extract(Streams.nmeaFromGzip(file))
                            .flatMap(aisShipStaticOnly)
                            .map(extracted -> extracted.getMessage().get().message())
                            // dedupe within the file
                            .distinct(s -> s.getMmsi())
                            .doOnError(e -> System.err.println("could not read " + file + ": " + e.getMessage()))
                            // an unreadable file is logged and skipped, not fatal
                            .onErrorResumeNext(Observable.<AisShipStatic>empty()))
                    // dedupe within the batch
                    .distinct(s -> s.getMmsi())
                    .subscribeOn(scheduler))
            // dedupe across batches (merged out of order by flatMap)
            .distinct(s -> s.getMmsi())
            .compose(Transformers.mapWithIndex())
            .doOnNext(ix -> {
                // emit the header lines exactly once, before the first row
                if (ix.index() == 0) {
                    ps.println("# MMSI, IMO, AisClass, AisShipType, MaxPresentStaticDraughtMetres, DimAMetres, DimBMetres, DimCMetres, DimDMetres, LengthMetres, WidthMetres, Name");
                    ps.println("# columns are tab delimited");
                    ps.println("# -1 = not present");
                }
            })
            .map(ix -> ix.value())
            .doOnNext(s -> {
                // -1 marks absent optional fields, per the header comment above
                ps.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n", s.getMmsi(), getImo(s).orElse(-1), s instanceof AisShipStaticA ? "A" : "B", s.getShipType(), getMaximumPresentStaticDraughtMetres(s).orElse(-1F), s.getDimensionA().orElse(-1), s.getDimensionB().orElse(-1), s.getDimensionC().orElse(-1), s.getDimensionD().orElse(-1), AisShipStaticUtil.lengthMetres(s).orElse(-1), AisShipStaticUtil.widthMetres(s).orElse(-1), prepareName(s.getName()));
                // flush per row so partial output survives a crash
                ps.flush();
            });
    Action1<PrintStream> disposeAction = ps -> ps.close();
    return Observable.using(resourceFactory, observableFactory, disposeAction);
}
Example usage of com.github.davidmoten.rx.slf4j.Logging in the project risky by amsa-code.
Taken from the class AdHocMain, method main:
/**
 * Extracts fixes in the window 2014-05-13 to 2014-05-27 (UTC) from the 2014
 * 5-minute binary fix archive, filters out invalid mmsis, and writes a
 * tab-delimited report to target/output.txt.
 *
 * @param args unused
 * @throws IOException if the output file cannot be created
 */
public static void main(String[] args) throws IOException {
    // ZonedDateTime.parse is the direct form of the previous
    // ZonedDateTime.from(formatter.parse(...)) round trip
    long start = ZonedDateTime.parse("2014-05-13T00:00:00Z", DateTimeFormatter.ISO_DATE_TIME).toEpochSecond() * 1000;
    long finish = ZonedDateTime.parse("2014-05-27T00:00:00Z", DateTimeFormatter.ISO_DATE_TIME).toEpochSecond() * 1000;
    Pattern pattern = Pattern.compile(".*\\.track");
    List<File> files = Files.find(new File("/media/an/binary-fixes-5-minute/2014"), pattern);
    // try-with-resources: previously the stream was only closed by
    // doOnTerminate and leaked if PrintStream construction succeeded but the
    // pipeline setup threw
    try (PrintStream out = new PrintStream("target/output.txt")) {
        out.println("mmsi\ttime\tlat\tlong\tcourse\tspeedKnots");
        Observable.from(files)
                // extract each file's fixes for the window on the computation scheduler
                .flatMap(file -> extract(file, start, finish).subscribeOn(Schedulers.computation()))
                .filter(fix -> MmsiValidator2.INSTANCE.isValid(fix.mmsi()))
                .map(f -> String.format("%s\t%s\t%s\t%s\t%s\t%s", f.mmsi(), formatDateTime(f.time()), f.lat(), f.lon(), get(f.courseOverGroundDegrees()), get(f.speedOverGroundKnots())))
                .doOnNext(out::println)
                .lift(Logging.<String>logger().showCount().every(10000).log())
                // force evaluation of the whole stream before out is closed
                .count().toBlocking().single();
    }
}
Aggregations