Search in sources:

Example 56 with DataFileStream

use of org.apache.avro.file.DataFileStream in project spf4j by zolyfarkas.

In the class AvroStackSampleSupplier, the method getMetaData:

@Override
public ProfileMetaData getMetaData(final Instant pfrom, final Instant pto) throws IOException {
    // Collect the distinct contexts and tags of every sample chunk whose
    // collection interval overlaps the query window [pfrom, pto].
    Set<String> contexts = new HashSet<>();
    Set<String> tags = new HashSet<>();
    try (DataFileStream<ApplicationStackSamples> stream = new DataFileStream<>(Files.newInputStream(file), reader)) {
        while (stream.hasNext()) {
            ApplicationStackSamples samples = stream.next();
            Instant sampleFrom = samples.getCollectedFrom();
            Instant sampleTo = samples.getCollectedTo();
            // Overlap test: the general half-open intersection check
            // (isBefore/isAfter) always rejects zero-duration chunks
            // (sampleFrom == sampleTo), so those get an inclusive point-in-range
            // check instead.
            // BUG FIX: the original compared sampleFrom against pfrom on BOTH
            // sides (">= 0 && <= 0" collapses to "equals pfrom"); the upper
            // bound of the point check must be pto.
            if ((sampleFrom.compareTo(sampleTo) == 0
                    && sampleFrom.compareTo(pfrom) >= 0 && sampleFrom.compareTo(pto) <= 0)
                    || (sampleFrom.isBefore(pto) && sampleTo.isAfter(pfrom))) {
                contexts.add(samples.getContext());
                tags.add(samples.getTag());
            }
        }
    }
    return new ProfileMetaData(contexts, tags);
}
Also used : ApplicationStackSamples(org.spf4j.base.avro.ApplicationStackSamples) Instant(java.time.Instant) DataFileStream(org.apache.avro.file.DataFileStream) HashSet(java.util.HashSet)

Example 57 with DataFileStream

use of org.apache.avro.file.DataFileStream in project spf4j by zolyfarkas.

In the class AvroStackSampleSupplier, the method getSamples:

@Override
public SampleNode getSamples(final String context, final String tag, final Instant pfrom, final Instant pto) throws IOException {
    // Merge every sample chunk that overlaps [pfrom, pto] and matches the
    // optional context/tag filters (null means "match any") into one tree.
    // Returns null when no chunk matches.
    SampleNode result = null;
    try (DataFileStream<ApplicationStackSamples> stream = new DataFileStream<>(Files.newInputStream(file), reader)) {
        while (stream.hasNext()) {
            ApplicationStackSamples samples = stream.next();
            Instant sampleFrom = samples.getCollectedFrom();
            Instant sampleTo = samples.getCollectedTo();
            // Zero-duration chunks (sampleFrom == sampleTo) need an inclusive
            // point-in-range check, since the isBefore/isAfter intersection
            // test always rejects them.
            // BUG FIX: the original compared sampleFrom against pfrom on BOTH
            // sides (">= 0 && <= 0" collapses to "equals pfrom"); the upper
            // bound of the point check must be pto.
            if (((sampleFrom.compareTo(sampleTo) == 0
                    && sampleFrom.compareTo(pfrom) >= 0 && sampleFrom.compareTo(pto) <= 0)
                    || (sampleFrom.isBefore(pto) && sampleTo.isAfter(pfrom)))
                    && (tag == null || samples.getTag().equals(tag))
                    && (context == null || samples.getContext().equals(context))) {
                SampleNode currentSamples = Converter.convert(samples.getStackSamples().iterator());
                if (result == null) {
                    result = currentSamples;
                } else if (currentSamples != null) {
                    // Accumulate into the first tree; chunks are merged in file order.
                    result.add(currentSamples);
                }
            }
        }
    }
    return result;
}
Also used : ApplicationStackSamples(org.spf4j.base.avro.ApplicationStackSamples) Instant(java.time.Instant) DataFileStream(org.apache.avro.file.DataFileStream)

Example 58 with DataFileStream

use of org.apache.avro.file.DataFileStream in project spf4j by zolyfarkas.

In the class AvroStackSampleSupplier, the method scanLimits:

private synchronized void scanLimits() throws IOException {
    // Lazily determine the time range covered by the sample file: `from` is the
    // start of the first chunk, `to` the end of the last one. Idempotent — a
    // non-null `from` means the scan already ran.
    if (this.from != null) {
        return;
    }
    // Defaults used when the file contains no chunks at all.
    Instant first = Instant.MIN;
    Instant last = Instant.MAX;
    try (DataFileStream<ApplicationStackSamples> stream = new DataFileStream<>(Files.newInputStream(file), reader)) {
        boolean seenAny = false;
        while (stream.hasNext()) {
            ApplicationStackSamples chunk = stream.next();
            if (!seenAny) {
                first = chunk.getCollectedFrom();
                seenAny = true;
            }
            last = chunk.getCollectedTo();
        }
        this.from = first;
        this.to = last;
    }
}
Also used : ApplicationStackSamples(org.spf4j.base.avro.ApplicationStackSamples) Instant(java.time.Instant) DataFileStream(org.apache.avro.file.DataFileStream)

Example 59 with DataFileStream

use of org.apache.avro.file.DataFileStream in project spf4j by zolyfarkas.

In the class AvroMeasurementStoreReader, the method getMeasurements:

@Override
public Collection<Schema> getMeasurements(final Predicate<String> filter) throws IOException {
    // Read every table definition from the info file, group the ids of
    // same-named tables under a single schema, and return one schema per
    // accepted name with its id set attached as the IDS_PROP property.
    Map<String, Pair<Schema, Set<Long>>> byName = new THashMap<>();
    try (DataFileStream<TableDef> defs = new DataFileStream<TableDef>(Files.newInputStream(infoFile), new SpecificDatumReader<>(TableDef.class))) {
        for (TableDef def : defs) {
            String name = def.getName();
            // The filter sees the sanitized name, but grouping is by raw name.
            if (!filter.test(TableDefs.sanitizeName(name))) {
                continue;
            }
            // First occurrence of a name builds the schema and an id set;
            // later occurrences just contribute their id.
            Pair<Schema, Set<Long>> entry = byName.computeIfAbsent(name,
                    k -> Pair.of(TableDefs.createSchema(def), new HashSet<>(2)));
            entry.getValue().add(def.getId());
        }
    }
    List<Schema> schemas = new ArrayList<>(byName.size());
    for (Pair<Schema, Set<Long>> entry : byName.values()) {
        Schema schema = entry.getKey();
        schema.addProp(TimeSeriesRecord.IDS_PROP, entry.getValue());
        schemas.add(schema);
    }
    return schemas;
}
Also used : Iterables(com.google.common.collect.Iterables) Arrays(java.util.Arrays) AvroCloseableIterable(org.spf4j.base.avro.AvroCloseableIterable) ParametersAreNonnullByDefault(javax.annotation.ParametersAreNonnullByDefault) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) DirectoryStream(java.nio.file.DirectoryStream) Map(java.util.Map) TableDefs(org.spf4j.tsdb2.TableDefs) Path(java.nio.file.Path) Nonnull(javax.annotation.Nonnull) Observation(org.spf4j.tsdb2.avro.Observation) Function(com.google.common.base.Function) Schema(org.apache.avro.Schema) MeasurementStoreQuery(org.spf4j.perf.MeasurementStoreQuery) Files(java.nio.file.Files) Predicate(java.util.function.Predicate) Collection(java.util.Collection) Set(java.util.Set) DataFileStream(org.apache.avro.file.DataFileStream) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) SpecificDatumReader(org.apache.avro.specific.SpecificDatumReader) Closeables(org.spf4j.base.Closeables) UncheckedIOException(java.io.UncheckedIOException) List(java.util.List) TimeSeriesRecord(org.spf4j.perf.TimeSeriesRecord) Closeable(java.io.Closeable) THashMap(gnu.trove.map.hash.THashMap) Pair(org.spf4j.base.Pair) Collections(java.util.Collections) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings) TableDef(org.spf4j.tsdb2.avro.TableDef) HashSet(java.util.HashSet) Set(java.util.Set) Schema(org.apache.avro.Schema) ArrayList(java.util.ArrayList) DataFileStream(org.apache.avro.file.DataFileStream) TableDef(org.spf4j.tsdb2.avro.TableDef) THashMap(gnu.trove.map.hash.THashMap) Pair(org.spf4j.base.Pair) HashSet(java.util.HashSet)

Aggregations

DataFileStream (org.apache.avro.file.DataFileStream)59 GenericRecord (org.apache.avro.generic.GenericRecord)39 GenericDatumReader (org.apache.avro.generic.GenericDatumReader)34 Test (org.junit.Test)26 Schema (org.apache.avro.Schema)21 ByteArrayInputStream (java.io.ByteArrayInputStream)20 InputStream (java.io.InputStream)19 IOException (java.io.IOException)13 ByteArrayOutputStream (java.io.ByteArrayOutputStream)11 File (java.io.File)9 FileInputStream (java.io.FileInputStream)9 ResultSet (java.sql.ResultSet)9 HashMap (java.util.HashMap)9 MockFlowFile (org.apache.nifi.util.MockFlowFile)9 Statement (java.sql.Statement)8 BufferedInputStream (java.io.BufferedInputStream)7 HashSet (java.util.HashSet)7 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)7 DataFileWriter (org.apache.avro.file.DataFileWriter)7 Path (org.apache.hadoop.fs.Path)7