Use of org.spf4j.stackmonitor.SampleNode in project spf4j by zolyfarkas.
The class Converter, method loadLabeledDumps.
/**
 * Load samples from a file containing multiple labeled stack samples.
 * @param file the ssdump3 file.
 * @return a map from dump label to the root SampleNode of that dump.
 * @throws IOException if the file cannot be read or decoded.
 */
@SuppressFBWarnings("NP_LOAD_OF_KNOWN_NULL_VALUE")
public static Map<String, SampleNode> loadLabeledDumps(final File file) throws IOException {
  try (InputStream bis = newInputStream(file)) {
    final SpecificDatumReader<StackSampleElement> reader = new SpecificDatumReader<>(StackSampleElement.SCHEMA$);
    final BinaryDecoder decoder = DecoderFactory.get().directBinaryDecoder(bis, null);
    // The file is an Avro map: label -> array of StackSampleElement, read block by block.
    long nrItems = decoder.readMapStart();
    StackSampleElement asmp = new StackSampleElement();
    Map<String, SampleNode> result = new HashMap<>((int) nrItems);
    while (nrItems > 0) {
      for (int i = 0; i < nrItems; i++) {
        String key = decoder.readString();
        TIntObjectMap<SampleNode> index = loadSamples(decoder, asmp, reader);
        // The node with id 0 is the root of the reconstructed sample tree.
        result.put(key, index.get(0));
      }
      nrItems = decoder.mapNext();
    }
    return result;
  }
}
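A minimal usage sketch for loadLabeledDumps (not from the spf4j sources); the path "profile.ssdump3" is a placeholder, and the loop only prints each label with its root node.

import java.io.File;
import java.io.IOException;
import java.util.Map;
import org.spf4j.ssdump2.Converter;
import org.spf4j.stackmonitor.SampleNode;

public final class LoadLabeledDumpsExample {

  public static void main(final String[] args) throws IOException {
    Map<String, SampleNode> dumps = Converter.loadLabeledDumps(new File("profile.ssdump3"));
    for (Map.Entry<String, SampleNode> e : dumps.entrySet()) {
      // Each entry is one labeled dump; the value is the root of its sample tree.
      System.out.println(e.getKey() + " -> " + e.getValue());
    }
  }
}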
Use of org.spf4j.stackmonitor.SampleNode in project spf4j by zolyfarkas.
The class Converter, method convert.
@Nullable
public static SampleNode convert(final Iterator<StackSampleElement> samples) {
  TIntObjectMap<SampleNode> index = new TIntObjectHashMap<>();
  while (samples.hasNext()) {
    StackSampleElement asmp = samples.next();
    SampleNode sn = new SampleNode(asmp.getCount());
    // Link this node under its parent's method; parents are expected to appear before their children.
    SampleNode parent = index.get(asmp.getParentId());
    if (parent != null) {
      Method m = asmp.getMethod();
      parent.put(m, sn);
    }
    index.put(asmp.getId(), sn);
  }
  // The element with id 0 is the root; null if no elements were supplied.
  return index.get(0);
}
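A hedged sketch of calling convert with a streamed iterator (not from the spf4j sources): it assumes a hypothetical Avro object-container file "elements.avro" holding StackSampleElement records, chosen only because Avro's DataFileReader implements Iterator and pairs naturally with the SpecificDatumReader already used above.

import java.io.File;
import java.io.IOException;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.specific.SpecificDatumReader;
import org.spf4j.ssdump2.Converter;
import org.spf4j.stackmonitor.SampleNode;
// StackSampleElement import as in the snippets above (its package is not shown here).

public final class ConvertExample {

  public static void main(final String[] args) throws IOException {
    File input = new File("elements.avro"); // hypothetical input file
    try (DataFileReader<StackSampleElement> elements =
            new DataFileReader<>(input, new SpecificDatumReader<>(StackSampleElement.class))) {
      // DataFileReader implements Iterator<StackSampleElement>, so it can be passed directly.
      SampleNode root = Converter.convert(elements);
      System.out.println(root); // null when the file contains no elements
    }
  }
}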
Use of org.spf4j.stackmonitor.SampleNode in project spf4j by zolyfarkas.
The class Converter, method loadSamples.
// It's a private method, don't care about being generic.
@SuppressFBWarnings("OCP_OVERLY_CONCRETE_PARAMETER")
private static TIntObjectMap<SampleNode> loadSamples(final Decoder decoder, final StackSampleElement pasmp,
        final SpecificDatumReader<StackSampleElement> reader) throws IOException {
  TIntObjectMap<SampleNode> index = new TIntObjectHashMap<>();
  long nrArrayItems = decoder.readArrayStart();
  while (nrArrayItems > 0) {
    for (int j = 0; j < nrArrayItems; j++) {
      // pasmp is reused as the target record for every read.
      StackSampleElement asmp = reader.read(pasmp, decoder);
      SampleNode sn = new SampleNode(asmp.getCount());
      SampleNode parent = index.get(asmp.getParentId());
      if (parent != null) {
        // Copy the Method out of the reused record before storing it in the tree.
        Method readMethod = asmp.getMethod();
        Method m = new Method(readMethod.getDeclaringClass(), readMethod.getName());
        parent.put(m, sn);
      }
      index.put(asmp.getId(), sn);
    }
    nrArrayItems = decoder.arrayNext();
  }
  return index;
}
Use of org.spf4j.stackmonitor.SampleNode in project spf4j by zolyfarkas.
The class ConverterTest, method testSaveLoad.
@Test
public void testSaveLoad() throws IOException {
  File test = File.createTempFile("test", ".ssdump3");
  SampleNode testSample = testSample();
  Map<String, SampleNode> dumps = new HashMap<>(4);
  // One entry with a null node and one with a real sample tree.
  dumps.put("zero", null);
  dumps.put("something", testSample);
  Converter.saveLabeledDumps(test, dumps);
  Map<String, SampleNode> loadLabeledDumps = Converter.loadLabeledDumps(test);
  Assert.assertEquals(testSample, loadLabeledDumps.get("something"));
}
Use of org.spf4j.stackmonitor.SampleNode in project spf4j by zolyfarkas.
The class Explorer, method toSupplier.
private StackSampleSupplier toSupplier(final File file) throws IOException {
  if (Spf4jFileFilter.SSDUMP.accept(file)) {
    // Legacy single-dump format.
    SampleNode samples = loadLegacyFormat(file);
    Instant now = Instant.now();
    return new OneStackSampleSupplier(now, now, samples);
  } else if (Spf4jFileFilter.SSDUMP2.accept(file) || Spf4jFileFilter.SSDUMP2_GZ.accept(file)) {
    SampleNode samples = org.spf4j.ssdump2.Converter.load(file);
    Instant now = Instant.now();
    return new OneStackSampleSupplier(now, now, samples);
  } else if (Spf4jFileFilter.SSDUMP3.accept(file) || Spf4jFileFilter.SSDUMP3_GZ.accept(file)) {
    // ssdump3 files contain multiple labeled dumps.
    Map<String, SampleNode> loadLabeledDumps = org.spf4j.ssdump2.Converter.loadLabeledDumps(file);
    Instant now = Instant.now();
    return new MultiStackSampleSupplier(now, now, loadLabeledDumps);
  } else if (Spf4jFileFilter.D3_JSON.accept(file)) {
    try (BufferedReader br = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
      Pair<Method, SampleNode> parse = SampleNode.parseD3Json(br);
      Instant now = Instant.now();
      return new OneStackSampleSupplier(now, now, parse.getSecond());
    }
  } else if (Spf4jFileFilter.SPF4J_JSON.accept(file)) {
    try (BufferedReader br = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
      Pair<Method, SampleNode> parse = SampleNode.parse(br);
      Instant now = Instant.now();
      return new OneStackSampleSupplier(now, now, parse.getSecond());
    }
  } else if (Spf4jFileFilter.PROFILE_AVRO.accept(file)) {
    return new AvroStackSampleSupplier(file.toPath());
  } else {
    throw new IOException("Unsupported file format " + file);
  }
}
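The dispatch above is specific to the Explorer UI. Below is a sketch of the same idea using only the public Converter loaders shown on this page; the helper name loadAnyDump and the extension strings are assumptions inferred from the Spf4jFileFilter constant names, not spf4j API.

import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import org.spf4j.ssdump2.Converter;
import org.spf4j.stackmonitor.SampleNode;

public final class DumpLoader {

  private DumpLoader() { }

  // Hypothetical helper: load any supported ssdump file into a label -> root-node map.
  public static Map<String, SampleNode> loadAnyDump(final File file) throws IOException {
    String name = file.getName();
    if (name.endsWith(".ssdump3") || name.endsWith(".ssdump3.gz")) { // extensions assumed
      return Converter.loadLabeledDumps(file);
    } else if (name.endsWith(".ssdump2") || name.endsWith(".ssdump2.gz")) {
      return Collections.singletonMap("default", Converter.load(file));
    } else {
      throw new IOException("Unsupported file format " + file);
    }
  }
}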