
Example 1 with ObjectMapperString

Use of com.datatorrent.common.util.ObjectMapperString in project apex-core by apache.

The class StatsAgent, method processOperatorPartFile.

private void processOperatorPartFile(BufferedReader partBr, Long startTime, Long endTime, List<OperatorStatsInfo> result) throws IOException {
    String partLine;
    // each line has the form operatorId:timestamp:statsJson; keep only entries inside the optional time window
    while ((partLine = partBr.readLine()) != null) {
        OperatorStatsInfo os = new OperatorStatsInfo();
        int cursor = 0;
        int cursor2;
        cursor2 = partLine.indexOf(':', cursor);
        os.operatorId = Integer.valueOf(partLine.substring(cursor, cursor2));
        cursor = cursor2 + 1;
        cursor2 = partLine.indexOf(':', cursor);
        os.timestamp = Long.valueOf(partLine.substring(cursor, cursor2));
        cursor = cursor2 + 1;
        os.stats = new ObjectMapperString(partLine.substring(cursor));
        if ((startTime == null || os.timestamp >= startTime) && (endTime == null || os.timestamp <= endTime)) {
            result.add(os);
        }
    }
}
Also used : ObjectMapperString(com.datatorrent.common.util.ObjectMapperString)
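
The reader above assumes each part-file line has the layout operatorId:timestamp:statsJson, and keeps the trailing JSON text verbatim by wrapping it in an ObjectMapperString instead of re-parsing it. A minimal, self-contained sketch of that parsing step follows; the sample line and the ParsedLine holder are illustrative only, not part of apex-core.

// Sketch of the colon-delimited part-file format assumed by processOperatorPartFile.
public class PartLineParseSketch {
    static final class ParsedLine {
        int operatorId;
        long timestamp;
        String statsJson; // kept as raw text, much as the example stores it in ObjectMapperString
    }

    static ParsedLine parse(String line) {
        int first = line.indexOf(':');
        int second = line.indexOf(':', first + 1);
        ParsedLine p = new ParsedLine();
        p.operatorId = Integer.parseInt(line.substring(0, first));
        p.timestamp = Long.parseLong(line.substring(first + 1, second));
        // everything after the second colon, including any ':' inside the JSON payload
        p.statsJson = line.substring(second + 1);
        return p;
    }

    public static void main(String[] args) {
        ParsedLine p = parse("3:1500000000000:{\"tuplesProcessed\":42}");
        System.out.println(p.operatorId + " " + p.timestamp + " " + p.statsJson);
    }
}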

Example 2 with ObjectMapperString

Use of com.datatorrent.common.util.ObjectMapperString in project apex-core by apache.

The class StatsAgent, method processContainerPartFile.

private void processContainerPartFile(BufferedReader partBr, Long startTime, Long endTime, List<ContainerStatsInfo> result) throws IOException {
    String partLine;
    while ((partLine = partBr.readLine()) != null) {
        ContainerStatsInfo cs = new ContainerStatsInfo();
        int cursor = 0;
        int cursor2;
        cursor2 = partLine.indexOf(':', cursor);
        cs.containerId = Integer.valueOf(partLine.substring(cursor, cursor2));
        cursor = cursor2 + 1;
        cursor2 = partLine.indexOf(':', cursor);
        cs.timestamp = Long.valueOf(partLine.substring(cursor, cursor2));
        cursor = cursor2 + 1;
        cs.stats = new ObjectMapperString(partLine.substring(cursor));
        if ((startTime == null || cs.timestamp >= startTime) && (endTime == null || cs.timestamp <= endTime)) {
            result.add(cs);
        }
    }
}
Also used : ObjectMapperString(com.datatorrent.common.util.ObjectMapperString)
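
Example 2 is the container-level counterpart of the same id:timestamp:json layout. For reference only, the same extraction could be written with a bounded split; this is a sketch, not the project's code, and it reuses the ContainerStatsInfo and ObjectMapperString types from the example above.

// Sketch only: equivalent parse of one container part-file line using a bounded split.
// The limit of 3 preserves any ':' characters that appear inside the JSON payload.
private static ContainerStatsInfo parseContainerLine(String partLine) {
    String[] parts = partLine.split(":", 3);
    ContainerStatsInfo cs = new ContainerStatsInfo();
    cs.containerId = Integer.valueOf(parts[0]);
    cs.timestamp = Long.valueOf(parts[1]);
    cs.stats = new ObjectMapperString(parts[2]);
    return cs;
}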

Example 3 with ObjectMapperString

Use of com.datatorrent.common.util.ObjectMapperString in project apex-core by apache.

The class LogicalPlanSerializer, method convertToMap.

/**
 * Converts the given logical plan into a map of operators, streams, attributes and
 * (optionally) modules, suitable for JSON serialization.
 *
 * @param dag the logical plan to convert
 * @param includeModules whether to include module details in the result
 * @return a map representation of the logical plan
 */
public static Map<String, Object> convertToMap(LogicalPlan dag, boolean includeModules) {
    HashMap<String, Object> result = new HashMap<>();
    ArrayList<Object> operatorArray = new ArrayList<>();
    ArrayList<Object> streamMap = new ArrayList<>();
    // result.put("applicationName", appConfig.getName());
    result.put("operators", operatorArray);
    result.put("streams", streamMap);
    // LogicalPlan dag = StramAppLauncher.prepareDAG(appConfig, StreamingApplication.LAUNCHMODE_YARN);
    // 
    // should we put the DAGContext info here?
    Map<String, Object> dagAttrs = new HashMap<>();
    for (Map.Entry<Attribute<Object>, Object> e : Attribute.AttributeMap.AttributeInitializer.getAllAttributes(dag, Context.DAGContext.class).entrySet()) {
        dagAttrs.put(e.getKey().getSimpleName(), e.getValue());
    }
    result.put("attributes", dagAttrs);
    Collection<OperatorMeta> allOperators = dag.getAllOperators();
    ObjectMapper propertyObjectMapper = new ObjectMapper();
    propertyObjectMapper.configure(JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS, true);
    propertyObjectMapper.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false);
    StdTypeResolverBuilder typer = new PropertyTypeResolverBuilder();
    typer.init(JsonTypeInfo.Id.CLASS, null);
    typer = typer.inclusion(JsonTypeInfo.As.PROPERTY);
    propertyObjectMapper.setDefaultTyping(typer);
    for (OperatorMeta operatorMeta : allOperators) {
        HashMap<String, Object> operatorDetailMap = new HashMap<>();
        ArrayList<Map<String, Object>> portList = new ArrayList<>();
        Map<String, Object> attributeMap = new HashMap<>();
        String operatorName = operatorMeta.getName();
        operatorArray.add(operatorDetailMap);
        operatorDetailMap.put("name", operatorName);
        operatorDetailMap.put("ports", portList);
        operatorDetailMap.put("class", operatorMeta.getOperator().getClass().getName());
        operatorDetailMap.put("attributes", attributeMap);
        Map<Attribute<Object>, Object> rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(operatorMeta, Context.OperatorContext.class);
        for (Map.Entry<Attribute<Object>, Object> entry : rawAttributes.entrySet()) {
            attributeMap.put(entry.getKey().getSimpleName(), entry.getValue());
        }
        ObjectMapperString str;
        try {
            str = new ObjectMapperString(propertyObjectMapper.writeValueAsString(operatorMeta.getOperator()));
        } catch (Throwable ex) {
            LOG.error("Got exception when trying to get properties for operator {}", operatorMeta.getName(), ex);
            str = null;
        }
        operatorDetailMap.put("properties", str);
        Operators.PortMappingDescriptor pmd = new Operators.PortMappingDescriptor();
        Operators.describe(operatorMeta.getOperator(), pmd);
        for (Map.Entry<String, PortContextPair<InputPort<?>>> entry : pmd.inputPorts.entrySet()) {
            HashMap<String, Object> portDetailMap = new HashMap<>();
            HashMap<String, Object> portAttributeMap = new HashMap<>();
            InputPortMeta portMeta = operatorMeta.getMeta(entry.getValue().component);
            String portName = portMeta.getPortName();
            portDetailMap.put("name", portName);
            portDetailMap.put("type", "input");
            portDetailMap.put("attributes", portAttributeMap);
            rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
            for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
                portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
            }
            portList.add(portDetailMap);
        }
        for (Map.Entry<String, PortContextPair<OutputPort<?>>> entry : pmd.outputPorts.entrySet()) {
            HashMap<String, Object> portDetailMap = new HashMap<>();
            HashMap<String, Object> portAttributeMap = new HashMap<>();
            OutputPortMeta portMeta = operatorMeta.getMeta(entry.getValue().component);
            String portName = portMeta.getPortName();
            portDetailMap.put("name", portName);
            portDetailMap.put("type", "output");
            portDetailMap.put("attributes", portAttributeMap);
            rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
            for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
                portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
            }
            portList.add(portDetailMap);
        }
    }
    Collection<StreamMeta> allStreams = dag.getAllStreams();
    for (StreamMeta streamMeta : allStreams) {
        HashMap<String, Object> streamDetailMap = new HashMap<>();
        String streamName = streamMeta.getName();
        streamMap.add(streamDetailMap);
        String sourcePortName = streamMeta.getSource().getPortName();
        OperatorMeta operatorMeta = streamMeta.getSource().getOperatorMeta();
        HashMap<String, Object> sourcePortDetailMap = new HashMap<>();
        sourcePortDetailMap.put("operatorName", operatorMeta.getName());
        sourcePortDetailMap.put("portName", sourcePortName);
        streamDetailMap.put("name", streamName);
        streamDetailMap.put("source", sourcePortDetailMap);
        Collection<InputPortMeta> sinks = streamMeta.getSinks();
        ArrayList<HashMap<String, Object>> sinkPortList = new ArrayList<>();
        for (InputPortMeta sinkPort : sinks) {
            HashMap<String, Object> sinkPortDetailMap = new HashMap<>();
            sinkPortDetailMap.put("operatorName", sinkPort.getOperatorMeta().getName());
            sinkPortDetailMap.put("portName", sinkPort.getPortName());
            sinkPortList.add(sinkPortDetailMap);
        }
        streamDetailMap.put("sinks", sinkPortList);
        if (streamMeta.getLocality() != null) {
            streamDetailMap.put("locality", streamMeta.getLocality().name());
        }
    }
    if (includeModules) {
        ArrayList<Map<String, Object>> modulesArray = new ArrayList<>();
        result.put("modules", modulesArray);
        for (LogicalPlan.ModuleMeta meta : dag.getAllModules()) {
            modulesArray.add(getLogicalModuleDetails(dag, meta));
        }
    }
    return result;
}
Also used : HashMap(java.util.HashMap) Attribute(com.datatorrent.api.Attribute) StdTypeResolverBuilder(org.codehaus.jackson.map.jsontype.impl.StdTypeResolverBuilder) ArrayList(java.util.ArrayList) ObjectMapperString(com.datatorrent.common.util.ObjectMapperString) PortContextPair(com.datatorrent.stram.plan.logical.Operators.PortContextPair) StreamMeta(com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta) OutputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) Context(com.datatorrent.api.Context) Operators(com.datatorrent.stram.plan.logical.Operators) OperatorMeta(com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta) InputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta) JSONObject(org.codehaus.jettison.json.JSONObject) LogicalPlan(com.datatorrent.stram.plan.logical.LogicalPlan) Map(java.util.Map) BeanMap(org.apache.commons.beanutils.BeanMap)
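
Example 3 serializes each operator's properties to a JSON string up front and stores it in the result map as an ObjectMapperString, presumably so that a later serialization of the whole map embeds that text as raw JSON instead of escaping it as a quoted string. Below is a hypothetical analogue of that idea using Jackson 1.x's @JsonRawValue; it is not apex-core's actual implementation, whose mechanism may differ (for example, it may emit the bare value rather than a wrapping object).

import org.codehaus.jackson.annotate.JsonRawValue;

// Hypothetical stand-in for the idea behind ObjectMapperString: hold text that is
// already JSON and ask Jackson to write it verbatim rather than as an escaped string.
public class RawJsonString {
    private final String json;

    public RawJsonString(String json) {
        this.json = json;
    }

    @JsonRawValue
    public String getJson() {
        return json;
    }

    public static void main(String[] args) throws Exception {
        java.util.Map<String, Object> map = new java.util.HashMap<>();
        map.put("properties", new RawJsonString("{\"threshold\":5}"));
        // should print roughly {"properties":{"json":{"threshold":5}}} - the payload is embedded unescaped
        System.out.println(new org.codehaus.jackson.map.ObjectMapper().writeValueAsString(map));
    }
}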

Example 4 with ObjectMapperString

Use of com.datatorrent.common.util.ObjectMapperString in project apex-core by apache.

The class TupleRecorder, method setup.

public void setup(Operator operator, Map<Class<?>, Class<? extends StringCodec<?>>> codecs) {
    try {
        storage.setup();
        setStartTime(System.currentTimeMillis());
        if (id == null) {
            id = String.valueOf(startTime);
        }
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        bos.write((VERSION + "\n").getBytes());
        RecordInfo recordInfo = new RecordInfo();
        recordInfo.startTime = startTime;
        recordInfo.appId = appId;
        streamCodec = new JsonStreamCodec<>(codecs);
        if (operator != null) {
            BeanInfo beanInfo = Introspector.getBeanInfo(operator.getClass());
            PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
            for (PropertyDescriptor pd : propertyDescriptors) {
                String name = pd.getName();
                Method readMethod = pd.getReadMethod();
                if (readMethod != null) {
                    readMethod.setAccessible(true);
                    try {
                        Slice f = streamCodec.toByteArray(readMethod.invoke(operator));
                        recordInfo.properties.put(name, new ObjectMapperString(f.stringValue()));
                    } catch (Throwable t) {
                        logger.warn("Cannot serialize property {} for operator {}", name, operator.getClass());
                        recordInfo.properties.put(name, null);
                    }
                }
            }
        }
        Slice f = streamCodec.toByteArray(recordInfo);
        bos.write(f.buffer, f.offset, f.length);
        bos.write("\n".getBytes());
        for (PortInfo pi : portMap.values()) {
            f = streamCodec.toByteArray(pi);
            bos.write(f.buffer, f.offset, f.length);
            bos.write("\n".getBytes());
        }
        storage.writeMetaData(bos.toByteArray());
        if (wsClient != null) {
            recordingNameTopic = "applications." + appId + ".tupleRecorder." + getStartTime();
            setupWsClient();
        }
    } catch (Exception ex) {
        logger.error("Trouble setting up tuple recorder", ex);
    }
}
Also used : PropertyDescriptor(java.beans.PropertyDescriptor) BeanInfo(java.beans.BeanInfo) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ObjectMapperString(com.datatorrent.common.util.ObjectMapperString) Method(java.lang.reflect.Method) TimeoutException(java.util.concurrent.TimeoutException) JsonProcessingException(org.codehaus.jackson.JsonProcessingException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) Slice(com.datatorrent.netlet.util.Slice)
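
Example 4 gathers the operator's readable bean properties through java.beans.Introspector before serializing each value. The following standalone sketch isolates that introspection step; it assumes nothing from apex-core, and the Sample class is purely illustrative.

import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;

// Sketch: enumerate readable bean properties of an arbitrary object, the same way
// TupleRecorder.setup() walks operator properties before serializing them.
public class BeanPropertyDump {
    public static class Sample {
        public int getCount() { return 7; }
        public String getName() { return "demo"; }
    }

    public static void dump(Object bean) throws Exception {
        BeanInfo beanInfo = Introspector.getBeanInfo(bean.getClass());
        for (PropertyDescriptor pd : beanInfo.getPropertyDescriptors()) {
            Method readMethod = pd.getReadMethod();
            if (readMethod == null) {
                continue; // write-only property, nothing to record
            }
            Object value = readMethod.invoke(bean);
            System.out.println(pd.getName() + " = " + value);
        }
    }

    public static void main(String[] args) throws Exception {
        dump(new Sample()); // prints class, count and name properties
    }
}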

Example 5 with ObjectMapperString

Use of com.datatorrent.common.util.ObjectMapperString in project apex-core by apache.

The class RecordingsAgent, method getRecordingInfoHelper.

private RecordingInfo getRecordingInfoHelper(String appId, String opId, String id, Set<String> containers) {
    RecordingInfo info = new RecordingInfo();
    info.id = id;
    info.appId = appId;
    info.operatorId = opId;
    BufferedReader br = null;
    IndexFileBufferedReader ifbr = null;
    try {
        String dir = getRecordingDirectory(appId, opId, id);
        if (dir == null) {
            throw new Exception("recording directory is null");
        }
        Path path = new Path(dir);
        JSONObject json;
        FileStatus fileStatus = stramAgent.getFileSystem().getFileStatus(path);
        HashMap<String, PortInfo> portMap = new HashMap<>();
        if (!fileStatus.isDirectory()) {
            throw new Exception(path + " is not a directory");
        }
        // META file processing
        br = new BufferedReader(new InputStreamReader(stramAgent.getFileSystem().open(new Path(dir, FSPartFileCollection.META_FILE))));
        String line;
        line = br.readLine();
        if (!line.equals("1.2")) {
            throw new Exception("Unexpected line: " + line);
        }
        line = br.readLine();
        json = new JSONObject(line);
        info.startTime = json.getLong("startTime");
        info.containerId = json.optString("containerId");
        info.properties = new HashMap<>();
        if (!StringUtils.isBlank(info.containerId) && !containers.contains(info.containerId)) {
            info.ended = true;
        }
        json = json.optJSONObject("properties");
        if (json != null) {
            @SuppressWarnings("unchecked") Iterator<String> keys = json.keys();
            while (keys.hasNext()) {
                String key = keys.next();
                // ugly 2 lines of code below since JSONObject.get(key).toString() doesn't give you json representation for plain strings
                String strValue = json.isNull(key) ? null : json.optString(key);
                info.properties.put(key, strValue != null ? strValue : new ObjectMapperString(json.get(key).toString()));
            }
        }
        info.ports = new ArrayList<>();
        while ((line = br.readLine()) != null) {
            PortInfo portInfo = new PortInfo();
            json = new JSONObject(line);
            portInfo.id = json.getInt("id");
            portInfo.name = json.getString("name");
            portInfo.type = json.getString("type");
            portInfo.streamName = json.getString("streamName");
            info.ports.add(portInfo);
            portMap.put(String.valueOf(portInfo.id), portInfo);
        }
        // INDEX file processing
        ifbr = new IndexFileBufferedReader(new InputStreamReader(stramAgent.getFileSystem().open(new Path(dir, FSPartFileCollection.INDEX_FILE))), dir);
        info.windowIdRanges = new ArrayList<>();
        long prevHiWindowId = -1;
        RecordingsIndexLine indexLine;
        while ((indexLine = (RecordingsIndexLine) ifbr.readIndexLine()) != null) {
            if (indexLine.isEndLine) {
                info.ended = true;
            } else {
                info.totalTuples += indexLine.tupleCount;
                for (Map.Entry<String, MutableLong> entry : indexLine.portTupleCount.entrySet()) {
                    PortInfo portInfo = portMap.get(entry.getKey());
                    if (portInfo == null) {
                        throw new Exception("port info does not exist for " + entry.getKey());
                    }
                    portInfo.tupleCount += entry.getValue().longValue();
                }
                for (TupleRecorder.Range r : indexLine.windowIdRanges) {
                    if (info.windowIdRanges.isEmpty()) {
                        TupleRecorder.Range range = new TupleRecorder.Range();
                        range.low = r.low;
                        info.windowIdRanges.add(range);
                    } else if (prevHiWindowId + 1 != r.low) {
                        TupleRecorder.Range range = info.windowIdRanges.get(info.windowIdRanges.size() - 1);
                        range.high = prevHiWindowId;
                        range = new TupleRecorder.Range();
                        range.low = r.low;
                        info.windowIdRanges.add(range);
                    }
                    prevHiWindowId = r.high;
                }
            }
        }
        if (!info.windowIdRanges.isEmpty()) {
            TupleRecorder.Range range = info.windowIdRanges.get(info.windowIdRanges.size() - 1);
            range.high = prevHiWindowId;
        }
    } catch (Exception ex) {
        LOG.warn("Cannot get recording info for app id {}: {}", appId, ex);
        return null;
    } finally {
        IOUtils.closeQuietly(ifbr);
        IOUtils.closeQuietly(br);
    }
    return info;
}
Also used : FileStatus(org.apache.hadoop.fs.FileStatus) LocatedFileStatus(org.apache.hadoop.fs.LocatedFileStatus) HashMap(java.util.HashMap) ObjectMapperString(com.datatorrent.common.util.ObjectMapperString) Path(org.apache.hadoop.fs.Path) InputStreamReader(java.io.InputStreamReader) TupleRecorder(com.datatorrent.stram.debug.TupleRecorder) IncompatibleVersionException(com.datatorrent.stram.client.WebServicesVersionConversion.IncompatibleVersionException) IOException(java.io.IOException) JSONException(org.codehaus.jettison.json.JSONException) MutableLong(org.apache.commons.lang.mutable.MutableLong) JSONObject(org.codehaus.jettison.json.JSONObject) BufferedReader(java.io.BufferedReader) Map(java.util.Map)
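
The least obvious part of Example 5 is how window-id ranges are rebuilt from successive index lines: a new range is opened only when the incoming low is not contiguous with the last high seen, and the final range's high is patched after the loop. The standalone sketch below mirrors that bookkeeping with a stand-in Range class instead of TupleRecorder.Range.

import java.util.ArrayList;
import java.util.List;

// Sketch: coalesce [low, high] pairs that arrive in order into contiguous ranges,
// mirroring the prevHiWindowId bookkeeping in getRecordingInfoHelper().
public class WindowRangeMerge {
    static final class Range {
        long low;
        long high;
        @Override
        public String toString() {
            return "[" + low + ", " + high + "]";
        }
    }

    static List<Range> merge(long[][] incoming) {
        List<Range> ranges = new ArrayList<>();
        long prevHi = -1;
        for (long[] r : incoming) {
            if (ranges.isEmpty() || prevHi + 1 != r[0]) {
                if (!ranges.isEmpty()) {
                    ranges.get(ranges.size() - 1).high = prevHi; // a gap: close the previous range
                }
                Range range = new Range();
                range.low = r[0];
                ranges.add(range);
            }
            prevHi = r[1];
        }
        if (!ranges.isEmpty()) {
            ranges.get(ranges.size() - 1).high = prevHi; // patch the final range after the loop
        }
        return ranges;
    }

    public static void main(String[] args) {
        // 1-3 and 4-6 are contiguous and merge; 10-12 starts a new range
        System.out.println(merge(new long[][]{{1, 3}, {4, 6}, {10, 12}}));
    }
}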

Aggregations

ObjectMapperString (com.datatorrent.common.util.ObjectMapperString) 5
IOException (java.io.IOException) 2
HashMap (java.util.HashMap) 2
Map (java.util.Map) 2
JSONObject (org.codehaus.jettison.json.JSONObject) 2
Attribute (com.datatorrent.api.Attribute) 1
Context (com.datatorrent.api.Context) 1
Slice (com.datatorrent.netlet.util.Slice) 1
IncompatibleVersionException (com.datatorrent.stram.client.WebServicesVersionConversion.IncompatibleVersionException) 1
TupleRecorder (com.datatorrent.stram.debug.TupleRecorder) 1
LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan) 1
InputPortMeta (com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta) 1
OperatorMeta (com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta) 1
OutputPortMeta (com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta) 1
StreamMeta (com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta) 1
Operators (com.datatorrent.stram.plan.logical.Operators) 1
PortContextPair (com.datatorrent.stram.plan.logical.Operators.PortContextPair) 1
BeanInfo (java.beans.BeanInfo) 1
PropertyDescriptor (java.beans.PropertyDescriptor) 1
BufferedReader (java.io.BufferedReader) 1