
Example 1 with HttpStatisticsCollector

use of com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector in project databus by linkedin.

In the class PhysicalSourcesRequestProcessor, the method process:

@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException {
    ObjectMapper mapper = new ObjectMapper();
    StringWriter out = new StringWriter(10240);
    List<PhysicalSourceStaticConfig> sources = _relay.getPhysicalSources();
    if (sources.isEmpty())
        mapper.writeValue(out, new ArrayList<PhysicalSourceStaticConfig>());
    else
        mapper.writeValue(out, sources);
    byte[] resultBytes = out.toString().getBytes(Charset.defaultCharset());
    request.getResponseContent().write(ByteBuffer.wrap(resultBytes));
    HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
    if (null != relayStatsCollector) {
        HttpStatisticsCollector connStatsCollector = (HttpStatisticsCollector) request.getParams().get(relayStatsCollector.getName());
        if (null != connStatsCollector) {
            connStatsCollector.registerSourcesCall();
        } else {
            relayStatsCollector.registerSourcesCall();
        }
    }
    return request;
}
Also used : PhysicalSourceStaticConfig(com.linkedin.databus2.relay.config.PhysicalSourceStaticConfig) StringWriter(java.io.StringWriter) HttpStatisticsCollector(com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector) ArrayList(java.util.ArrayList) ObjectMapper(org.codehaus.jackson.map.ObjectMapper)
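
The statistics bookkeeping at the end of process() follows a pattern that recurs in the examples below: look up a per-connection HttpStatisticsCollector stored in the request parameters under the relay collector's name, and fall back to the relay-wide collector if none is attached. Below is a minimal sketch of that lookup factored into a helper; the StatsCollectorResolver class and pickCollector method are hypothetical, and the DatabusRequest import path is assumed from the surrounding package layout.

// Hypothetical helper (not part of the Databus sources): resolve the collector to
// report against, preferring the per-connection collector registered in the request
// params under the relay collector's name.
import com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector;
import com.linkedin.databus2.core.container.request.DatabusRequest;  // assumed package

final class StatsCollectorResolver {
    private StatsCollectorResolver() {
    }

    static HttpStatisticsCollector pickCollector(DatabusRequest request,
                                                 HttpStatisticsCollector relayStatsCollector) {
        if (null == relayStatsCollector) {
            return null;  // statistics collection is disabled on this relay
        }
        HttpStatisticsCollector connStatsCollector =
                (HttpStatisticsCollector) request.getParams().get(relayStatsCollector.getName());
        return (null != connStatsCollector) ? connStatsCollector : relayStatsCollector;
    }
}

With such a helper, the tail of process() reduces to a null check plus a single registerSourcesCall() on whatever pickCollector() returns.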

Example 2 with HttpStatisticsCollector

use of com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector in project databus by linkedin.

In the class RegisterRequestProcessor, the method process:

@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException {
    try {
        // fail early if optional version param is included but isn't valid
        // 2 and 3 are the same for us; 4 is a superset only newer clients understand
        int registerRequestProtocolVersion = 3;
        String registerRequestProtocolVersionStr = request.getParams().getProperty(DatabusHttpHeaders.PROTOCOL_VERSION_PARAM);
        if (registerRequestProtocolVersionStr != null) {
            try {
                registerRequestProtocolVersion = Integer.parseInt(registerRequestProtocolVersionStr);
            } catch (NumberFormatException e) {
                LOG.error("Could not parse /register request protocol version: " + registerRequestProtocolVersionStr);
                throw new InvalidRequestParamValueException(COMMAND_NAME, DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, registerRequestProtocolVersionStr);
            }
            if (registerRequestProtocolVersion < 2 || registerRequestProtocolVersion > 4) {
                LOG.error("Out-of-range /register request protocol version: " + registerRequestProtocolVersionStr);
                throw new InvalidRequestParamValueException(COMMAND_NAME, DatabusHttpHeaders.PROTOCOL_VERSION_PARAM, registerRequestProtocolVersionStr);
            }
        }
        Collection<LogicalSource> logicalSources = null;
        HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
        String sources = request.getParams().getProperty(SOURCES_PARAM);
        if (null == sources) {
            // need to return all schemas, so first get all sources
            logicalSources = _relay.getSourcesIdNameRegistry().getAllSources();
        } else {
            String[] sourceIds = sources.split(",");
            logicalSources = new ArrayList<LogicalSource>(sourceIds.length);
            for (String sourceId : sourceIds) {
                int srcId;
                String trimmedSourceId = sourceId.trim();
                try {
                    srcId = Integer.valueOf(trimmedSourceId);
                    LogicalSource lsource = _relay.getSourcesIdNameRegistry().getSource(srcId);
                    if (null != lsource)
                        logicalSources.add(lsource);
                    else {
                        LOG.error("No source name for source id: " + srcId);
                        throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, sourceId);
                    }
                } catch (NumberFormatException nfe) {
                    if (relayStatsCollector != null) {
                        relayStatsCollector.registerInvalidRegisterCall();
                    }
                    throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, sourceId);
                }
            }
        }
        SchemaRegistryService schemaRegistry = _relay.getSchemaRegistryService();
        ArrayList<RegisterResponseEntry> registeredSources = new ArrayList<RegisterResponseEntry>(20);
        for (LogicalSource lsource : logicalSources) {
            getSchemas(schemaRegistry, lsource.getName(), lsource.getId(), sources, registeredSources);
        }
        // Note that, as of April 2013, the Espresso sandbox's schema registry
        // (in JSON format) is 4.5 MB and growing.  But 100 KB is probably OK
        // for regular production cases.
        StringWriter out = new StringWriter(102400);
        ObjectMapper mapper = new ObjectMapper();
        // any circumstances under which we might want to override this?
        int registerResponseProtocolVersion = registerRequestProtocolVersion;
        if (registerRequestProtocolVersion == 4) {  // DDSDBUS-2009
            LOG.debug("Got version 4 /register request; fetching metadata schema.");
            // Get (replication) metadata schema from registry; format it as list
            // of schemas (multiple only if more than one version exists).  Per
            // https://iwww.corp.linkedin.com/wiki/cf/display/ENGS/Espresso+Metadata+Schema,
            // name of replication metadata is simply "metadata".
            ArrayList<RegisterResponseMetadataEntry> registeredMetadata = new ArrayList<RegisterResponseMetadataEntry>(2);
            getMetadataSchemas(schemaRegistry, registeredMetadata);
            // Set up the v4 response as a map:  one entry is the existing list of source
            // schemas, and the others (if present) are the new lists of metadata schema(s)
            // and (TODO) key schemas.
            HashMap<String, List<Object>> responseMap = new HashMap<String, List<Object>>(4);
            responseMap.put(RegisterResponseEntry.SOURCE_SCHEMAS_KEY, (List<Object>) (List<?>) registeredSources);
            if (registeredMetadata.size() > 0) {
                LOG.debug("Sending v4 /register response with metadata schema.");
                responseMap.put(RegisterResponseMetadataEntry.METADATA_SCHEMAS_KEY, (List<Object>) (List<?>) registeredMetadata);
            } else {
                LOG.debug("No metadata schema available; sending v4 /register response without.");
            }
            // TODO:  figure out how to retrieve key schemas and include via RegisterResponseEntry.KEY_SCHEMAS_KEY
            mapper.writeValue(out, responseMap);
        } else {  // fall back to old style (v2/v3 response)
            mapper.writeValue(out, registeredSources);
        }
        String outStr = out.toString();
        String compress = request.getParams().getProperty(DatabusHttpHeaders.PROTOCOL_COMPRESS_PARAM);
        if ("true".equals(compress)) {
            outStr = CompressUtil.compress(outStr);
        }
        ChunkedWritableByteChannel responseContent = request.getResponseContent();
        byte[] resultBytes = outStr.getBytes(Charset.defaultCharset());
        responseContent.addMetadata(DatabusHttpHeaders.DBUS_CLIENT_RELAY_PROTOCOL_VERSION_HDR, registerResponseProtocolVersion);
        responseContent.write(ByteBuffer.wrap(resultBytes));
        if (null != relayStatsCollector) {
            HttpStatisticsCollector connStatsCollector = (HttpStatisticsCollector) request.getParams().get(relayStatsCollector.getName());
            if (null != connStatsCollector) {
                connStatsCollector.registerRegisterCall(registeredSources);
            } else {
                relayStatsCollector.registerRegisterCall(registeredSources);
            }
        }
        return request;
    } catch (InvalidRequestParamValueException e) {
        HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
        if (null != relayStatsCollector)
            relayStatsCollector.registerInvalidRegisterCall();
        throw e;
    }
}
Also used : ChunkedWritableByteChannel(com.linkedin.databus2.core.container.ChunkedWritableByteChannel) HashMap(java.util.HashMap) SchemaRegistryService(com.linkedin.databus2.schemas.SchemaRegistryService) ArrayList(java.util.ArrayList) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) InvalidRequestParamValueException(com.linkedin.databus2.core.container.request.InvalidRequestParamValueException) StringWriter(java.io.StringWriter) RegisterResponseMetadataEntry(com.linkedin.databus2.core.container.request.RegisterResponseMetadataEntry) HttpStatisticsCollector(com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector) RegisterResponseEntry(com.linkedin.databus2.core.container.request.RegisterResponseEntry) ArrayList(java.util.ArrayList) List(java.util.List) ObjectMapper(org.codehaus.jackson.map.ObjectMapper)
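
On the wire, the v2/v3 /register response is a bare JSON array of source-schema entries, while the v4 response (DDSDBUS-2009) is a map keyed by RegisterResponseEntry.SOURCE_SCHEMAS_KEY with optional metadata (and, eventually, key) schema lists. A minimal client-side sketch of reading both shapes back is shown below, assuming the same org.codehaus.jackson 1.x mapper the relay uses and access to the key constant; the RegisterResponseParser class and parseSourceSchemas method are hypothetical.

// Hypothetical client-side parser for the /register response body produced above.
import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;

import com.linkedin.databus2.core.container.request.RegisterResponseEntry;

final class RegisterResponseParser {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private RegisterResponseParser() {
    }

    // v2/v3: the body is the source-schema list itself.
    // v4:    the body is a map; the source-schema list sits under SOURCE_SCHEMAS_KEY.
    static List<Object> parseSourceSchemas(String body, int protocolVersion) throws IOException {
        if (protocolVersion >= 4) {
            Map<String, List<Object>> responseMap =
                    MAPPER.readValue(body, new TypeReference<Map<String, List<Object>>>() {});
            return responseMap.get(RegisterResponseEntry.SOURCE_SCHEMAS_KEY);
        }
        return MAPPER.readValue(body, new TypeReference<List<Object>>() {});
    }
}

Responses requested with compress=true would additionally need the inverse of CompressUtil.compress() applied before parsing; that step is omitted from the sketch.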

Example 3 with HttpStatisticsCollector

use of com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector in project databus by linkedin.

In the class RegisterRequestProcessor, the method getMetadataSchemas:

/**
 * Returns list of versioned metadata schemas.
 * TODO (DDSDBUS-2093):  implement this.
 */
private void getMetadataSchemas(SchemaRegistryService schemaRegistry,                         // IN
                                ArrayList<RegisterResponseMetadataEntry> registeredMetadata)  // OUT
        throws RequestProcessingException {
    Map<SchemaId, VersionedSchema> versionedSchemas = null;
    try {
        VersionedSchemaSet schemaSet = schemaRegistry.fetchAllMetadataSchemaVersions();
        if (schemaSet != null) {
            versionedSchemas = schemaSet.getAllVersionsWithSchemaId(SchemaRegistryService.DEFAULT_METADATA_SCHEMA_SOURCE);
        }
    } catch (DatabusException ie) {
        HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
        if (relayStatsCollector != null)
            relayStatsCollector.registerInvalidRegisterCall();
        throw new RequestProcessingException(ie);
    }
    if (versionedSchemas != null && !versionedSchemas.isEmpty()) {
        for (SchemaId id : versionedSchemas.keySet()) {
            VersionedSchema entry = versionedSchemas.get(id);
            if (entry.getOrigSchemaStr() == null) {
                throw new RequestProcessingException("Null schema string for metadata version " + entry.getVersion());
            }
            registeredMetadata.add(new RegisterResponseMetadataEntry((short) entry.getVersion(), entry.getOrigSchemaStr(), id.getByteArray()));
        }
    }
}
Also used : DatabusException(com.linkedin.databus2.core.DatabusException) RegisterResponseMetadataEntry(com.linkedin.databus2.core.container.request.RegisterResponseMetadataEntry) HttpStatisticsCollector(com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector) SchemaId(com.linkedin.databus2.schemas.SchemaId) RequestProcessingException(com.linkedin.databus2.core.container.request.RequestProcessingException) VersionedSchemaSet(com.linkedin.databus2.schemas.VersionedSchemaSet) VersionedSchema(com.linkedin.databus2.schemas.VersionedSchema)

Example 4 with HttpStatisticsCollector

use of com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector in project databus by linkedin.

In the class RegisterRequestProcessor, the method getSchemas:

/**
 * Returns list of versioned source (or key?) schemas for the given source name and associates
 * them with the specified ID.
 * TODO:  either add support for key schemas or rename method to getSourceSchemas()
 */
private void getSchemas(SchemaRegistryService schemaRegistry,                // IN
                        String name,                                         // IN:  source name
                        Integer sourceId,                                    // IN
                        String sources,                                      // IN (for error-logging only)
                        ArrayList<RegisterResponseEntry> registeredSources)  // OUT
        throws RequestProcessingException {
    Map<Short, String> versionedSchemas = null;
    try {
        versionedSchemas = schemaRegistry.fetchAllSchemaVersionsBySourceName(name);
    } catch (DatabusException ie) {
        HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
        if (relayStatsCollector != null)
            relayStatsCollector.registerInvalidRegisterCall();
        throw new RequestProcessingException(ie);
    }
    if ((null == versionedSchemas) || (versionedSchemas.isEmpty())) {
        HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
        if (relayStatsCollector != null)
            relayStatsCollector.registerInvalidRegisterCall();
        LOG.error("Problem fetching schema for sourceId " + sourceId + "; sources string = " + sources);
    } else {
        for (Entry<Short, String> e : versionedSchemas.entrySet()) {
            registeredSources.add(new RegisterResponseEntry(sourceId.longValue(), e.getKey(), e.getValue()));
        }
    }
}
Also used : DatabusException(com.linkedin.databus2.core.DatabusException) HttpStatisticsCollector(com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector) RegisterResponseEntry(com.linkedin.databus2.core.container.request.RegisterResponseEntry) RequestProcessingException(com.linkedin.databus2.core.container.request.RequestProcessingException)
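
fetchAllSchemaVersionsBySourceName() hands back a Map<Short, String> of version number to schema string, and getSchemas() forwards every version to the client. The sketch below, using only java.util, shows how a caller might instead pick just the newest version from such a map; the LatestSchemaPicker class and latestSchema method are hypothetical.

// Hypothetical utility: select the highest-versioned schema from the
// version-to-schema map returned by fetchAllSchemaVersionsBySourceName().
import java.util.Collections;
import java.util.Map;

final class LatestSchemaPicker {
    private LatestSchemaPicker() {
    }

    /** Returns the schema string with the highest version, or null for an empty/missing map. */
    static String latestSchema(Map<Short, String> versionedSchemas) {
        if (null == versionedSchemas || versionedSchemas.isEmpty()) {
            return null;
        }
        Short latestVersion = Collections.max(versionedSchemas.keySet());
        return versionedSchemas.get(latestVersion);
    }
}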

Example 5 with HttpStatisticsCollector

use of com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector in project databus by linkedin.

In the class SourcesRequestProcessor, the method process:

@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException {
    int protoVersion = request.getOptionalIntParam(VERSION_PARAM_NAME, 1);
    ObjectMapper mapper = new ObjectMapper();
    StringWriter out = new StringWriter(10240);
    Collection<LogicalSource> sources = _relay.getSourcesIdNameRegistry().getAllSources();
    if (1 == protoVersion) {
        ArrayList<IdNamePair> sourcePairs = new ArrayList<IdNamePair>(sources.size());
        for (LogicalSource source : sources) sourcePairs.add(new IdNamePair(source.getId().longValue(), source.getName()));
        mapper.writeValue(out, sourcePairs);
    } else if (2 == protoVersion)
        mapper.writeValue(out, sources);
    else
        throw new InvalidRequestParamValueException(COMMAND_NAME, VERSION_PARAM_NAME, Integer.toString(protoVersion));
    byte[] resultBytes = out.toString().getBytes(Charset.defaultCharset());
    request.getResponseContent().write(ByteBuffer.wrap(resultBytes));
    HttpStatisticsCollector relayStatsCollector = _relay.getHttpStatisticsCollector();
    if (null != relayStatsCollector) {
        HttpStatisticsCollector connStatsCollector = (HttpStatisticsCollector) request.getParams().get(relayStatsCollector.getName());
        if (null != connStatsCollector) {
            connStatsCollector.registerSourcesCall();
        } else {
            relayStatsCollector.registerSourcesCall();
        }
    }
    return request;
}
Also used : StringWriter(java.io.StringWriter) HttpStatisticsCollector(com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector) ArrayList(java.util.ArrayList) IdNamePair(com.linkedin.databus.core.util.IdNamePair) LogicalSource(com.linkedin.databus.core.data_model.LogicalSource) InvalidRequestParamValueException(com.linkedin.databus2.core.container.request.InvalidRequestParamValueException) ObjectMapper(org.codehaus.jackson.map.ObjectMapper)
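
For protocol version 1 the response flattens each LogicalSource to an IdNamePair of (id, name); version 2 serializes the LogicalSource objects as-is. The v1 projection done inline above can be isolated as below; the SourcesV1Projection class and toIdNamePairs method are hypothetical, while the IdNamePair constructor and LogicalSource accessors are exactly the ones used in the example.

// Hypothetical helper extracting the v1 /sources projection shown above.
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import com.linkedin.databus.core.data_model.LogicalSource;
import com.linkedin.databus.core.util.IdNamePair;

final class SourcesV1Projection {
    private SourcesV1Projection() {
    }

    /** Reduces each LogicalSource to the flat (id, name) pair expected by v1 clients. */
    static List<IdNamePair> toIdNamePairs(Collection<LogicalSource> sources) {
        List<IdNamePair> pairs = new ArrayList<IdNamePair>(sources.size());
        for (LogicalSource source : sources) {
            pairs.add(new IdNamePair(source.getId().longValue(), source.getName()));
        }
        return pairs;
    }
}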

Aggregations

HttpStatisticsCollector (com.linkedin.databus2.core.container.monitoring.mbean.HttpStatisticsCollector): 6 usages
ArrayList (java.util.ArrayList): 4 usages
ObjectMapper (org.codehaus.jackson.map.ObjectMapper): 4 usages
LogicalSource (com.linkedin.databus.core.data_model.LogicalSource): 3 usages
DatabusException (com.linkedin.databus2.core.DatabusException): 3 usages
InvalidRequestParamValueException (com.linkedin.databus2.core.container.request.InvalidRequestParamValueException): 3 usages
RequestProcessingException (com.linkedin.databus2.core.container.request.RequestProcessingException): 3 usages
StringWriter (java.io.StringWriter): 3 usages
RegisterResponseEntry (com.linkedin.databus2.core.container.request.RegisterResponseEntry): 2 usages
RegisterResponseMetadataEntry (com.linkedin.databus2.core.container.request.RegisterResponseMetadataEntry): 2 usages
Checkpoint (com.linkedin.databus.core.Checkpoint): 1 usage
CheckpointMult (com.linkedin.databus.core.CheckpointMult): 1 usage
DbusEventBufferBatchReadable (com.linkedin.databus.core.DbusEventBufferBatchReadable): 1 usage
PhysicalPartitionKey (com.linkedin.databus.core.DbusEventBufferMult.PhysicalPartitionKey): 1 usage
Encoding (com.linkedin.databus.core.Encoding): 1 usage
OffsetNotFoundException (com.linkedin.databus.core.OffsetNotFoundException): 1 usage
ScnNotFoundException (com.linkedin.databus.core.ScnNotFoundException): 1 usage
StreamEventsResult (com.linkedin.databus.core.StreamEventsResult): 1 usage
DatabusSubscription (com.linkedin.databus.core.data_model.DatabusSubscription): 1 usage
PhysicalPartition (com.linkedin.databus.core.data_model.PhysicalPartition): 1 usage