Use of org.codehaus.jackson.type.TypeReference in project android-app by eoecn.
The class BlogsDao, method mapperJson.
public BlogsResponseEntity mapperJson(boolean useCache) {
BlogsJson blogsJson_;
try {
String result = RequestCacheUtil.getRequestContent(mActivity, Urls.BLOGS_LIST + Utility.getScreenParams(mActivity), Constants.WebSourceType.Json, Constants.DBContentType.Content_list, useCache);
blogsJson_ = mObjectMapper.readValue(result, new TypeReference<BlogsJson>() {
});
if (blogsJson_ == null) {
return null;
}
_blogsResponse = blogsJson_.getResponse();
return _blogsResponse;
} catch (JsonParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (JsonMappingException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
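In this first example the TypeReference wraps a concrete class, so passing BlogsJson.class to readValue would behave the same; the anonymous-subclass idiom matters when the target is a parameterized type, whose type arguments erasure would otherwise discard. Below is a minimal, self-contained sketch of that pattern against the Jackson 1.x (org.codehaus.jackson) API; the class name, JSON payload, and field names are invented for illustration.

import java.util.List;
import java.util.Map;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;

public class TypeReferenceSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Hypothetical payload: a JSON array of objects, each with a "title" field.
        String json = "[{\"title\":\"first\"},{\"title\":\"second\"}]";

        // The anonymous TypeReference subclass records the full generic type, so
        // readValue returns a List<Map<String, String>> with string values rather
        // than a raw List whose element type the compiler cannot check.
        List<Map<String, String>> posts =
                mapper.readValue(json, new TypeReference<List<Map<String, String>>>() {
                });

        // Prints "first".
        System.out.println(posts.get(0).get("title"));
    }
}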
Use of org.codehaus.jackson.type.TypeReference in project NabAlive by jcheype.
The class NabaztagController, method init.
@PostConstruct
void init() {
restHandler.get(new Route("/nabaztags") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
Token token = TokenUtil.decode(checkNotNull(request.getParamOrHeader("token")), Token.class);
List<Nabaztag> nabaztagList = nabaztagDAO.find(nabaztagDAO.createQuery().filter("owner", token.getUserId())).asList();
for (Nabaztag nabaztag : nabaztagList) {
if (connectionManager.containsKey(nabaztag.getMacAddress())) {
nabaztag.setConnected(true);
}
}
response.writeJSON(nabaztagList);
}
}).post(new Route("/nabaztags", ".*json.*") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
Token token = TokenUtil.decode(checkNotNull(request.getParamOrHeader("token")), Token.class);
logger.debug("received json: {}", request.content);
Map<String, String> nabMap = mapper.readValue(request.content, Map.class);
String mac = CharMatcher.JAVA_LETTER_OR_DIGIT.retainFrom(checkNotNull(nabMap.get("mac")).toLowerCase());
if (!connectionManager.containsKey(mac)) {
throw new HttpException(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Nabaztag not Connected");
}
Nabaztag nabaztag = new Nabaztag();
nabaztag.setMacAddress(mac);
nabaztag.setName(nabMap.get("name"));
nabaztag.setApikey(UUID.randomUUID().toString());
nabaztag.setOwner(token.getUserId());
try {
nabaztagDAO.save(nabaztag);
} catch (MongoException.DuplicateKey e) {
ImmutableMap<String, String> error = (new ImmutableMap.Builder<String, String>()).put("error", "Adresse mac déjà enregistrée").build();
response.writeJSON(error);
return;
}
messageService.sendMessage(mac, "ST " + OPERATIONNEL_URL + "\nMW\n");
response.writeJSON(nabaztag);
}
}).post(new Route("/nabaztags") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
Token token = TokenUtil.decode(checkNotNull(request.getParamOrHeader("token")), Token.class);
String mac = CharMatcher.JAVA_LETTER_OR_DIGIT.retainFrom(checkNotNull(request.getParam("mac")).toLowerCase());
String name = request.getParam("name");
if (!connectionManager.containsKey(mac)) {
throw new HttpException(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Nabaztag not Connected");
}
Nabaztag nabaztag = new Nabaztag();
nabaztag.setMacAddress(mac);
nabaztag.setName(name);
nabaztag.setApikey(UUID.randomUUID().toString());
nabaztag.setOwner(token.getUserId());
try {
nabaztagDAO.save(nabaztag);
} catch (MongoException.DuplicateKey e) {
ImmutableMap<String, String> error = (new ImmutableMap.Builder<String, String>()).put("error", "Adresse mac déjà enregistrée").build();
response.writeJSON(error);
return;
}
messageService.sendMessage(mac, "ST " + OPERATIONNEL_URL + "\nMW\n");
response.writeJSON(nabaztag);
}
}).post(new Route("/nabaztags/:mac/addconfig") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
Token token = TokenUtil.decode(checkNotNull(request.getParamOrHeader("token")), Token.class);
List<String> tags = request.parameters.get("rfid");
String appApikey = checkNotNull(request.getParam("apikey"));
String name = checkNotNull(request.getParam("name"));
String appName = checkNotNull(request.getParam("appName"));
String uuid = request.getParam("uuid");
String mac = checkNotNull(map.get("mac"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("macAddress", mac));
if (!nabaztag.getOwner().equals(token.getUserId()))
throw new IllegalArgumentException();
Iterator<ApplicationConfig> iterator = nabaztag.getApplicationConfigList().iterator();
while (iterator.hasNext()) {
ApplicationConfig next = iterator.next();
if (tags != null)
next.getTags().removeAll(tags);
if (next.getUuid().equals(uuid))
iterator.remove();
}
ApplicationConfig config = new ApplicationConfig();
config.setApplicationStoreApikey(appApikey);
config.getTags().clear();
if (tags != null)
config.getTags().addAll(tags);
for (Map.Entry<String, List<String>> entry : request.parameters.entrySet()) {
if (entry.getKey().startsWith("parameter.")) {
String key = entry.getKey().substring("parameter.".length());
config.getParameters().put(key, entry.getValue());
}
}
config.setName(name);
config.setAppName(appName);
nabaztag.addApplicationConfig(config);
nabaztagDAO.save(nabaztag);
tts(nabaztag.getMacAddress(), request.request.getHeader("Host"), "fr", Format.get("app.install.success", appName));
response.writeJSON(nabaztag);
}
}).delete(new Route("/config/:uuid") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
Token token = TokenUtil.decode(checkNotNull(request.getParamOrHeader("token")), Token.class);
String uuid = checkNotNull(map.get("uuid"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("applicationConfigList.uuid", uuid));
if (!nabaztag.getOwner().equals(token.getUserId()))
throw new IllegalArgumentException();
Iterator<ApplicationConfig> iterator = nabaztag.getApplicationConfigList().iterator();
while (iterator.hasNext()) {
ApplicationConfig next = iterator.next();
if (next.getUuid().equals(uuid))
iterator.remove();
}
nabaztagDAO.save(nabaztag);
response.writeJSON(nabaztag);
}
}).get(new Route("/nabaztags/:mac") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
Token token = TokenUtil.decode(checkNotNull(request.getParamOrHeader("token")), Token.class);
String mac = checkNotNull(map.get("mac"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("macAddress", mac));
if (token.getUserId().equals(nabaztag.getOwner())) {
response.writeJSON(nabaztag);
} else {
response.write(new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.UNAUTHORIZED));
}
}
}).delete(new Route("/nabaztags/:mac") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
Token token = TokenUtil.decode(checkNotNull(request.getParamOrHeader("token")), Token.class);
String mac = checkNotNull(map.get("mac"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("macAddress", mac));
if (token.getUserId().equals(nabaztag.getOwner())) {
nabaztagDAO.delete(nabaztag);
response.writeJSON("ok");
} else {
response.write(new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.UNAUTHORIZED));
}
}
}).get(new Route("/nabaztags/:apikey/play") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
List<String> urlList = checkNotNull(request.qs.getParameters().get("url"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
List<String> urlListSanitized = Lists.transform(urlList, new Function<String, String>() {
@Override
public String apply(@Nullable String url) {
return CharMatcher.isNot('\n').retainFrom(url);
}
});
StringBuilder commands = new StringBuilder();
for (String url : urlListSanitized) {
commands.append("ST " + url + "\nMW\n");
}
logger.debug("COMMAND: {}", commands);
messageService.sendMessage(nabaztag.getMacAddress(), commands.toString());
response.writeJSON("ok");
}
}).get(new Route("/nabaztags/:apikey/tts/:voice") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
String text = checkNotNull(request.getParam("text"));
String voice = checkNotNull(map.get("voice"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
String host = request.request.getHeader("Host");
tts(nabaztag.getMacAddress(), host, voice, text);
response.writeJSON("ok");
}
}).get(new Route("/nabaztags/:apikey/exec") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
String command = checkNotNull(request.getParam("command"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
logger.debug("COMMAND: {}", command);
messageService.sendMessage(nabaztag.getMacAddress(), command);
response.writeJSON("ok");
}
}).post(new Route("/nabaztags/:apikey/tags", ".*json.*") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
List<Tag> tagList = mapper.readValue(request.content, new TypeReference<List<Tag>>() {
});
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
nabaztagDAO.update(nabaztagDAO.createQuery().filter("apikey", apikey), nabaztagDAO.createUpdateOperations().set("tags", tagList));
response.writeJSON("ok");
}
}).get(new Route("/nabaztags/:apikey/sleep") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
messageService.sendMessage(nabaztag.getMacAddress(), new SleepPacket(SleepPacket.Action.Sleep));
response.writeJSON("ok");
}
}).get(new Route("/nabaztags/:apikey/wakeup") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
messageService.sendMessage(nabaztag.getMacAddress(), new SleepPacket(SleepPacket.Action.WakeUp));
response.writeJSON("ok");
}
}).get(new Route("/nabaztags/:apikey/tz") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
String tz = checkNotNull(request.getParam("tz"));
Query<Nabaztag> query = nabaztagDAO.createQuery().filter("apikey", apikey);
UpdateOperations<Nabaztag> updateOperations = nabaztagDAO.createUpdateOperations();
updateOperations.set("timeZone", tz);
nabaztagDAO.update(query, updateOperations);
response.writeJSON("ok");
}
}).get(new Route("/nabaztags/:apikey/schedule") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
List<String> sleep = request.parameters.get("sleep[]");
List<String> wakeup = request.parameters.get("wakeup[]");
final Nabaztag nabaztag = nabaztagDAO.findOne("apikey", apikey);
Query<Nabaztag> query = nabaztagDAO.createQuery().filter("apikey", apikey);
UpdateOperations<Nabaztag> updateOperations = nabaztagDAO.createUpdateOperations();
if (!sleep.isEmpty()) {
nabaztag.setSleepLocal(sleep);
Set<String> nabaztagSleep = nabaztag.getSleep();
logger.debug("sleep {}", nabaztagSleep);
updateOperations.set("sleep", nabaztagSleep);
}
if (!wakeup.isEmpty()) {
nabaztag.setWakeupLocal(wakeup);
Set<String> nabaztagWakeup = nabaztag.getWakeup();
logger.debug("wakeup {}", nabaztagWakeup);
updateOperations.set("wakeup", nabaztagWakeup);
}
nabaztagDAO.update(query, updateOperations);
response.writeJSON("ok");
}
}).get(new Route("/nabaztags/:apikey/subscribe") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
String email = checkNotNull(request.getParam("email"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
Set<Subscription> subscriptionSet = nabaztag.getSubscribe();
User user = checkNotNull(userDAO.findOne("email", email));
Query<Nabaztag> query = nabaztagDAO.createQuery().filter("owner", user.getId());
for (Nabaztag nab : nabaztagDAO.find(query).asList()) {
Subscription subscription = new Subscription();
subscription.setName(nab.getName());
subscription.setOwnerFisrtName(user.getFirstname());
subscription.setOwnerLastName(user.getLastname());
subscription.setObjectId(nab.getId().toString());
subscriptionSet.add(subscription);
}
UpdateOperations<Nabaztag> updateOperations = nabaztagDAO.createUpdateOperations().set("subscribe", subscriptionSet);
nabaztagDAO.update(nabaztagDAO.createQuery().filter("_id", nabaztag.getId()), updateOperations);
response.writeJSON("ok");
}
}).delete(new Route("/nabaztags/:apikey/subscribe/:objectId") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
String objectId = checkNotNull(map.get("objectId"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
Set<Subscription> subscriptionSet = nabaztag.getSubscribe();
Iterator<Subscription> iterator = subscriptionSet.iterator();
while (iterator.hasNext()) {
Subscription next = iterator.next();
if (next.getObjectId().equals(objectId))
iterator.remove();
}
UpdateOperations<Nabaztag> updateOperations = nabaztagDAO.createUpdateOperations().set("subscribe", subscriptionSet);
nabaztagDAO.update(nabaztagDAO.createQuery().filter("_id", nabaztag.getId()), updateOperations);
response.writeJSON("ok");
}
}).get(new Route("/nab2nabs/:apikey/send") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
String url = checkNotNull(request.getParam("url"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
Set<Subscription> subscriptionSet = nabaztag.getSubscribe();
List<ObjectId> objectList = new ArrayList<ObjectId>();
for (Subscription subscription : subscriptionSet) {
objectList.add(new ObjectId(subscription.getObjectId()));
}
List<Nabaztag> nabaztagList = nabaztagDAO.find(nabaztagDAO.createQuery().field("_id").in(objectList)).asList();
String command = "ST " + url + "\nMW\n";
for (Nabaztag nab : nabaztagList) {
if (connectionManager.containsKey(nab.getMacAddress()))
messageService.sendMessage(nab.getMacAddress(), command);
}
response.writeJSON("ok");
}
}).get(new Route("/nab2nabs/:apikey/tts") {
@Override
public void handle(Request request, Response response, Map<String, String> map) throws Exception {
String apikey = checkNotNull(map.get("apikey"));
String text = checkNotNull(request.getParam("text"));
Nabaztag nabaztag = checkNotNull(nabaztagDAO.findOne("apikey", apikey));
Set<Subscription> subscriptionSet = nabaztag.getSubscribe();
List<ObjectId> objectList = new ArrayList<ObjectId>();
for (Subscription subscription : subscriptionSet) {
objectList.add(new ObjectId(subscription.getObjectId()));
}
List<Nabaztag> nabaztagList = nabaztagDAO.find(nabaztagDAO.createQuery().field("_id").in(objectList)).asList();
for (Nabaztag nab : nabaztagList) {
if (connectionManager.containsKey(nab.getMacAddress())) {
String host = request.request.getHeader("Host");
tts(nab.getMacAddress(), host, "FR", text);
}
}
response.writeJSON("ok");
}
});
}
Use of org.codehaus.jackson.type.TypeReference in project databus by linkedin.
The class ReadEventsRequestProcessor, method process.
@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException, DatabusException {
boolean isDebug = LOG.isDebugEnabled();
try {
ObjectMapper objMapper = new ObjectMapper();
String checkpointString = request.getParams().getProperty(CHECKPOINT_PARAM, null);
String checkpointStringMult = request.getParams().getProperty(CHECKPOINT_PARAM_MULT, null);
int fetchSize = request.getRequiredIntParam(FETCH_SIZE_PARAM);
String formatStr = request.getRequiredStringParam(OUTPUT_FORMAT_PARAM);
Encoding enc = Encoding.valueOf(formatStr.toUpperCase());
String sourcesListStr = request.getParams().getProperty(SOURCES_PARAM, null);
String subsStr = request.getParams().getProperty(SUBS_PARAM, null);
String partitionInfoStr = request.getParams().getProperty(PARTITION_INFO_STRING);
String streamFromLatestSCNStr = request.getParams().getProperty(STREAM_FROM_LATEST_SCN);
String clientMaxEventVersionStr = request.getParams().getProperty(DatabusHttpHeaders.MAX_EVENT_VERSION);
int clientEventVersion = (clientMaxEventVersionStr != null) ? Integer.parseInt(clientMaxEventVersionStr) : DbusEventFactory.DBUS_EVENT_V1;
if (clientEventVersion < 0 || clientEventVersion == 1 || clientEventVersion > DbusEventFactory.DBUS_EVENT_V2) {
throw new InvalidRequestParamValueException(COMMAND_NAME, DatabusHttpHeaders.MAX_EVENT_VERSION, clientMaxEventVersionStr);
}
if (null == sourcesListStr && null == subsStr) {
throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM + "|" + SUBS_PARAM, "null");
}
// TODO: for now we separate the code paths to limit the impact on existing Databus 2 deployments (DDSDBUS-79)
//We have to get rid of this eventually and have a single data path.
boolean v2Mode = null == subsStr;
DbusKeyCompositeFilter keyCompositeFilter = null;
if (null != partitionInfoStr) {
try {
Map<Long, DbusKeyFilter> fMap = KeyFilterConfigJSONFactory.parseSrcIdFilterConfigMap(partitionInfoStr);
keyCompositeFilter = new DbusKeyCompositeFilter();
keyCompositeFilter.setFilterMap(fMap);
if (isDebug)
LOG.debug("keyCompositeFilter is :" + keyCompositeFilter);
} catch (Exception ex) {
String msg = "Got exception while parsing partition Configs. PartitionInfo is:" + partitionInfoStr;
LOG.error(msg, ex);
throw new InvalidRequestParamValueException(COMMAND_NAME, PARTITION_INFO_STRING, partitionInfoStr);
}
}
boolean streamFromLatestSCN = false;
if (null != streamFromLatestSCNStr) {
streamFromLatestSCN = Boolean.valueOf(streamFromLatestSCNStr);
}
long start = System.currentTimeMillis();
List<DatabusSubscription> subs = null;
//parse source ids
SourceIdNameRegistry srcRegistry = _relay.getSourcesIdNameRegistry();
HashSet<Integer> sourceIds = new HashSet<Integer>();
if (null != sourcesListStr) {
String[] sourcesList = sourcesListStr.split(",");
for (String sourceId : sourcesList) {
try {
Integer srcId = Integer.valueOf(sourceId);
sourceIds.add(srcId);
} catch (NumberFormatException nfe) {
HttpStatisticsCollector globalHttpStatsCollector = _relay.getHttpStatisticsCollector();
if (null != globalHttpStatsCollector) {
globalHttpStatsCollector.registerInvalidStreamRequest();
}
throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, sourceId);
}
}
}
//process explicit subscriptions and generate respective logical partition filters
NavigableSet<PhysicalPartitionKey> ppartKeys = null;
if (null != subsStr) {
List<DatabusSubscription.Builder> subsBuilder = null;
subsBuilder = objMapper.readValue(subsStr, new TypeReference<List<DatabusSubscription.Builder>>() {
});
subs = new ArrayList<DatabusSubscription>(subsBuilder.size());
for (DatabusSubscription.Builder subBuilder : subsBuilder) {
subs.add(subBuilder.build());
}
ppartKeys = new TreeSet<PhysicalPartitionKey>();
for (DatabusSubscription sub : subs) {
PhysicalPartition ppart = sub.getPhysicalPartition();
if (ppart.isAnyPartitionWildcard()) {
ppartKeys = _eventBuffer.getAllPhysicalPartitionKeys();
break;
} else {
ppartKeys.add(new PhysicalPartitionKey(ppart));
}
}
}
// Need to make sure that we don't have tests that send requests in this form.
if (subs != null && checkpointStringMult == null && checkpointString != null) {
throw new RequestProcessingException("Both Subscriptions and CheckpointMult should be present");
}
//convert source ids into subscriptions
if (null == subs)
subs = new ArrayList<DatabusSubscription>();
for (Integer srcId : sourceIds) {
LogicalSource lsource = srcRegistry.getSource(srcId);
if (lsource == null)
throw new InvalidRequestParamValueException(COMMAND_NAME, SOURCES_PARAM, srcId.toString());
if (isDebug)
LOG.debug("registry returns " + lsource + " for srcid=" + srcId);
DatabusSubscription newSub = DatabusSubscription.createSimpleSourceSubscription(lsource);
subs.add(newSub);
}
DbusFilter ppartFilters = null;
if (subs.size() > 0) {
try {
ppartFilters = _eventBuffer.constructFilters(subs);
} catch (DatabusException de) {
throw new RequestProcessingException("unable to generate physical partitions filters:" + de.getMessage(), de);
}
}
ConjunctionDbusFilter filters = new ConjunctionDbusFilter();
// Source filter comes first
if (v2Mode)
filters.addFilter(new SourceDbusFilter(sourceIds));
else if (null != ppartFilters)
filters.addFilter(ppartFilters);
/*
// Key range filter comes next
if ((keyMin >0) && (keyMax > 0))
{
filters.addFilter(new KeyRangeFilter(keyMin, keyMax));
}
*/
if (null != keyCompositeFilter) {
filters.addFilter(keyCompositeFilter);
}
// need to update registerStreamRequest to support Mult checkpoint TODO (DDSDBUS-80)
// temp solution
// 3 options:
// 1. checkpointStringMult not null - generate checkpoint from it
// 2. checkpointStringMult null, checkpointString not null - create empty CheckpointMult
// and add create Checkpoint(checkpointString) and add it to cpMult;
// 3 both are null - create empty CheckpointMult and add empty Checkpoint to it for each ppartition
PhysicalPartition pPartition;
Checkpoint cp = null;
CheckpointMult cpMult = null;
if (checkpointStringMult != null) {
try {
cpMult = new CheckpointMult(checkpointStringMult);
} catch (InvalidParameterSpecException e) {
LOG.error("Invalid CheckpointMult:" + checkpointStringMult, e);
throw new InvalidRequestParamValueException("stream", "CheckpointMult", checkpointStringMult);
}
} else {
// there is no checkpoint - create an empty one
cpMult = new CheckpointMult();
Iterator<Integer> it = sourceIds.iterator();
while (it.hasNext()) {
Integer srcId = it.next();
pPartition = _eventBuffer.getPhysicalPartition(srcId);
if (pPartition == null)
throw new RequestProcessingException("unable to find physical partitions for source:" + srcId);
if (checkpointString != null) {
cp = new Checkpoint(checkpointString);
} else {
cp = new Checkpoint();
cp.setFlexible();
}
cpMult.addCheckpoint(pPartition, cp);
}
}
if (isDebug)
LOG.debug("checkpointStringMult = " + checkpointStringMult + ";singlecheckpointString=" + checkpointString + ";CPM=" + cpMult);
// If the client did not send a cursor partition, fall back to the one retained as part of the server context.
if (cpMult.getCursorPartition() == null) {
cpMult.setCursorPartition(request.getCursorPartition());
}
if (isDebug) {
if (cpMult.getCursorPartition() != null) {
LOG.debug("Using physical paritition cursor " + cpMult.getCursorPartition());
}
}
// for registerStreamRequest we need a single Checkpoint (TODO - fix it) (DDSDBUS-81)
if (cp == null) {
Iterator<Integer> it = sourceIds.iterator();
if (it.hasNext()) {
Integer srcId = it.next();
pPartition = _eventBuffer.getPhysicalPartition(srcId);
cp = cpMult.getCheckpoint(pPartition);
} else {
cp = new Checkpoint();
cp.setFlexible();
}
}
if (null != checkpointString && isDebug)
LOG.debug("About to stream from cp: " + checkpointString.toString());
HttpStatisticsCollector globalHttpStatsCollector = _relay.getHttpStatisticsCollector();
HttpStatisticsCollector connHttpStatsCollector = null;
if (null != globalHttpStatsCollector) {
connHttpStatsCollector = (HttpStatisticsCollector) request.getParams().get(globalHttpStatsCollector.getName());
}
if (null != globalHttpStatsCollector)
globalHttpStatsCollector.registerStreamRequest(cp, sourceIds);
StatsCollectors<DbusEventsStatisticsCollector> statsCollectors = _relay.getOutBoundStatsCollectors();
try {
DbusEventBufferBatchReadable bufRead = v2Mode ? _eventBuffer.getDbusEventBufferBatchReadable(sourceIds, cpMult, statsCollectors) : _eventBuffer.getDbusEventBufferBatchReadable(cpMult, ppartKeys, statsCollectors);
int eventsRead = 0;
int minPendingEventSize = 0;
StreamEventsResult result = null;
bufRead.setClientMaxEventVersion(clientEventVersion);
if (v2Mode) {
result = bufRead.streamEvents(streamFromLatestSCN, fetchSize, request.getResponseContent(), enc, filters);
eventsRead = result.getNumEventsStreamed();
minPendingEventSize = result.getSizeOfPendingEvent();
if (isDebug) {
LOG.debug("Process: streamed " + eventsRead + " from sources " + Arrays.toString(sourceIds.toArray()));
//can be used for debugging to stream from a cp
LOG.debug("CP=" + cpMult);
}
//if (null != statsCollectors) statsCollectors.mergeStatsCollectors();
} else {
result = bufRead.streamEvents(streamFromLatestSCN, fetchSize, request.getResponseContent(), enc, filters);
eventsRead = result.getNumEventsStreamed();
minPendingEventSize = result.getSizeOfPendingEvent();
if (isDebug)
LOG.debug("Process: streamed " + eventsRead + " with subscriptions " + subs);
cpMult = bufRead.getCheckpointMult();
if (cpMult != null) {
request.setCursorPartition(cpMult.getCursorPartition());
}
}
if (eventsRead == 0 && minPendingEventSize > 0) {
// Append a header to indicate to the client that we do have at least one event to
// send, but it is too large to fit into client's offered buffer.
request.getResponseContent().addMetadata(DatabusHttpHeaders.DATABUS_PENDING_EVENT_SIZE, minPendingEventSize);
LOG.debug("Returning 0 events but have pending event of size " + minPendingEventSize);
}
} catch (ScnNotFoundException snfe) {
if (null != globalHttpStatsCollector) {
globalHttpStatsCollector.registerScnNotFoundStreamResponse();
}
throw new RequestProcessingException(snfe);
} catch (OffsetNotFoundException snfe) {
LOG.error("OffsetNotFound", snfe);
if (null != globalHttpStatsCollector) {
globalHttpStatsCollector.registerScnNotFoundStreamResponse();
}
throw new RequestProcessingException(snfe);
}
if (null != connHttpStatsCollector) {
connHttpStatsCollector.registerStreamResponse(System.currentTimeMillis() - start);
globalHttpStatsCollector.merge(connHttpStatsCollector);
connHttpStatsCollector.reset();
} else if (null != globalHttpStatsCollector) {
globalHttpStatsCollector.registerStreamResponse(System.currentTimeMillis() - start);
}
} catch (InvalidRequestParamValueException e) {
HttpStatisticsCollector globalHttpStatsCollector = _relay.getHttpStatisticsCollector();
if (null != globalHttpStatsCollector) {
globalHttpStatsCollector.registerInvalidStreamRequest();
}
throw e;
}
return request;
}
Use of org.codehaus.jackson.type.TypeReference in project databus by linkedin.
The class SchemaMetaDataManager, method populatePhysicalToLogicalSrcMap.
private void populatePhysicalToLogicalSrcMap() throws IOException {
FileInputStream fStream = null;
try {
fStream = new FileInputStream(new File(_physicalToLogicalSrcMapFile));
ObjectMapper m = new ObjectMapper();
_physicalToLogicalSrcMap = m.readValue(fStream, new TypeReference<TreeMap<String, TreeSet<String>>>() {
});
} finally {
if (null != fStream)
fStream.close();
}
}
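Here the TypeReference is what lets Jackson rebuild the nested TreeMap<String, TreeSet<String>> instead of a plain map of untyped lists. As a sketch only, the same read can be written with try-with-resources (Java 7+) in place of the explicit finally block; the JSON layout shown in the comment is an assumption, not taken from the project.

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.TreeMap;
import java.util.TreeSet;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;

public class PhysicalToLogicalSrcMapReader {
    // Illustrative only: expects JSON of the form
    // {"physicalSourceA": ["logicalSrc1", "logicalSrc2"], "physicalSourceB": [...]}
    static TreeMap<String, TreeSet<String>> read(String path) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        // try-with-resources closes the stream even if readValue throws,
        // which replaces the null-check-and-close in the finally block above.
        try (FileInputStream in = new FileInputStream(new File(path))) {
            return mapper.readValue(in, new TypeReference<TreeMap<String, TreeSet<String>>>() {
            });
        }
    }
}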
Use of org.codehaus.jackson.type.TypeReference in project databus by linkedin.
The class TestDbusEvent, method testAppendToEventBuffer_one.
@Test
public void testAppendToEventBuffer_one() throws Exception {
String valueStr = "testvalue";
ByteBuffer serializationBuffer = ByteBuffer.allocate(1000).order(_eventV1Factory.getByteOrder());
DbusEventInfo eventInfo = new DbusEventInfo(null, 2L, (short) 0, (short) 3, 4L, (short) 5, schemaId, valueStr.getBytes(Charset.defaultCharset()), false, true);
// make this explicit
eventInfo.setEventSerializationVersion(DbusEventFactory.DBUS_EVENT_V1);
DbusEventFactory.serializeEvent(new DbusEventKey(1L), serializationBuffer, eventInfo);
DbusEventInternalReadable event1 = _eventV1Factory.createReadOnlyDbusEventFromBuffer(serializationBuffer, 0);
assertTrue("event crc correct", event1.isValid());
//test JSON_PLAIN_VALUE
ByteArrayOutputStream jsonOut = new ByteArrayOutputStream();
WritableByteChannel jsonOutChannel = Channels.newChannel(jsonOut);
event1.writeTo(jsonOutChannel, Encoding.JSON_PLAIN_VALUE);
byte[] jsonBytes = jsonOut.toByteArray();
String jsonString = new String(jsonBytes);
ObjectMapper mapper = new ObjectMapper();
Map<String, Object> jsonMap = mapper.readValue(jsonString, new TypeReference<Map<String, Object>>() {
});
assertEquals("key correct", 1L, ((Number) jsonMap.get("key")).longValue());
assertEquals("sequence correct", 2L, ((Number) jsonMap.get("sequence")).longValue());
assertEquals("partitionId correct", 3, ((Number) jsonMap.get("logicalPartitionId")).shortValue());
assertEquals("timestamp correct", 4L, ((Number) jsonMap.get("timestampInNanos")).longValue());
assertEquals("srcId correct", 5, ((Number) jsonMap.get("srcId")).longValue());
assertEquals("schemaId correct", Base64.encodeBytes(schemaId), jsonMap.get("schemaId"));
assertEquals("valueEnc correct", Encoding.JSON_PLAIN_VALUE.toString(), jsonMap.get("valueEnc"));
assertEquals("value correct", valueStr, jsonMap.get("value"));
DbusEventBuffer eventBuffer1 = new DbusEventBuffer(getConfig(100000, DbusEventBuffer.Config.DEFAULT_INDIVIDUAL_BUFFER_SIZE, 10000, 1000, AllocationPolicy.HEAP_MEMORY, QueuePolicy.OVERWRITE_ON_WRITE));
eventBuffer1.startEvents();
assertEquals("json deserialization", 1, DbusEventSerializable.appendToEventBuffer(jsonString, eventBuffer1, null, false));
eventBuffer1.endEvents(2);
DbusEventIterator it1 = eventBuffer1.acquireIterator("it1");
assertTrue("buffer has event", it1.hasNext());
DbusEvent testEvent = it1.next();
assertEquals("key correct", 1L, testEvent.key());
assertEquals("sequence correct", 2L, testEvent.sequence());
assertEquals("partitionId correct", 3, testEvent.logicalPartitionId());
assertEquals("timestamp correct", 4L, testEvent.timestampInNanos());
assertEquals("srcId correct", 5, testEvent.srcId());
assertEquals("schemaId correct", new String(schemaId), new String(testEvent.schemaId()));
assertEquals("value correct", valueStr, Utils.byteBufferToString(testEvent.value()));
assertEquals("Get DbusEventKey", 1L, ((DbusEventInternalReadable) testEvent).getDbusEventKey().getLongKey().longValue());
//test JSON
jsonOut = new ByteArrayOutputStream();
jsonOutChannel = Channels.newChannel(jsonOut);
event1.writeTo(jsonOutChannel, Encoding.JSON);
jsonBytes = jsonOut.toByteArray();
jsonString = new String(jsonBytes);
jsonMap = mapper.readValue(jsonString, new TypeReference<Map<String, Object>>() {
});
assertEquals("key correct", 1L, ((Number) jsonMap.get("key")).longValue());
assertEquals("sequence correct", 2L, ((Number) jsonMap.get("sequence")).longValue());
assertEquals("logicalPartitionId correct", 3, ((Number) jsonMap.get("logicalPartitionId")).shortValue());
assertEquals("timestampInNanos correct", 4L, ((Number) jsonMap.get("timestampInNanos")).longValue());
assertEquals("srcId correct", 5, ((Number) jsonMap.get("srcId")).longValue());
assertEquals("schemaId correct", Base64.encodeBytes(schemaId), jsonMap.get("schemaId"));
assertEquals("valueEnc correct", Encoding.JSON.toString(), jsonMap.get("valueEnc"));
assertEquals("value correct", Base64.encodeBytes(valueStr.getBytes(Charset.defaultCharset())), jsonMap.get("value"));
assertTrue("buffer has event", it1.hasNext());
testEvent = it1.next();
assertTrue("end of window", testEvent.isEndOfPeriodMarker());
DbusEventBuffer eventBuffer2 = new DbusEventBuffer(getConfig(100000, DbusEventBuffer.Config.DEFAULT_INDIVIDUAL_BUFFER_SIZE, 10000, 1000, AllocationPolicy.HEAP_MEMORY, QueuePolicy.OVERWRITE_ON_WRITE));
eventBuffer2.startEvents();
assertTrue("json deserialization", (DbusEventSerializable.appendToEventBuffer(jsonString, eventBuffer2, null, false) > 0));
eventBuffer2.endEvents(2);
DbusEventIterator it2 = eventBuffer2.acquireIterator("it2");
assertTrue("buffer has event", it2.hasNext());
testEvent = it2.next();
assertEquals("key correct", 1L, testEvent.key());
assertEquals("partitionId correct", 3, testEvent.logicalPartitionId());
assertEquals("timestamp correct", 4L, testEvent.timestampInNanos());
assertEquals("srcId correct", 5, testEvent.srcId());
assertEquals("schemaId correct", new String(schemaId), new String(testEvent.schemaId()));
assertEquals("value correct", valueStr, Utils.byteBufferToString(testEvent.value()));
assertEquals("Get DbusEventKey", 1L, ((DbusEventInternalReadable) testEvent).getDbusEventKey().getLongKey().longValue());
assertTrue("buffer has event", it2.hasNext());
testEvent = it2.next();
assertTrue("end of window", testEvent.isEndOfPeriodMarker());
}