Use of org.apache.thrift.TException in the jstorm project (by Alibaba):
class ServiceHandler, method rebalance.
/**
 * Rebalances one topology.
 *
 * @param topologyName topology name
 * @param options rebalance options; {@code wait_secs} lets workers wait the
 *                given number of seconds to finish in-flight jobs before the
 *                rebalance takes effect
 * @throws NotAliveException if no active topology with this name exists
 * @throws TException on any other failure while triggering the rebalance
 */
@Override
public void rebalance(String topologyName, RebalanceOptions options) throws TException, NotAliveException {
    try {
        // Fail fast if the topology is not active (throws NotAliveException).
        checkTopologyActive(data, topologyName, true);
        Integer waitSecs = null;
        String jsonConf = null;
        Boolean reassign = false;
        if (options != null) {
            if (options.is_set_wait_secs())
                waitSecs = options.get_wait_secs();
            if (options.is_set_reassign())
                reassign = options.is_reassign();
            if (options.is_set_conf())
                jsonConf = options.get_conf();
        }
        // NOTE: separator added before "wait_time" — the original concatenation
        // printed "<name>wait_time:..." with no delimiter.
        LOG.info("Begin to rebalance " + topologyName + ", wait_time:" + waitSecs + ", reassign: " + reassign + ", new worker/bolt configuration:" + jsonConf);
        // jsonConf may be null; from_json is expected to tolerate that and yield null.
        Map<Object, Object> conf = (Map<Object, Object>) JStormUtils.from_json(jsonConf);
        NimbusUtils.transitionName(data, topologyName, true, StatusType.rebalance, waitSecs, reassign, conf);
        notifyTopologyActionListener(topologyName, "rebalance");
    } catch (NotAliveException e) {
        String errMsg = "Rebalance Error, no this topology " + topologyName;
        LOG.error(errMsg, e);
        throw new NotAliveException(errMsg);
    } catch (Exception e) {
        String errMsg = "Failed to rebalance topology " + topologyName;
        LOG.error(errMsg, e);
        throw new TException(errMsg);
    }
}
Use of org.apache.thrift.TException in the jstorm project (by Alibaba):
class ServiceHandler, method uploadBlobChunk.
/**
 * Appends one chunk of data to an in-progress blob upload.
 * <p>
 * Handles both array-backed and direct/read-only buffers; the original
 * implementation called {@code chunk.array()} unconditionally, which throws
 * for direct or read-only ByteBuffers. The buffer's position is never
 * modified, matching the original behavior.
 *
 * @param session upload session id previously returned by beginBlobUpload
 * @param chunk   next chunk of blob bytes
 * @throws TException if the session is unknown/expired or the write fails
 */
@Override
public void uploadBlobChunk(String session, ByteBuffer chunk) throws TException {
    AtomicOutputStream os = (AtomicOutputStream) data.getBlobUploaders().get(session);
    if (os == null) {
        throw new TException("Blob for session " + session + " does not exist (or timed out)");
    }
    try {
        if (chunk.hasArray()) {
            // Fast path: write straight from the backing array.
            os.write(chunk.array(), chunk.arrayOffset() + chunk.position(), chunk.remaining());
        } else {
            // Direct or read-only buffer: copy via a duplicate so the
            // caller's position/limit are left untouched.
            byte[] bytes = new byte[chunk.remaining()];
            chunk.duplicate().get(bytes);
            os.write(bytes, 0, bytes.length);
        }
        // Re-put to refresh the session's timeout bookkeeping.
        data.getBlobUploaders().put(session, os);
    } catch (IOException e) {
        LOG.error("Blob upload failed", e);
        throw new TException(e);
    }
}
Use of org.apache.thrift.TException in the brisk project (by Riptano):
class PortfolioMgrHandler, method addLossInformation.
/**
 * Enriches each portfolio with its largest 10-day loss and the date it
 * occurred, read in one multiget from Cassandra.
 * <p>
 * Best-effort: on any Cassandra or decoding error the portfolios are left
 * without loss information rather than failing the whole request.
 *
 * @param portfolios portfolios to enrich in place
 */
private void addLossInformation(List<Portfolio> portfolios) {
    // Index portfolios by their serialized name so multiget results can be
    // mapped back to the owning Portfolio object.
    Map<ByteBuffer, Portfolio> portfolioLookup = new HashMap<ByteBuffer, Portfolio>();
    List<ByteBuffer> portfolioNames = new ArrayList<ByteBuffer>();
    for (Portfolio p : portfolios) {
        ByteBuffer name = ByteBufferUtil.bytes(p.name);
        portfolioLookup.put(name, p);
        portfolioNames.add(name);
    }
    try {
        Map<ByteBuffer, List<ColumnOrSuperColumn>> result = getClient().multiget_slice(portfolioNames, lcp, lcols, ConsistencyLevel.ONE);
        for (Map.Entry<ByteBuffer, List<ColumnOrSuperColumn>> entry : result.entrySet()) {
            Portfolio portfolio = portfolioLookup.get(entry.getKey());
            if (portfolio == null)
                continue;
            for (ColumnOrSuperColumn col : entry.getValue()) {
                if (col.getColumn().name.equals(lossCol))
                    portfolio.setLargest_10day_loss(Double.valueOf(ByteBufferUtil.string(col.getColumn().value)));
                if (col.getColumn().name.equals(lossDateCol))
                    portfolio.setLargest_10day_loss_date(ByteBufferUtil.string(col.getColumn().value));
            }
        }
    } catch (InvalidRequestException | UnavailableException | TimedOutException | TException | CharacterCodingException e) {
        // Deliberately swallowed: loss data is optional decoration.
        // TODO(review): replace printStackTrace with a real logger if one exists.
        e.printStackTrace();
    }
}
Use of org.apache.thrift.TException in the brisk project (by Riptano):
class PortfolioMgrHandler, method addHistInformation.
/**
 * Enriches each portfolio with its historical price series: for every date,
 * the SUM of that day's prices across all of the portfolio's tickers.
 * <p>
 * Bug fix: the original accumulated with {@code price = +Double.valueOf(...)}
 * (unary plus), which overwrote the running sum with the last ticker's price
 * instead of adding to it; this version actually sums per date.
 * <p>
 * Best-effort: on any Cassandra or decoding error the portfolio is left
 * without historical prices rather than failing the whole request.
 *
 * @param portfolios portfolios to enrich in place
 */
private void addHistInformation(List<Portfolio> portfolios) {
    for (Portfolio p : portfolios) {
        List<ByteBuffer> tickers = new ArrayList<ByteBuffer>();
        for (Position position : p.constituents) {
            tickers.add(ByteBufferUtil.bytes(position.ticker));
        }
        try {
            Map<ByteBuffer, List<ColumnOrSuperColumn>> result = getClient().multiget_slice(tickers, hcp, hsp, ConsistencyLevel.ONE);
            // LinkedHashMap keeps the date ordering of the slice results.
            Map<String, Double> histPrices = new LinkedHashMap<String, Double>();
            for (Map.Entry<ByteBuffer, List<ColumnOrSuperColumn>> entry : result.entrySet()) {
                for (ColumnOrSuperColumn col : entry.getValue()) {
                    // Column name is the date key, column value the price.
                    String date = ByteBufferUtil.string(col.column.name);
                    double price = Double.valueOf(ByteBufferUtil.string(col.column.value));
                    Double runningTotal = histPrices.get(date);
                    histPrices.put(date, runningTotal == null ? price : runningTotal + price);
                }
            }
            p.setHist_prices(new ArrayList<Double>(histPrices.values()));
        } catch (InvalidRequestException | UnavailableException | TimedOutException | TException | CharacterCodingException e) {
            // Deliberately swallowed: history data is optional decoration.
            // TODO(review): replace printStackTrace with a real logger if one exists.
            e.printStackTrace();
        }
    }
}
Use of org.apache.thrift.TException in the lucida project (by Clarity Lab):
class QAClient, method main.
/**
 * Demo client: connects to a local Lucida/OpenEphyra service, teaches it some
 * knowledge, asks a question, unlearns part of the knowledge, and asks again.
 * <p>
 * Fixes over the original: the transport is now closed in a {@code finally}
 * block (it leaked when any RPC threw), and the double-brace-initialization
 * anti-pattern (an anonymous subclass per list) is replaced with plain
 * ArrayList construction.
 *
 * @param args optional first argument: server port (defaults to 8083)
 */
public static void main(String[] args) {
    // Collect the port number.
    int port = 8083;
    if (args.length >= 1) {
        port = Integer.parseInt(args[0]);
    }
    // User.
    String LUCID = "Clinc";
    QuerySpec create_spec = new QuerySpec();
    // Knowledge.
    QueryInput knowledge_text = createQueryInput("text", "Clinc is created by Jason and Lingjia.", "1234567");
    QueryInput knowledge_url = createQueryInput("url", "https://en.wikipedia.org/wiki/Apple_Inc.", "abcdefg");
    ArrayList<QueryInput> knowledgeInputs = new ArrayList<QueryInput>();
    knowledgeInputs.add(knowledge_text);
    knowledgeInputs.add(knowledge_url);
    QuerySpec knowledge = createQuerySpec("knowledge", knowledgeInputs);
    // Unlearn.
    QueryInput knowledge_unlearn_input = createQueryInput("unlearn", "", "abcdefg");
    ArrayList<QueryInput> unlearnInputs = new ArrayList<QueryInput>();
    unlearnInputs.add(knowledge_unlearn_input);
    QuerySpec knowledge_unlearn_spec = createQuerySpec("unlearn knowledge", unlearnInputs);
    // Query.
    QueryInput query_input = createQueryInput("text", "Who created Clinc?", "");
    ArrayList<QueryInput> queryInputs = new ArrayList<QueryInput>();
    queryInputs.add(query_input);
    QuerySpec query = createQuerySpec("query", queryInputs);
    // Initialize thrift objects.
    // TTransport transport = new TSocket("clarity08.eecs.umich.edu", port);
    TTransport transport = new TSocket("localhost", port);
    TProtocol protocol = new TBinaryProtocol(new TFramedTransport(transport));
    LucidaService.Client client = new LucidaService.Client(protocol);
    try {
        // Talk to the server.
        transport.open();
        System.out.println("///// Connecting to OpenEphyra at port " + port + " ... /////");
        // Learn and ask.
        client.create(LUCID, create_spec);
        client.learn(LUCID, knowledge);
        System.out.println("///// Query input: /////");
        System.out.println(query_input.data.get(0));
        String answer = client.infer(LUCID, query);
        // Print the answer.
        System.out.println("///// Answer: /////");
        System.out.println(answer);
        // Unlearn and ask again.
        client.learn(LUCID, knowledge_unlearn_spec);
        System.out.println("///// Query input: /////");
        System.out.println(query_input.data.get(0));
        answer = client.infer(LUCID, query);
        // Print the answer.
        System.out.println("///// Answer: /////");
        System.out.println(answer);
    } catch (TException x) {
        x.printStackTrace();
    } finally {
        // Always release the socket, even when an RPC above threw.
        if (transport.isOpen()) {
            transport.close();
        }
    }
}
Aggregations