Use of org.apache.thrift.TException in project storm by apache: class Nimbus, method uploadNewCredentials.
@Override
public void uploadNewCredentials(String topoName, Credentials credentials) throws NotAliveException, InvalidTopologyException, AuthorizationException, TException {
    try {
        uploadNewCredentialsCalls.mark();
        IStormClusterState state = stormClusterState;
        String topoId = toTopoId(topoName);
        if (topoId == null) {
            throw new NotAliveException(topoName + " is not alive");
        }
        Map<String, Object> topoConf = tryReadTopoConf(topoId, blobStore);
        if (credentials == null) {
            credentials = new Credentials(Collections.emptyMap());
        }
        checkAuthorization(topoName, topoConf, "uploadNewCredentials");
        synchronized (credUpdateLock) {
            state.setCredentials(topoId, credentials, topoConf);
        }
    } catch (Exception e) {
        LOG.warn("Upload Creds topology exception. (topology name='{}')", topoName, e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
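For context, a minimal client-side sketch of invoking this RPC through the Nimbus Thrift interface. The NimbusClient helper usage and the credential map contents are assumptions for illustration only; real credential keys depend on the configured auto-credential plugins.

import java.util.HashMap;
import java.util.Map;
import org.apache.storm.generated.Credentials;
import org.apache.storm.generated.Nimbus;
import org.apache.storm.utils.NimbusClient;
import org.apache.storm.utils.Utils;

public class PushCredsExample {
    public static void main(String[] args) throws Exception {
        Map<String, Object> conf = Utils.readStormConfig();
        NimbusClient nimbus = NimbusClient.getConfiguredClient(conf);
        try {
            Nimbus.Iface client = nimbus.getClient();
            // Hypothetical credential payload; the topology name is also an example value.
            Map<String, String> credsMap = new HashMap<>();
            credsMap.put("my-cred-key", "my-cred-value");
            client.uploadNewCredentials("my-topology", new Credentials(credsMap));
        } finally {
            nimbus.close();
        }
    }
}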
Use of org.apache.thrift.TException in project storm by apache: class Nimbus, method downloadBlobChunk.
@SuppressWarnings("deprecation")
@Override
public ByteBuffer downloadBlobChunk(String session) throws AuthorizationException, TException {
    try {
        BufferInputStream is = blobDownloaders.get(session);
        if (is == null) {
            throw new RuntimeException("Blob for session " + session + " does not exist (or timed out)");
        }
        byte[] ret = is.read();
        if (ret.length == 0) {
            // An empty chunk means the blob has been fully read: close the stream and drop the session.
            is.close();
            blobDownloaders.remove(session);
        } else {
            // Re-insert the stream so the session entry stays fresh for the next chunk request.
            blobDownloaders.put(session, is);
        }
        LOG.debug("Sending {} bytes", ret.length);
        return ByteBuffer.wrap(ret);
    } catch (Exception e) {
        LOG.warn("download blob chunk exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
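On the client side, the zero-length chunk is the end-of-stream marker. A minimal sketch of the corresponding read loop, assuming a Nimbus.Iface handle and a session id already obtained from beginBlobDownload (shown further below):

import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.apache.storm.generated.Nimbus;

public class BlobDownloadLoop {
    /** Reads chunks for an open download session until the server returns an empty chunk. */
    public static void drain(Nimbus.Iface client, String session, OutputStream out) throws Exception {
        while (true) {
            ByteBuffer chunk = client.downloadBlobChunk(session);
            byte[] bytes = new byte[chunk.remaining()];
            chunk.get(bytes);
            if (bytes.length == 0) {
                break; // zero-length chunk: the blob is complete and the server has dropped the session
            }
            out.write(bytes);
        }
    }
}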
Use of org.apache.thrift.TException in project storm by apache: class Nimbus, method finishBlobUpload.
@SuppressWarnings("deprecation")
@Override
public void finishBlobUpload(String session) throws AuthorizationException, TException {
    try {
        OutputStream os = blobUploaders.get(session);
        if (os == null) {
            throw new RuntimeException("Blob for session " + session + " does not exist (or timed out)");
        }
        os.close();
        LOG.info("Finished uploading blob for session {}. Closing session.", session);
        blobUploaders.remove(session);
    } catch (Exception e) {
        LOG.warn("finish blob upload exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
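finishBlobUpload is the last step of the chunked upload protocol. A hedged sketch of the client side, assuming the beginUpdateBlob and uploadBlobChunk methods of the same Nimbus Thrift interface and a hypothetical local file path:

import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import org.apache.storm.generated.Nimbus;

public class BlobUploadLoop {
    /** Streams a local file into an existing blob key, then closes the upload session. */
    public static void updateBlob(Nimbus.Iface client, String blobKey, String localPath) throws Exception {
        String session = client.beginUpdateBlob(blobKey);
        try (InputStream in = new FileInputStream(localPath)) {
            byte[] buf = new byte[64 * 1024];
            int n;
            while ((n = in.read(buf)) > 0) {
                client.uploadBlobChunk(session, ByteBuffer.wrap(buf, 0, n));
            }
        }
        // Tells Nimbus the stream is complete; the server closes and removes the session (see above).
        client.finishBlobUpload(session);
    }
}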
Use of org.apache.thrift.TException in project storm by apache: class Nimbus, method beginBlobDownload.
@SuppressWarnings("deprecation")
@Override
public BeginDownloadResult beginBlobDownload(String key) throws AuthorizationException, KeyNotFoundException, TException {
    try {
        InputStreamWithMeta is = blobStore.getBlob(key, getSubject());
        String sessionId = Utils.uuid();
        BeginDownloadResult ret = new BeginDownloadResult(is.getVersion(), sessionId);
        ret.set_data_size(is.getFileLength());
        blobDownloaders.put(sessionId, new BufferInputStream(is, (int) conf.getOrDefault(Config.STORM_BLOBSTORE_INPUTSTREAM_BUFFER_SIZE_BYTES, 65536)));
        LOG.info("Created download session for {}", key);
        return ret;
    } catch (Exception e) {
        LOG.warn("begin blob download exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
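Putting this together with the chunk loop sketched after downloadBlobChunk above: a client opens the session with beginBlobDownload and then drains it. The get_session() getter is assumed to mirror the underscore-style Thrift accessors seen elsewhere in these snippets (set_data_size, is_set_component_debug).

import java.io.FileOutputStream;
import java.io.OutputStream;
import org.apache.storm.generated.BeginDownloadResult;
import org.apache.storm.generated.Nimbus;

public class BlobDownloadExample {
    /** Downloads a blob key into a local file using the session-based chunk protocol. */
    public static void download(Nimbus.Iface client, String blobKey, String localPath) throws Exception {
        BeginDownloadResult begin = client.beginBlobDownload(blobKey);
        try (OutputStream out = new FileOutputStream(localPath)) {
            // Reuse the chunk loop sketched after downloadBlobChunk above.
            BlobDownloadLoop.drain(client, begin.get_session(), out);
        }
    }
}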
Use of org.apache.thrift.TException in project storm by apache: class Nimbus, method getComponentPageInfo.
@Override
public ComponentPageInfo getComponentPageInfo(String topoId, String componentId, String window, boolean includeSys) throws NotAliveException, AuthorizationException, TException {
    try {
        getComponentPageInfoCalls.mark();
        CommonTopoInfo info = getCommonTopoInfo(topoId, "getComponentPageInfo");
        if (info.base == null) {
            throw new NotAliveException(topoId);
        }
        StormTopology topology = info.topology;
        Map<String, Object> topoConf = info.topoConf;
        Assignment assignment = info.assignment;
        Map<List<Long>, List<Object>> exec2NodePort = new HashMap<>();
        Map<String, String> nodeToHost;
        Map<List<Long>, List<Object>> exec2HostPort = new HashMap<>();
        if (assignment != null) {
            Map<List<Long>, NodeInfo> execToNodeInfo = assignment.get_executor_node_port();
            nodeToHost = assignment.get_node_host();
            for (Entry<List<Long>, NodeInfo> entry : execToNodeInfo.entrySet()) {
                NodeInfo ni = entry.getValue();
                List<Object> nodePort = Arrays.asList(ni.get_node(), ni.get_port_iterator().next());
                List<Object> hostPort = Arrays.asList(nodeToHost.get(ni.get_node()), ni.get_port_iterator().next());
                exec2NodePort.put(entry.getKey(), nodePort);
                exec2HostPort.put(entry.getKey(), hostPort);
            }
        } else {
            nodeToHost = Collections.emptyMap();
        }
        ComponentPageInfo compPageInfo = StatsUtil.aggCompExecsStats(exec2HostPort, info.taskToComponent, info.beats, window, includeSys, topoId, topology, componentId);
        if (compPageInfo.get_component_type() == ComponentType.SPOUT) {
            compPageInfo.set_resources_map(setResourcesDefaultIfNotSet(ResourceUtils.getSpoutsResources(topology, topoConf), componentId, topoConf));
        } else {
            // bolt
            compPageInfo.set_resources_map(setResourcesDefaultIfNotSet(ResourceUtils.getBoltsResources(topology, topoConf), componentId, topoConf));
        }
        compPageInfo.set_topology_name(info.topoName);
        compPageInfo.set_errors(stormClusterState.errors(topoId, componentId));
        compPageInfo.set_topology_status(extractStatusStr(info.base));
        if (info.base.is_set_component_debug()) {
            DebugOptions debug = info.base.get_component_debug().get(componentId);
            if (debug != null) {
                compPageInfo.set_debug_options(debug);
            }
        }
        // Add the event logger details.
        Map<String, List<Integer>> compToTasks = Utils.reverseMap(info.taskToComponent);
        if (compToTasks.containsKey(StormCommon.EVENTLOGGER_COMPONENT_ID)) {
            List<Integer> tasks = compToTasks.get(StormCommon.EVENTLOGGER_COMPONENT_ID);
            tasks.sort(null);
            // Find the task the events from this component route to.
            int taskIndex = TupleUtils.chooseTaskIndex(Collections.singletonList(componentId), tasks.size());
            int taskId = tasks.get(taskIndex);
            String host = null;
            Integer port = null;
            for (Entry<List<Long>, List<Object>> entry : exec2HostPort.entrySet()) {
                int start = entry.getKey().get(0).intValue();
                int end = entry.getKey().get(1).intValue();
                if (taskId >= start && taskId <= end) {
                    host = (String) entry.getValue().get(0);
                    port = ((Number) entry.getValue().get(1)).intValue();
                    break;
                }
            }
            if (host != null && port != null) {
                compPageInfo.set_eventlog_host(host);
                compPageInfo.set_eventlog_port(port);
            }
        }
        return compPageInfo;
    } catch (Exception e) {
        LOG.warn("getComponentPageInfo exception. (topo id='{}')", topoId, e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
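A sketch of a client-side call to this method. The ":all-time" window string and the underscore-style getters on the generated ComponentPageInfo (get_topology_name, get_eventlog_host, and so on) are assumptions for illustration, inferred from the setters used in the snippet above.

import org.apache.storm.generated.ComponentPageInfo;
import org.apache.storm.generated.Nimbus;

public class ComponentPageExample {
    public static void printComponentSummary(Nimbus.Iface client, String topoId, String componentId) throws Exception {
        // ":all-time" is assumed here to request the unwindowed aggregate, as in the Storm UI.
        ComponentPageInfo info = client.getComponentPageInfo(topoId, componentId, ":all-time", false);
        System.out.println(info.get_topology_name() + "/" + componentId + " is a " + info.get_component_type());
        if (info.is_set_eventlog_host()) {
            System.out.println("event logger at " + info.get_eventlog_host() + ":" + info.get_eventlog_port());
        }
    }
}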