Use of org.apache.thrift.TException in the Apache Hive project:
the class TBinarySortableProtocol, method readString.
/**
 * Reads a binary-sortable-encoded string from the underlying transport.
 *
 * <p>The encoding terminates a string with a 0x00 byte. 0x01 is an escape
 * marker: the pair (1, 1) encodes a literal 0x00 byte and (1, 2) encodes a
 * literal 0x01 byte, so escaped bytes are decoded by subtracting 1.
 *
 * @return the decoded UTF-8 string, or {@code null} if the null indicator
 *         preceding the field marks the value as null
 * @throws TException on any read error from the underlying transport
 */
@Override
public String readString() throws TException {
  // Every field is preceded by a null indicator.
  if (readIsNull()) {
    return null;
  }
  int i = 0;
  while (true) {
    readRawAll(bin, 0, 1);
    if (bin[0] == 0) {
      // 0x00 terminates the encoded string.
      break;
    }
    if (bin[0] == 1) {
      // Escaped byte: read the second byte of the pair and unescape it.
      readRawAll(bin, 0, 1);
      assert (bin[0] == 1 || bin[0] == 2);
      bin[0] = (byte) (bin[0] - 1);
    }
    // Grow the scratch buffer geometrically when it fills up.
    if (i == stringBytes.length) {
      stringBytes = Arrays.copyOf(stringBytes, stringBytes.length * 2);
    }
    stringBytes[i] = bin[0];
    i++;
  }
  // Every JVM is required to support UTF-8; the Charset overload cannot throw
  // UnsupportedEncodingException, so the dead try/catch is no longer needed.
  return new String(stringBytes, 0, i, java.nio.charset.StandardCharsets.UTF_8);
}
Use of org.apache.thrift.TException in the Apache Hive project:
the class GetFunctionsOperation, method runInternal.
/**
 * Executes the GET_FUNCTIONS metadata operation: optionally authorizes the
 * request against the matching databases, then fills the row set with one row
 * per registered function matching the requested function-name pattern.
 *
 * @throws HiveSQLException if the metastore lookup or function resolution fails
 */
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.RUNNING);
  if (isAuthV2Enabled()) {
    // Authorize this call against the concrete databases matching the schema pattern.
    IMetaStoreClient client = getParentSession().getMetaStoreClient();
    String pattern = convertSchemaPattern(schemaName);
    List<String> matchingDbs;
    try {
      matchingDbs = client.getDatabases(pattern);
    } catch (TException e) {
      setState(OperationState.ERROR);
      throw new HiveSQLException(e);
    }
    List<HivePrivilegeObject> privObjs = HivePrivilegeObjectUtils.getHivePrivDbObjects(matchingDbs);
    String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName;
    authorizeMetaGets(HiveOperationType.GET_FUNCTIONS, privObjs, cmdStr);
  }
  try {
    boolean emptyCatalog = (catalogName == null || "".equals(catalogName));
    boolean emptySchema = (schemaName == null || "".equals(schemaName));
    if (emptyCatalog && emptySchema) {
      Set<String> matchingNames = FunctionRegistry.getFunctionNames(CLIServiceUtils.patternToRegex(functionName));
      // NOTE: loop variable renamed from "functionName" so it no longer shadows the field.
      for (String registeredName : matchingNames) {
        FunctionInfo info = FunctionRegistry.getFunctionInfo(registeredName);
        Object functionType = info.isGenericUDTF()
            ? DatabaseMetaData.functionReturnsTable
            : DatabaseMetaData.functionNoTable;
        rowSet.addRow(new Object[] {
            null,                                // FUNCTION_CAT
            null,                                // FUNCTION_SCHEM
            info.getDisplayName(),               // FUNCTION_NAME
            "",                                  // REMARKS
            functionType,                        // FUNCTION_TYPE
            info.getClass().getCanonicalName()   // SPECIFIC_NAME
        });
      }
    }
    setState(OperationState.FINISHED);
  } catch (Exception e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException(e);
  }
}
Use of org.apache.thrift.TException in the Apache Storm project:
the class Testing, method completeTopology.
/**
 * Run a topology to completion, capturing all of the messages that are emitted. This only works
 * when all of the spouts are instances of {@link org.apache.storm.testing.CompletableSpout} or
 * are overwritten by MockedSources in {@code param}.
 *
 * @param cluster the cluster to submit the topology to
 * @param topology the topology itself
 * @param param parameters describing how to complete the topology
 * @return a map of the component id to the list of tuples it emitted
 * @throws InterruptedException if interrupted while waiting for the topology to finish
 * @throws TException on any error from nimbus.
 */
public static Map<String, List<FixedTuple>> completeTopology(ILocalCluster cluster, StormTopology topology, CompleteTopologyParam param) throws TException, InterruptedException {
    Map<String, List<FixedTuple>> ret = null;
    IStormClusterState state = cluster.getClusterState();
    // Wrap the topology so every emitted tuple is captured for the result map.
    CapturedTopology<StormTopology> capTopo = captureTopology(topology);
    topology = capTopo.topology;
    String topoName = param.getTopologyName();
    if (topoName == null) {
        topoName = "topologytest-" + Utils.uuid();
    }
    Map<String, SpoutSpec> spouts = topology.get_spouts();
    MockedSources ms = param.getMockedSources();
    if (ms != null) {
        // Replace each mocked spout with a FixedTupleSpout replaying the supplied tuples.
        for (Entry<String, List<FixedTuple>> mocked : ms.getData().entrySet()) {
            FixedTupleSpout newSpout = new FixedTupleSpout(mocked.getValue());
            spouts.get(mocked.getKey()).set_spout_object(Thrift.serializeComponentObject(newSpout));
        }
    }
    List<Object> spoutObjects = spouts.values().stream().map((spec) -> Thrift.deserializeComponentObject(spec.get_spout_object())).collect(Collectors.toList());
    // Every spout must be completable so we can detect when the topology is done.
    for (Object o : spoutObjects) {
        if (!(o instanceof CompletableSpout)) {
            throw new RuntimeException("Cannot complete topology unless every spout is a CompletableSpout (or mocked to be); failed by " + o);
        }
    }
    for (Object spout : spoutObjects) {
        ((CompletableSpout) spout).startup();
    }
    cluster.submitTopology(topoName, param.getStormConf(), topology);
    if (Time.isSimulating()) {
        cluster.advanceClusterTime(11);
    }
    String topoId = state.getTopoId(topoName).get();
    //Give the topology time to come up without using it to wait for the spouts to complete
    simulateWait(cluster);
    Integer timeoutMs = param.getTimeoutMs();
    if (timeoutMs == null) {
        timeoutMs = TEST_TIMEOUT_MS;
    }
    // Wait until every spout reports itself exhausted, or the timeout elapses.
    whileTimeout(timeoutMs, () -> !isEvery(spoutObjects, (o) -> ((CompletableSpout) o).isExhausted()), () -> {
        try {
            simulateWait(cluster);
        } catch (Exception e) {
            // Propagate with the cause attached; dropping it hides the real failure.
            throw new RuntimeException(e);
        }
    });
    KillOptions killOpts = new KillOptions();
    killOpts.set_wait_secs(0);
    cluster.killTopologyWithOpts(topoName, killOpts);
    // Wait for the topology's assignment to be removed before reading results.
    whileTimeout(timeoutMs, () -> state.assignmentInfo(topoId, null) != null, () -> {
        try {
            simulateWait(cluster);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    });
    if (param.getCleanupState()) {
        for (Object o : spoutObjects) {
            ((CompletableSpout) o).clean();
        }
        ret = capTopo.capturer.getAndRemoveResults();
    } else {
        ret = capTopo.capturer.getAndClearResults();
    }
    return ret;
}
Use of org.apache.thrift.TException in the Apache Storm project:
the class Nimbus, method uploadChunk.
/**
 * Writes one chunk of an in-progress file upload to the channel previously
 * registered for {@code location} by beginFileUpload.
 *
 * @param location the upload location returned when the upload was started
 * @param chunk the bytes to append to the upload
 * @throws AuthorizationException if the caller is not allowed to upload files
 * @throws TException on any thrift-level error
 */
@SuppressWarnings("deprecation")
@Override
public void uploadChunk(String location, ByteBuffer chunk) throws AuthorizationException, TException {
    try {
        uploadChunkCalls.mark();
        checkAuthorization(null, null, "fileUpload");
        WritableByteChannel channel = uploaders.get(location);
        if (channel == null) {
            throw new RuntimeException("File for that location does not exist (or timed out)");
        }
        // WritableByteChannel.write is allowed to perform a partial write;
        // loop until the entire chunk has been consumed.
        while (chunk.hasRemaining()) {
            channel.write(chunk);
        }
        // Re-put the channel; presumably this refreshes the entry's expiration
        // in a time-based cache — TODO confirm against the uploaders type.
        uploaders.put(location, channel);
    } catch (Exception e) {
        LOG.warn("uploadChunk exception.", e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
Use of org.apache.thrift.TException in the Apache Storm project:
the class Nimbus, method uploadNewCredentials.
/**
 * Replaces the stored credentials for a running topology.
 *
 * <p>Resolves the topology id from its name, authorizes the caller against the
 * topology's configuration, then writes the new credentials to cluster state
 * under {@code credUpdateLock}.
 *
 * @param topoName name of the topology whose credentials are being replaced
 * @param credentials the new credentials; if {@code null}, an empty credential
 *        set is stored instead
 * @throws NotAliveException if no topology with that name is currently running
 * @throws AuthorizationException if the caller may not update this topology
 * @throws TException on any thrift-level error
 */
@Override
public void uploadNewCredentials(String topoName, Credentials credentials) throws NotAliveException, InvalidTopologyException, AuthorizationException, TException {
    try {
        uploadNewCredentialsCalls.mark();
        IStormClusterState state = stormClusterState;
        String topoId = toTopoId(topoName);
        if (topoId == null) {
            // Name does not resolve to a live topology id.
            throw new NotAliveException(topoName + " is not alive");
        }
        Map<String, Object> topoConf = tryReadTopoConf(topoId, blobStore);
        if (credentials == null) {
            // Treat a null upload as "clear credentials" rather than failing.
            credentials = new Credentials(Collections.emptyMap());
        }
        // Authorization must happen after the topology conf is read, since the
        // check is performed against that conf.
        checkAuthorization(topoName, topoConf, "uploadNewCredentials");
        // Serialize credential updates so concurrent uploads cannot interleave.
        synchronized (credUpdateLock) {
            state.setCredentials(topoId, credentials, topoConf);
        }
    } catch (Exception e) {
        LOG.warn("Upload Creds topology exception. (topology name='{}')", topoName, e);
        if (e instanceof TException) {
            throw (TException) e;
        }
        throw new RuntimeException(e);
    }
}
Aggregations