Example use of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project: the configure method of the SocketServerInputStreamFactory class.
@Override
public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException, CompilationException {
    try {
        sockets = new ArrayList<>();
        // Optional mode override ("IP" or "NC"); when absent the current default mode is kept.
        final String modeValue = configuration.get(ExternalDataConstants.KEY_MODE);
        if (modeValue != null) {
            mode = Mode.valueOf(modeValue.trim().toUpperCase());
        }
        final String socketsValue = configuration.get(ExternalDataConstants.KEY_SOCKETS);
        if (socketsValue == null) {
            throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_NOT_PROPERLY_CONFIGURED);
        }
        final Map<InetAddress, Set<String>> ncMap =
                RuntimeUtils.getNodeControllerMap((ICcApplicationContext) serviceCtx.getApplicationContext());
        final List<String> ncs =
                RuntimeUtils.getAllNodeControllers((ICcApplicationContext) serviceCtx.getApplicationContext());
        final Random random = new Random();
        // Each comma-separated entry is "host:port"; host is an IP address (Mode.IP)
        // or a node-controller name (Mode.NC).
        for (String socket : socketsValue.split(",")) {
            final String[] hostAndPort = socket.split(":");
            final String host = hostAndPort[0].trim();
            final int port = Integer.parseInt(hostAndPort[1].trim());
            Pair<String, Integer> resolved = null;
            switch (mode) {
                case IP:
                    // Resolve the IP to the set of NCs running on it and pick one at random.
                    final Set<String> ncsOnIp = ncMap.get(InetAddress.getByName(host));
                    if ((ncsOnIp == null) || ncsOnIp.isEmpty()) {
                        throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_INVALID_HOST_NC,
                                "host", host, StringUtils.join(ncMap.keySet(), ", "));
                    }
                    final String[] candidates = ncsOnIp.toArray(new String[] {});
                    resolved = new Pair<>(candidates[random.nextInt(candidates.length)], port);
                    break;
                case NC:
                    resolved = new Pair<>(host, port);
                    if (!ncs.contains(host)) {
                        throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_INVALID_HOST_NC,
                                "NC", host, StringUtils.join(ncs, ", "));
                    }
                    break;
            }
            sockets.add(resolved);
        }
    } catch (CompilationException e) {
        throw e;
    } catch (HyracksDataException | UnknownHostException e) {
        throw new AsterixException(e);
    } catch (Exception e) {
        // NOTE(review): the original cause is dropped here; consider chaining e if a
        // cause-accepting CompilationException constructor exists — TODO confirm.
        throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_NOT_PROPERLY_CONFIGURED);
    }
}
Example use of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project: the evaluate method of the SimilarityJaccardPrefixEvaluator class.
// Evaluates prefix-filtered Jaccard similarity between two token lists. Reads five
// arguments from the tuple: the similarity threshold (float), the two declared list
// lengths (int), the two token lists (ordered or unordered int lists; evaluated via
// evalTokens1/evalTokens2), and the token-prefix length. Length, position, and suffix
// filters prune pairs that cannot reach the threshold; `sim` stays 0 for pruned pairs.
// NOTE(review): assumes tokens1/tokens2 are reusable int-list buffers with
// reset()/add()/get()/length() — TODO confirm against the enclosing class.
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
resultStorage.reset();
// similarity threshold
sim = 0;
evalThreshold.evaluate(tuple, inputVal);
// +1 skips the serialized type tag byte before the float payload.
float similarityThreshold = AFloatSerializerDeserializer.getFloat(inputVal.getByteArray(), inputVal.getStartOffset() + 1);
// Rebuild the filter object only when the threshold actually changed (cached across calls).
if (similarityThreshold != similarityThresholdCache || similarityFilters == null) {
similarityFilters = new SimilarityFiltersJaccard(similarityThreshold);
similarityThresholdCache = similarityThreshold;
}
evalLen1.evaluate(tuple, inputVal);
int length1 = ATypeHierarchy.getIntegerValue(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 0, inputVal.getByteArray(), inputVal.getStartOffset());
evalLen2.evaluate(tuple, inputVal);
int length2 = ATypeHierarchy.getIntegerValue(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 2, inputVal.getByteArray(), inputVal.getStartOffset());
//
// Length filter: skip all token reading when the lengths alone rule out a match.
if (similarityFilters.passLengthFilter(length1, length2)) {
// -- - tokens1 - --
int i;
tokens1.reset();
evalTokens1.evaluate(tuple, inputVal);
byte[] serList = inputVal.getByteArray();
int startOffset = inputVal.getStartOffset();
// Argument 1 must be an ordered or unordered list.
if (serList[startOffset] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG && serList[startOffset] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
throw new TypeMismatchException(BuiltinFunctions.SIMILARITY_JACCARD, 1, serList[startOffset], ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG, ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG);
}
int lengthTokens1;
if (serList[startOffset] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
lengthTokens1 = AOrderedListSerializerDeserializer.getNumberOfItems(inputVal.getByteArray(), startOffset);
// read tokens
for (i = 0; i < lengthTokens1; i++) {
int itemOffset;
int token;
try {
itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serList, startOffset, i);
} catch (AsterixException e) {
throw new HyracksDataException(e);
}
// startOffset + 1 points at the list's item-type tag for untagged items.
token = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 1, serList, itemOffset, startOffset + 1);
tokens1.add(token);
}
} else {
lengthTokens1 = AUnorderedListSerializerDeserializer.getNumberOfItems(inputVal.getByteArray(), startOffset);
// read tokens
for (i = 0; i < lengthTokens1; i++) {
int itemOffset;
int token;
try {
itemOffset = AUnorderedListSerializerDeserializer.getItemOffset(serList, startOffset, i);
} catch (AsterixException e) {
throw new HyracksDataException(e);
}
token = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 1, serList, itemOffset, startOffset + 1);
tokens1.add(token);
}
}
// pad tokens
// Pad up to the declared length1 with MAX_VALUE sentinels so the merge-style
// intersection below never matches padding against real tokens.
for (; i < length1; i++) {
tokens1.add(Integer.MAX_VALUE);
}
// -- - tokens2 - -- (same deserialization as tokens1, for argument 3)
tokens2.reset();
evalTokens2.evaluate(tuple, inputVal);
serList = inputVal.getByteArray();
startOffset = inputVal.getStartOffset();
if (serList[startOffset] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG && serList[startOffset] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
throw new TypeMismatchException(BuiltinFunctions.SIMILARITY_JACCARD, 3, serList[startOffset], ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG, ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG);
}
int lengthTokens2;
if (serList[startOffset] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
lengthTokens2 = AOrderedListSerializerDeserializer.getNumberOfItems(inputVal.getByteArray(), startOffset);
// read tokens
for (i = 0; i < lengthTokens2; i++) {
int itemOffset;
int token;
try {
itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serList, startOffset, i);
} catch (AsterixException e) {
throw new HyracksDataException(e);
}
token = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 3, serList, itemOffset, startOffset + 1);
tokens2.add(token);
}
} else {
lengthTokens2 = AUnorderedListSerializerDeserializer.getNumberOfItems(inputVal.getByteArray(), startOffset);
// read tokens
for (i = 0; i < lengthTokens2; i++) {
int itemOffset;
int token;
try {
itemOffset = AUnorderedListSerializerDeserializer.getItemOffset(serList, startOffset, i);
} catch (AsterixException e) {
throw new HyracksDataException(e);
}
token = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 3, serList, itemOffset, startOffset + 1);
tokens2.add(token);
}
}
// pad tokens
for (; i < length2; i++) {
tokens2.add(Integer.MAX_VALUE);
}
// -- - token prefix - --
evalTokenPrefix.evaluate(tuple, inputVal);
int tokenPrefix = ATypeHierarchy.getIntegerValue(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 4, inputVal.getByteArray(), inputVal.getStartOffset());
//
// -- - position filter - --
//
// Intersect only the prefixes; results (intersect size, stop/start positions) land in parInter.
SimilarityMetric.getPartialIntersectSize(tokens1.get(), 0, tokens1.length(), tokens2.get(), 0, tokens2.length(), tokenPrefix, parInter);
if (similarityFilters.passPositionFilter(parInter.intersectSize, parInter.posXStop, length1, parInter.posYStop, length2)) {
//
if (similarityFilters.passSuffixFilter(tokens1.get(), 0, tokens1.length(), parInter.posXStart, tokens2.get(), 0, tokens2.length(), parInter.posYStart)) {
// Only now compute the final similarity, continuing past the prefix intersection.
sim = similarityFilters.passSimilarityFilter(tokens1.get(), 0, tokens1.length(), parInter.posXStop + 1, tokens2.get(), 0, tokens2.length(), parInter.posYStop + 1, parInter.intersectSize);
}
}
}
try {
writeResult();
} catch (IOException e) {
throw new HyracksDataException(e);
}
result.set(resultStorage);
}
Example use of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project: the next method of the AbstractAsterixListIterator class.
/**
 * Advances the iterator to the next list item, updating the current item's
 * start position ({@code pos}) and byte length ({@code itemLen}).
 *
 * @throws HyracksDataException if the next item offset cannot be decoded
 *         (wraps the underlying AsterixException).
 */
@Override
public void next() throws HyracksDataException {
    // The boundary precomputed on the previous call becomes the current item start.
    pos = nextPos;
    count++;
    try {
        final int upcoming = count + 1;
        // Boundary after the new current item; for the final item it is the end of the list.
        nextPos = upcoming < numberOfItems ? getItemOffset(data, startOff, upcoming) : startOff + listLength;
        itemLen = nextPos - pos;
    } catch (AsterixException e) {
        throw new HyracksDataException(e);
    }
}
Example use of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project: the validateOperation method of the AbstractLangTranslator class.
/**
 * Validates that the given statement may run right now: waits (bounded) for the
 * cluster to become ACTIVE and for global recovery to finish, then rejects
 * operations that would modify the reserved Metadata dataverse and dataset
 * declarations with invalid hints.
 *
 * @param appCtx           CC application context (supplies wait-time configuration)
 * @param defaultDataverse dataverse used when the statement names none; may be null
 * @param stmt             the statement to validate
 * @throws AsterixException if the cluster is unusable/not recovered in time, or the
 *                          operation is not permitted
 */
public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt) throws AsterixException {
// Phase 1: wait for the cluster to reach ACTIVE (bounded by configuration).
if (!(ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE) && ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted())) {
int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
try {
ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE, maxWaitCycles, TimeUnit.SECONDS);
} catch (HyracksDataException e) {
throw new AsterixException(e);
} catch (InterruptedException e) {
if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.warning("Thread interrupted while waiting for cluster to be " + ClusterState.ACTIVE);
}
// Restore the interrupt flag; the state re-check below decides whether to fail.
Thread.currentThread().interrupt();
}
if (!ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left or haven't joined yet.\n");
} else {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
}
}
}
// Defensive re-check: the cluster may have degraded after the wait above.
if (ClusterStateManager.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left.\n");
}
// Phase 2: wait (1s per cycle, bounded) for global recovery to complete.
if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
int waitCycleCount = 0;
try {
while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
Thread.sleep(1000);
waitCycleCount++;
}
} catch (InterruptedException e) {
if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
}
Thread.currentThread().interrupt();
}
if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
throw new AsterixException("Cluster Global recovery is not yet complete and the system is in " + ClusterState.ACTIVE + " state");
}
}
// Phase 3: per-statement-kind permission checks. All write/drop operations
// against the reserved Metadata dataverse are rejected.
boolean invalidOperation = false;
String message = null;
// Effective dataverse: the statement's own, falling back to the session default.
String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
switch(stmt.getKind()) {
case Statement.Kind.INSERT:
InsertStatement insertStmt = (InsertStatement) stmt;
if (insertStmt.getDataverseName() != null) {
dataverse = insertStmt.getDataverseName().getValue();
}
invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
if (invalidOperation) {
message = "Insert operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
}
break;
case Statement.Kind.DELETE:
DeleteStatement deleteStmt = (DeleteStatement) stmt;
if (deleteStmt.getDataverseName() != null) {
dataverse = deleteStmt.getDataverseName().getValue();
}
invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
if (invalidOperation) {
message = "Delete operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
}
break;
case Statement.Kind.DATAVERSE_DROP:
DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName().getValue());
if (invalidOperation) {
message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
}
break;
case Statement.Kind.DATASET_DROP:
DropDatasetStatement dropStmt = (DropDatasetStatement) stmt;
if (dropStmt.getDataverseName() != null) {
dataverse = dropStmt.getDataverseName().getValue();
}
invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
if (invalidOperation) {
message = "Cannot drop a dataset belonging to the dataverse:" + MetadataConstants.METADATA_DATAVERSE_NAME;
}
break;
case Statement.Kind.DATASET_DECL:
// Dataset declarations are allowed everywhere, but every hint must validate.
DatasetDecl datasetStmt = (DatasetDecl) stmt;
Map<String, String> hints = datasetStmt.getHints();
if (hints != null && !hints.isEmpty()) {
Pair<Boolean, String> validationResult = null;
StringBuffer errorMsgBuffer = new StringBuffer();
for (Entry<String, String> hint : hints.entrySet()) {
validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
if (!validationResult.first) {
errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue() + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
errorMsgBuffer.append(" \n");
}
}
invalidOperation = errorMsgBuffer.length() > 0;
if (invalidOperation) {
message = errorMsgBuffer.toString();
}
}
break;
default:
break;
}
if (invalidOperation) {
throw new AsterixException("Invalid operation - " + message);
}
}
Example use of org.apache.asterix.common.exceptions.AsterixException in the Apache AsterixDB project: the post method of the ApiServlet class.
/**
 * Handles a web-UI query POST: parses the submitted statement(s) in the requested
 * language, executes them, and writes HTML-formatted results (or an error page)
 * to the response.
 */
@Override
protected void post(IServletRequest request, IServletResponse response) {
    // Query language: anything other than "AQL" falls through to SQL++.
    ILangCompilationProvider compilationProvider = "AQL".equals(request.getParameter("query-language")) ? aqlCompilationProvider : sqlppCompilationProvider;
    IParserFactory parserFactory = compilationProvider.getParserFactory();
    // Output format.
    PrintWriter out = response.writer();
    OutputFormat format;
    boolean csvAndHeader = false;
    String output = request.getParameter("output-format");
    try {
        // Enum.valueOf throws IllegalArgumentException for an unknown name and
        // NullPointerException when the parameter is absent (null); fall back to the
        // default format in both cases instead of failing the request with an
        // unlogged server error.
        format = OutputFormat.valueOf(output);
    } catch (IllegalArgumentException | NullPointerException e) {
        LOGGER.log(Level.INFO, output + ": unsupported output-format, using " + OutputFormat.CLEAN_JSON + " instead", e);
        // Default output format
        format = OutputFormat.CLEAN_JSON;
    }
    String query = request.getParameter("query");
    String wrapperArray = request.getParameter("wrapper-array");
    String printExprParam = request.getParameter("print-expr-tree");
    String printRewrittenExprParam = request.getParameter("print-rewritten-expr-tree");
    String printLogicalPlanParam = request.getParameter("print-logical-plan");
    String printOptimizedLogicalPlanParam = request.getParameter("print-optimized-logical-plan");
    String printJob = request.getParameter("print-job");
    String executeQuery = request.getParameter("execute-query");
    try {
        response.setStatus(HttpResponseStatus.OK);
        HttpUtil.setContentType(response, ContentType.TEXT_HTML, Encoding.UTF8);
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Failure setting content type", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        return;
    }
    try {
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
        if (hds == null) {
            // Lazily create the shared dataset client; double-checked under the ctx lock.
            synchronized (ctx) {
                hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                if (hds == null) {
                    hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(), ResultReader.NUM_READERS);
                    ctx.put(HYRACKS_DATASET_ATTR, hds);
                }
            }
        }
        IParser parser = parserFactory.createParser(query);
        List<Statement> aqlStatements = parser.parse();
        SessionConfig sessionConfig = new SessionConfig(format, true, isSet(executeQuery), true);
        sessionConfig.set(SessionConfig.FORMAT_HTML, true);
        sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, csvAndHeader);
        sessionConfig.set(SessionConfig.FORMAT_WRAPPER_ARRAY, isSet(wrapperArray));
        // Out-of-band debug output (expression trees, plans, job spec) per request flags.
        sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam), isSet(printLogicalPlanParam), isSet(printOptimizedLogicalPlanParam), isSet(printJob));
        SessionOutput sessionOutput = new SessionOutput(sessionConfig, out);
        MetadataManager.INSTANCE.init();
        IStatementExecutor translator = statementExectorFactory.create(appCtx, aqlStatements, sessionOutput, compilationProvider, componentProvider);
        double duration;
        long startTime = System.currentTimeMillis();
        translator.compileAndExecute(hcc, hds, IStatementExecutor.ResultDelivery.IMMEDIATE, null, new IStatementExecutor.Stats());
        long endTime = System.currentTimeMillis();
        duration = (endTime - startTime) / 1000.00;
        out.println(HTML_STATEMENT_SEPARATOR);
        out.println("<PRE>Duration of all jobs: " + duration + " sec</PRE>");
    } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        // Parse-level failures render as a friendly parse-error page.
        GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.toString(), pe);
        ResultUtil.webUIParseExceptionHandler(out, pe, query);
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
        ResultUtil.webUIErrorHandler(out, e);
    }
}
Aggregations