Use of org.apache.asterix.common.functions.FunctionSignature in project asterixdb by apache.
The class ClauseComparator, method visit.
@Override
public Void visit(CallExpr callExpr, Integer step) throws CompilationException {
    FunctionSignature signature = callExpr.getFunctionSignature();
    if (signature.getNamespace() != null && signature.getNamespace().equals("Metadata")
            && signature.getName().equals("dataset") && signature.getArity() == 1) {
        // Special case: print the built-in Metadata.dataset() accessor as a bare dataset name.
        LiteralExpr expr = (LiteralExpr) callExpr.getExprList().get(0);
        out.print(normalize(expr.getValue().getStringValue()));
    } else {
        // Generic path: optional hints, then "namespace.name(arg, ...)".
        printHints(callExpr.getHints(), step);
        out.print(generateFullName(signature.getNamespace(), signature.getName()) + "(");
        printDelimitedExpressions(callExpr.getExprList(), COMMA, step);
        out.print(")");
    }
    return null;
}
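The special-case branch fires only when namespace, name, and arity all match the built-in dataset accessor; any other call falls through to the generic printing path. A minimal, self-contained sketch of that signature test (the Signature record is a hypothetical stand-in, not the real asterixdb class):

public final class DatasetCallCheck {
    // Hypothetical stand-in for FunctionSignature: (namespace, name, arity).
    record Signature(String namespace, String name, int arity) {}

    static boolean printsAsBareDatasetName(Signature s) {
        // Mirrors the condition above: namespace, name, and arity must all match.
        return "Metadata".equals(s.namespace()) && "dataset".equals(s.name()) && s.arity() == 1;
    }

    public static void main(String[] args) {
        System.out.println(printsAsBareDatasetName(new Signature("Metadata", "dataset", 1))); // true
        System.out.println(printsAsBareDatasetName(new Signature("Metadata", "dataset", 2))); // false
        System.out.println(printsAsBareDatasetName(new Signature("test", "dataset", 1)));     // false
    }
}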
Use of org.apache.asterix.common.functions.FunctionSignature in project asterixdb by apache.
The class AqlDeleteRewriteVisitor, method visit.
@Override
public Void visit(DeleteStatement deleteStmt, Void visitArg) {
    // Build the argument for the synthetic dataset() call: "dataverse.dataset" or just "dataset".
    List<Expression> arguments = new ArrayList<>();
    Identifier dataverseName = deleteStmt.getDataverseName();
    Identifier datasetName = deleteStmt.getDatasetName();
    String arg = dataverseName == null ? datasetName.getValue()
            : dataverseName.getValue() + "." + datasetName.getValue();
    arguments.add(new LiteralExpr(new StringLiteral(arg)));
    CallExpr callExpression =
            new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1), arguments);
    // Rewrite the DELETE into a FLWOGR query: for $var in dataset(...) [where cond] return $var.
    List<Clause> clauseList = new ArrayList<>();
    VariableExpr var = deleteStmt.getVariableExpr();
    clauseList.add(new ForClause(var, callExpression));
    Expression condition = deleteStmt.getCondition();
    if (condition != null) {
        clauseList.add(new WhereClause(condition));
    }
    VariableExpr returnExpr = new VariableExpr(var.getVar());
    returnExpr.setIsNewVar(false);
    FLWOGRExpression flwogr = new FLWOGRExpression(clauseList, returnExpr);
    Query query = new Query(false);
    query.setBody(flwogr);
    deleteStmt.setQuery(query);
    return null;
}
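At the surface-syntax level, the rewrite above turns "delete $u from dataset dv.ds where <cond>" into "for $u in dataset('dv.ds') where <cond> return $u". A string-level sketch of the same transformation (names and helper are illustrative only, not the asterixdb API):

public final class DeleteRewriteSketch {
    static String rewriteDelete(String dataverse, String dataset, String var, String condition) {
        // Same shape as the AST built above: for-clause over dataset(), optional where, return.
        String fullName = dataverse == null ? dataset : dataverse + "." + dataset;
        StringBuilder b = new StringBuilder();
        b.append("for $").append(var).append(" in dataset('").append(fullName).append("')");
        if (condition != null) {
            b.append(" where ").append(condition);
        }
        return b.append(" return $").append(var).toString();
    }

    public static void main(String[] args) {
        System.out.println(rewriteDelete("TinySocial", "FacebookUsers", "u", "$u.id = 8"));
        // for $u in dataset('TinySocial.FacebookUsers') where $u.id = 8 return $u
    }
}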
Use of org.apache.asterix.common.functions.FunctionSignature in project asterixdb by apache.
The class AqlQueryRewriter, method inlineDeclaredUdfs.
private void inlineDeclaredUdfs() throws CompilationException {
    if (topStatement == null) {
        return;
    }
    // Signatures of functions declared inline in the query; these are already
    // available and need not be fetched from the metadata store.
    List<FunctionSignature> funIds = new ArrayList<>();
    for (FunctionDecl fdecl : declaredFunctions) {
        funIds.add(fdecl.getSignature());
    }
    // Pull in the declarations of any stored functions the query body references.
    List<FunctionDecl> storedFunctionDecls = new ArrayList<>();
    for (Expression topLevelExpr : topStatement.getDirectlyEnclosedExpressions()) {
        storedFunctionDecls.addAll(FunctionUtil.retrieveUsedStoredFunctions(metadataProvider, topLevelExpr,
                funIds, null, expr -> getFunctionCalls(expr), func -> functionParser.getFunctionDecl(func),
                signature -> CommonFunctionMapUtil.normalizeBuiltinFunctionSignature(signature)));
        declaredFunctions.addAll(storedFunctionDecls);
    }
    if (!declaredFunctions.isEmpty()) {
        AQLInlineUdfsVisitor visitor =
                new AQLInlineUdfsVisitor(context, new AQLRewriterFactory(), declaredFunctions, metadataProvider);
        while (topStatement.accept(visitor, declaredFunctions)) {
            // Loop until the visitor reports no more changes (fixed point).
        }
    }
    declaredFunctions.removeAll(storedFunctionDecls);
}
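The empty-bodied while loop is a fixed-point iteration: accept() returns true as long as the visitor inlined something, which is what handles UDFs that call other UDFs. A generic sketch of the pattern (toy rewrite step, not the asterixdb visitor):

import java.util.function.UnaryOperator;

public final class FixedPointSketch {
    // Apply a rewrite step repeatedly until it stops changing its input.
    static String rewriteToFixedPoint(String input, UnaryOperator<String> step) {
        String current = input;
        String next;
        while (!(next = step.apply(current)).equals(current)) {
            current = next;
        }
        return current;
    }

    public static void main(String[] args) {
        // Toy step mimicking nested UDF inlining: f() expands to g(), g() expands to 1.
        UnaryOperator<String> step = s -> s.replace("f()", "g()").replace("g()", "1");
        System.out.println(rewriteToFixedPoint("f() + g()", step)); // 1 + 1
    }
}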
Use of org.apache.asterix.common.functions.FunctionSignature in project asterixdb by apache.
The class SubscribeFeedStatement, method initialize.
public void initialize(MetadataTransactionContext mdTxnCtx) throws MetadataException {
    this.query = new Query(false);
    EntityId sourceFeedId = connectionRequest.getReceivingFeedId();
    Feed subscriberFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, sourceFeedId.getDataverse(),
            sourceFeedId.getEntityName());
    if (subscriberFeed == null) {
        throw new IllegalStateException("Subscriber feed " + sourceFeedId + " not found.");
    }
    String feedOutputType = getOutputType(mdTxnCtx);
    // Assemble the subscription as an AQL insert-into-dataset statement over feed-collect().
    StringBuilder builder = new StringBuilder();
    builder.append("use dataverse " + sourceFeedId.getDataverse() + ";\n");
    builder.append("set " + FunctionUtil.IMPORT_PRIVATE_FUNCTIONS + " '" + Boolean.TRUE + "';\n");
    builder.append("set " + FeedActivityDetails.FEED_POLICY_NAME + " '" + connectionRequest.getPolicy() + "';\n");
    builder.append("insert into dataset " + connectionRequest.getTargetDataset() + " ");
    builder.append(" (" + " for $x in feed-collect ('" + sourceFeedId.getDataverse() + "','"
            + sourceFeedId.getEntityName() + "','" + connectionRequest.getReceivingFeedId().getEntityName()
            + "','" + connectionRequest.getSubscriptionLocation().name() + "','"
            + connectionRequest.getTargetDataset() + "','" + feedOutputType + "')");
    List<FunctionSignature> functionsToApply = connectionRequest.getFunctionsToApply();
    if (functionsToApply == null || functionsToApply.isEmpty()) {
        builder.append(" return $x");
    } else {
        // Chain the applied functions: each let-binding feeds the next, and the last one is returned.
        String rValueName = "x";
        String lValueName = "y";
        int variableIndex = 0;
        for (FunctionSignature appliedFunction : functionsToApply) {
            Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
            variableIndex++;
            switch (function.getLanguage().toUpperCase()) {
                case Function.LANGUAGE_AQL:
                case Function.LANGUAGE_JAVA:
                    // Both languages use the same let-binding syntax here.
                    builder.append(" let $" + lValueName + variableIndex + ":=" + function.getName() + "($"
                            + rValueName + ")");
                    rValueName = lValueName + variableIndex;
                    break;
            }
            builder.append("\n");
        }
        builder.append("return $" + lValueName + variableIndex);
    }
    builder.append(")");
    builder.append(";");
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Connect feed statement translated to\n" + builder.toString());
    }
    // Parse the generated AQL and keep the query of the resulting insert statement.
    IParser parser = parserFactory.createParser(new StringReader(builder.toString()));
    try {
        List<Statement> statements = parser.parse();
        query = ((InsertStatement) statements.get(INSERT_STATEMENT_POS)).getQuery();
    } catch (CompilationException pe) {
        throw new MetadataException(pe);
    }
}
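The let-chain in the else branch threads each applied function's output into the next call and returns the last bound variable. A standalone sketch of just that chaining logic (function names are illustrative):

import java.util.List;

public final class LetChainSketch {
    static String letChain(List<String> functionNames) {
        // $x is the collected record; each function wraps the previous variable.
        StringBuilder b = new StringBuilder();
        String rValue = "x";
        int i = 0;
        for (String name : functionNames) {
            i++;
            b.append(" let $y").append(i).append(":=").append(name).append("($").append(rValue).append(")\n");
            rValue = "y" + i;
        }
        return b.append("return $").append(rValue).toString();
    }

    public static void main(String[] args) {
        System.out.println(letChain(List.of("clean_tweet", "detect_language")));
        //  let $y1:=clean_tweet($x)
        //  let $y2:=detect_language($y1)
        // return $y2
    }
}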
Use of org.apache.asterix.common.functions.FunctionSignature in project asterixdb by apache.
The class MetadataNode, method dropDataverse.
@Override
public void dropDataverse(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
    try {
        confirmDataverseCanBeDeleted(jobId, dataverseName);
        // Drop all datasets in this dataverse.
        List<Dataset> dataverseDatasets = getDataverseDatasets(jobId, dataverseName);
        for (Dataset ds : dataverseDatasets) {
            dropDataset(jobId, dataverseName, ds.getDatasetName());
        }
        // After dropping datasets, drop all datatypes in this dataverse.
        // As a side effect, acquires an S lock on the 'datatype' dataset on behalf of txnId.
        List<Datatype> dataverseDatatypes = getDataverseDatatypes(jobId, dataverseName);
        for (Datatype datatype : dataverseDatatypes) {
            forceDropDatatype(jobId, dataverseName, datatype.getDatatypeName());
        }
        // Drop all functions in this dataverse, each keyed by its (dataverse, name, arity) signature.
        // As a side effect, acquires an S lock on the 'Function' dataset on behalf of txnId.
        List<Function> dataverseFunctions = getDataverseFunctions(jobId, dataverseName);
        for (Function function : dataverseFunctions) {
            dropFunction(jobId, new FunctionSignature(dataverseName, function.getName(), function.getArity()));
        }
        // Drop all adapters in this dataverse.
        // As a side effect, acquires an S lock on the 'Adapter' dataset on behalf of txnId.
        List<DatasourceAdapter> dataverseAdapters = getDataverseAdapters(jobId, dataverseName);
        for (DatasourceAdapter adapter : dataverseAdapters) {
            dropAdapter(jobId, dataverseName, adapter.getAdapterIdentifier().getName());
        }
        // Drop all feeds and their connections in this dataverse.
        List<Feed> dataverseFeeds = getDataverseFeeds(jobId, dataverseName);
        for (Feed feed : dataverseFeeds) {
            List<FeedConnection> feedConnections = getFeedConnections(jobId, dataverseName, feed.getFeedName());
            for (FeedConnection feedConnection : feedConnections) {
                dropFeedConnection(jobId, dataverseName, feed.getFeedName(), feedConnection.getDatasetName());
            }
            dropFeed(jobId, dataverseName, feed.getFeedName());
        }
        // Drop all feed ingestion policies in this dataverse.
        List<FeedPolicyEntity> feedPolicies = getDataversePolicies(jobId, dataverseName);
        if (feedPolicies != null) {
            for (FeedPolicyEntity feedPolicy : feedPolicies) {
                dropFeedPolicy(jobId, dataverseName, feedPolicy.getPolicyName());
            }
        }
        // Finally, delete the dataverse entry from the 'dataverse' dataset.
        // As a side effect, acquires an S lock on the 'dataverse' dataset on behalf of txnId.
        ITupleReference searchKey = createTuple(dataverseName);
        ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
        // TODO: Change this to be a BTree-specific exception, e.g., BTreeKeyDoesNotExistException.
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
            throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
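Note that each stored function is dropped by its full FunctionSignature, i.e. the (dataverse, name, arity) triple, so overloads with different arities are separate metadata entries. A sketch of that keying behavior (hypothetical Signature record, not the real class):

import java.util.HashMap;
import java.util.Map;

public final class SignatureKeySketch {
    // Hypothetical stand-in: functions are keyed by (dataverse, name, arity).
    record Signature(String dataverse, String name, int arity) {}

    public static void main(String[] args) {
        Map<Signature, String> functions = new HashMap<>();
        functions.put(new Signature("test", "tweet_clean", 1), "body of arity-1 overload");
        functions.put(new Signature("test", "tweet_clean", 2), "body of arity-2 overload");
        // Dropping by signature removes exactly one overload; the other survives.
        functions.remove(new Signature("test", "tweet_clean", 1));
        System.out.println(functions.keySet());
        // [Signature[dataverse=test, name=tweet_clean, arity=2]]
    }
}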