Search in sources:

Example 1 with InsertStatement

Use of org.apache.asterix.lang.common.statement.InsertStatement in the asterixdb project by Apache.

From the class QueryTranslator, method handleInsertUpsertStatement.

/**
 * Compiles an INSERT/UPSERT statement and, unless {@code compileOnly} is set, runs the
 * resulting job. A dataset-level metadata lock is held around both compilation and
 * execution; the metadata transaction is committed on success or aborted on failure
 * inside the compiler closure.
 *
 * @param compileOnly when true, compile under the lock and return the job without running it
 * @return the compiled {@link JobSpecification} in compile-only mode, otherwise {@code null}
 * @throws Exception on compilation or execution failure
 */
public JobSpecification handleInsertUpsertStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc, IHyracksDataset hdc, ResultDelivery resultDelivery, ResultMetadata outMetadata, Stats stats, boolean compileOnly, String clientContextId, IStatementExecutorContext ctx) throws Exception {
    InsertStatement stmtInsertUpsert = (InsertStatement) stmt;
    String dataverseName = getActiveDataverse(stmtInsertUpsert.getDataverseName());
    // Scopes the insert/delete/upsert metadata lock for "<dataverse>.<dataset>" around
    // compile and (when applicable) execution.
    final IMetadataLocker locker = new IMetadataLocker() {

        @Override
        public void lock() throws AsterixException {
            MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(metadataProvider.getLocks(), dataverseName + "." + stmtInsertUpsert.getDatasetName());
        }

        @Override
        public void unlock() {
            metadataProvider.getLocks().unlock();
        }
    };
    // Compiles under a fresh metadata transaction. bActiveTxn tracks whether the
    // transaction still needs aborting: it is cleared only after a successful commit,
    // so the catch block aborts exactly when the commit has not happened yet.
    final IStatementCompiler compiler = () -> {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        boolean bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        try {
            metadataProvider.setWriteTransaction(true);
            final JobSpecification jobSpec = rewriteCompileInsertUpsert(hcc, metadataProvider, stmtInsertUpsert);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            return jobSpec;
        } catch (Exception e) {
            if (bActiveTxn) {
                abort(e, e, mdTxnCtx);
            }
            throw e;
        }
    };
    // Compile-only mode: compile under the lock, return the job spec without running it.
    if (compileOnly) {
        locker.lock();
        try {
            return compiler.compile();
        } finally {
            locker.unlock();
        }
    }
    if (stmtInsertUpsert.getReturnExpression() != null) {
        // Statements with a RETURNING expression produce a result set; delegate result
        // delivery (which manages locking itself via the passed locker).
        deliverResult(hcc, hdc, compiler, metadataProvider, locker, resultDelivery, outMetadata, stats, clientContextId, ctx);
    } else {
        locker.lock();
        try {
            final JobSpecification jobSpec = compiler.compile();
            // NOTE(review): a null job spec is treated as nothing-to-run — confirm the
            // compile path may legitimately return null here.
            if (jobSpec == null) {
                return jobSpec;
            }
            JobUtils.runJob(hcc, jobSpec, true);
        } finally {
            locker.unlock();
        }
    }
    return null;
}
Also used : MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) JobSpecification(org.apache.hyracks.api.job.JobSpecification) CompiledInsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement) InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException)

Example 2 with InsertStatement

Use of org.apache.asterix.lang.common.statement.InsertStatement in the asterixdb project by Apache.

From the class AbstractLangTranslator, method validateOperation.

/**
 * Validates that the given statement may be executed in the current cluster state and
 * that it does not illegally target the Metadata dataverse.
 * <p>
 * First waits (bounded) for the cluster to become ACTIVE and for global recovery to
 * complete, then rejects INSERT/DELETE/drop operations aimed at the Metadata dataverse
 * and validates any dataset-declaration hints.
 *
 * @param appCtx           CC application context (supplies wait-time configuration)
 * @param defaultDataverse dataverse used when the statement names none; may be null
 * @param stmt             the statement to validate
 * @throws AsterixException if the cluster is unusable/unrecovered or the operation is invalid
 */
public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt) throws AsterixException {
    if (!(ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE) && ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted())) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        try {
            ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE, maxWaitCycles, TimeUnit.SECONDS);
        } catch (HyracksDataException e) {
            throw new AsterixException(e);
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to be " + ClusterState.ACTIVE);
            }
            // Preserve interrupt status for callers higher up the stack.
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
            throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left or haven't joined yet.\n");
        } else {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
            }
        }
    }
    if (ClusterStateManager.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
        throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left.\n");
    }
    if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        int waitCycleCount = 0;
        try {
            // Bounded poll: global recovery has no dedicated wait API here.
            while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
                Thread.sleep(1000);
                waitCycleCount++;
            }
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
            }
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
            throw new AsterixException("Cluster Global recovery is not yet complete and the system is in " + ClusterState.ACTIVE + " state");
        }
    }
    boolean invalidOperation = false;
    String message = null;
    String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
    switch(stmt.getKind()) {
        case Statement.Kind.INSERT:
            InsertStatement insertStmt = (InsertStatement) stmt;
            if (insertStmt.getDataverseName() != null) {
                dataverse = insertStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Insert operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DELETE:
            DeleteStatement deleteStmt = (DeleteStatement) stmt;
            if (deleteStmt.getDataverseName() != null) {
                dataverse = deleteStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Delete operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATAVERSE_DROP:
            DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName().getValue());
            if (invalidOperation) {
                message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
            }
            break;
        case Statement.Kind.DATASET_DROP:
            DropDatasetStatement dropStmt = (DropDatasetStatement) stmt;
            if (dropStmt.getDataverseName() != null) {
                dataverse = dropStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Cannot drop a dataset belonging to the dataverse:" + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATASET_DECL:
            DatasetDecl datasetStmt = (DatasetDecl) stmt;
            Map<String, String> hints = datasetStmt.getHints();
            if (hints != null && !hints.isEmpty()) {
                // StringBuilder: local, single-threaded use — no need for StringBuffer's
                // synchronization.
                StringBuilder errorMsgBuffer = new StringBuilder();
                for (Entry<String, String> hint : hints.entrySet()) {
                    Pair<Boolean, String> validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
                    if (!validationResult.first) {
                        errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue() + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
                        errorMsgBuffer.append(" \n");
                    }
                }
                invalidOperation = errorMsgBuffer.length() > 0;
                if (invalidOperation) {
                    message = errorMsgBuffer.toString();
                }
            }
            break;
        default:
            break;
    }
    if (invalidOperation) {
        throw new AsterixException("Invalid operation - " + message);
    }
}
Also used : DropDatasetStatement(org.apache.asterix.lang.common.statement.DropDatasetStatement) DeleteStatement(org.apache.asterix.lang.common.statement.DeleteStatement) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement) DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement) DatasetDecl(org.apache.asterix.lang.common.statement.DatasetDecl) AsterixException(org.apache.asterix.common.exceptions.AsterixException)

Example 3 with InsertStatement

Use of org.apache.asterix.lang.common.statement.InsertStatement in the asterixdb project by Apache.

From the class QueryTranslator, method rewriteCompileInsertUpsert.

/**
 * Rewrites an INSERT/UPSERT statement's AST and compiles it into an executable job.
 * Both steps happen under the caller's ongoing metadata transaction.
 *
 * @param clusterInfoCollector supplies cluster topology to the compiler
 * @param metadataProvider     metadata access bound to the caller's transaction
 * @param insertUpsert         the statement to rewrite and compile
 * @return the compiled job specification
 * @throws AlgebricksException if the statement kind is neither INSERT nor UPSERT,
 *         or compilation fails
 */
private JobSpecification rewriteCompileInsertUpsert(IClusterInfoCollector clusterInfoCollector, MetadataProvider metadataProvider, InsertStatement insertUpsert) throws RemoteException, AlgebricksException, ACIDException {
    // Insert/upsert statement rewriting (happens under the same ongoing metadata transaction)
    Pair<IReturningStatement, Integer> rewrittenResult = apiFramework.reWriteQuery(declaredFunctions, metadataProvider, insertUpsert, sessionOutput);
    InsertStatement rewrittenInsertUpsert = (InsertStatement) rewrittenResult.first;
    String dataverseName = getActiveDataverse(rewrittenInsertUpsert.getDataverseName());
    String datasetName = rewrittenInsertUpsert.getDatasetName().getValue();
    CompiledInsertStatement clfrqs;
    switch(insertUpsert.getKind()) {
        case Statement.Kind.INSERT:
            clfrqs = new CompiledInsertStatement(dataverseName, datasetName, rewrittenInsertUpsert.getQuery(), rewrittenInsertUpsert.getVarCounter(), rewrittenInsertUpsert.getVar(), rewrittenInsertUpsert.getReturnExpression());
            break;
        case Statement.Kind.UPSERT:
            // CompiledUpsertStatement extends CompiledInsertStatement, so one variable serves both.
            clfrqs = new CompiledUpsertStatement(dataverseName, datasetName, rewrittenInsertUpsert.getQuery(), rewrittenInsertUpsert.getVarCounter(), rewrittenInsertUpsert.getVar(), rewrittenInsertUpsert.getReturnExpression());
            break;
        default:
            // Report the kind actually switched on (insertUpsert), not the rewritten copy.
            throw new AlgebricksException("Unsupported statement type " + insertUpsert.getKind());
    }
    // Insert/upsert statement compilation (happens under the same ongoing metadata transaction)
    return apiFramework.compileQuery(clusterInfoCollector, metadataProvider, rewrittenInsertUpsert.getQuery(), rewrittenResult.second, datasetName, sessionOutput, clfrqs);
}
Also used : CompiledUpsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledUpsertStatement) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) IReturningStatement(org.apache.asterix.lang.common.base.IReturningStatement) CompiledInsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement) InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement) CompiledInsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement)

Example 4 with InsertStatement

Use of org.apache.asterix.lang.common.statement.InsertStatement in the asterixdb project by Apache.

From the class QueryTranslator, method compileAndExecute.

@Override
/**
 * Compiles and executes each parsed statement in order, dispatching on its kind.
 * <p>
 * State that spans statements is threaded through the loop: the WRITE statement
 * updates {@code writerFactory}/{@code outputFile} for later statements, SET updates
 * {@code config}, DATAVERSE_DECL updates the active dataverse, and
 * {@code resultSetIdCounter} allocates result-set ids for queries and
 * returning inserts/upserts.
 *
 * @throws Exception if any statement fails to validate, compile, or execute
 */
@Override
public void compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, ResultDelivery resultDelivery, ResultMetadata outMetadata, Stats stats, String clientContextId, IStatementExecutorContext ctx) throws Exception {
    int resultSetIdCounter = 0;
    FileSplit outputFile = null;
    IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
    IResultSerializerFactoryProvider resultSerializerFactoryProvider = ResultSerializerFactoryProvider.INSTANCE;
    Map<String, String> config = new HashMap<>();
    /* Since the system runs a large number of threads, when HTTP requests don't return, it becomes difficult to
         * find the thread running the request to determine where it has stopped.
         * Setting the thread name helps make that easier
         */
    String threadName = Thread.currentThread().getName();
    Thread.currentThread().setName(QueryTranslator.class.getSimpleName());
    try {
        for (Statement stmt : statements) {
            if (sessionConfig.is(SessionConfig.FORMAT_HTML)) {
                sessionOutput.out().println(ApiServlet.HTML_STATEMENT_SEPARATOR);
            }
            validateOperation(appCtx, activeDataverse, stmt);
            // Rewrite the statement's AST.
            rewriteStatement(stmt);
            // A fresh MetadataProvider per statement, carrying the loop-scoped state.
            MetadataProvider metadataProvider = new MetadataProvider(appCtx, activeDataverse, componentProvider);
            metadataProvider.setWriterFactory(writerFactory);
            metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
            metadataProvider.setOutputFile(outputFile);
            metadataProvider.setConfig(config);
            switch(stmt.getKind()) {
                case Statement.Kind.SET:
                    handleSetStatement(stmt, config);
                    break;
                case Statement.Kind.DATAVERSE_DECL:
                    activeDataverse = handleUseDataverseStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CREATE_DATAVERSE:
                    handleCreateDataverseStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DATASET_DECL:
                    handleCreateDatasetStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.CREATE_INDEX:
                    handleCreateIndexStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.TYPE_DECL:
                    handleCreateTypeStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.NODEGROUP_DECL:
                    handleCreateNodeGroupStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DATAVERSE_DROP:
                    handleDataverseDropStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.DATASET_DROP:
                    handleDatasetDropStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.INDEX_DROP:
                    handleIndexDropStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.TYPE_DROP:
                    handleTypeDropStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.NODEGROUP_DROP:
                    handleNodegroupDropStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CREATE_FUNCTION:
                    handleCreateFunctionStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.FUNCTION_DROP:
                    handleFunctionDropStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.LOAD:
                    handleLoadStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.INSERT:
                case Statement.Kind.UPSERT:
                    // Only inserts/upserts with a RETURNING expression produce a result
                    // set, so only those consume a result-set id.
                    if (((InsertStatement) stmt).getReturnExpression() != null) {
                        metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
                        metadataProvider.setResultAsyncMode(resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.DEFERRED);
                    }
                    handleInsertUpsertStatement(metadataProvider, stmt, hcc, hdc, resultDelivery, outMetadata, stats, false, clientContextId, ctx);
                    break;
                case Statement.Kind.DELETE:
                    handleDeleteStatement(metadataProvider, stmt, hcc, false);
                    break;
                case Statement.Kind.CREATE_FEED:
                    handleCreateFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DROP_FEED:
                    handleDropFeedStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.DROP_FEED_POLICY:
                    handleDropFeedPolicyStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CONNECT_FEED:
                    handleConnectFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DISCONNECT_FEED:
                    handleDisconnectFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.START_FEED:
                    handleStartFeedStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.STOP_FEED:
                    handleStopFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CREATE_FEED_POLICY:
                    handleCreateFeedPolicyStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.QUERY:
                    metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
                    metadataProvider.setResultAsyncMode(resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.DEFERRED);
                    handleQuery(metadataProvider, (Query) stmt, hcc, hdc, resultDelivery, outMetadata, stats, clientContextId, ctx);
                    break;
                case Statement.Kind.COMPACT:
                    handleCompactStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.EXTERNAL_DATASET_REFRESH:
                    handleExternalDatasetRefreshStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.WRITE:
                    // WRITE configures output for subsequent statements in this batch.
                    Pair<IAWriterFactory, FileSplit> result = handleWriteStatement(stmt);
                    writerFactory = (result.first != null) ? result.first : writerFactory;
                    outputFile = result.second;
                    break;
                case Statement.Kind.RUN:
                    handleRunStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.FUNCTION_DECL:
                    // No op
                    break;
                case Statement.Kind.EXTENSION:
                    ((IExtensionStatement) stmt).handle(this, metadataProvider, hcc, hdc, resultDelivery, stats, resultSetIdCounter);
                    break;
                default:
                    // Was "Unknown function" — a misleading copy-paste; report the statement kind.
                    throw new CompilationException("Unknown statement kind " + stmt.getKind());
            }
        }
    } finally {
        // Always restore the original thread name, even on failure.
        Thread.currentThread().setName(threadName);
    }
}
Also used : IExtensionStatement(org.apache.asterix.algebra.extension.IExtensionStatement) CompilationException(org.apache.asterix.common.exceptions.CompilationException) HashMap(java.util.HashMap) IResultSerializerFactoryProvider(org.apache.hyracks.algebricks.data.IResultSerializerFactoryProvider) StopFeedStatement(org.apache.asterix.lang.common.statement.StopFeedStatement) FunctionDropStatement(org.apache.asterix.lang.common.statement.FunctionDropStatement) LoadStatement(org.apache.asterix.lang.common.statement.LoadStatement) CompiledInsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement) CreateDataverseStatement(org.apache.asterix.lang.common.statement.CreateDataverseStatement) InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement) CompiledLoadFromFileStatement(org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement) CreateFeedPolicyStatement(org.apache.asterix.lang.common.statement.CreateFeedPolicyStatement) CreateIndexStatement(org.apache.asterix.lang.common.statement.CreateIndexStatement) RunStatement(org.apache.asterix.lang.common.statement.RunStatement) IExtensionStatement(org.apache.asterix.algebra.extension.IExtensionStatement) FeedPolicyDropStatement(org.apache.asterix.lang.common.statement.FeedPolicyDropStatement) Statement(org.apache.asterix.lang.common.base.Statement) DisconnectFeedStatement(org.apache.asterix.lang.common.statement.DisconnectFeedStatement) CompiledDeleteStatement(org.apache.asterix.translator.CompiledStatements.CompiledDeleteStatement) CreateFeedStatement(org.apache.asterix.lang.common.statement.CreateFeedStatement) DeleteStatement(org.apache.asterix.lang.common.statement.DeleteStatement) DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement) TypeDropStatement(org.apache.asterix.lang.common.statement.TypeDropStatement) CompactStatement(org.apache.asterix.lang.common.statement.CompactStatement) 
StartFeedStatement(org.apache.asterix.lang.common.statement.StartFeedStatement) NodeGroupDropStatement(org.apache.asterix.lang.common.statement.NodeGroupDropStatement) RefreshExternalDatasetStatement(org.apache.asterix.lang.common.statement.RefreshExternalDatasetStatement) SetStatement(org.apache.asterix.lang.common.statement.SetStatement) CompiledUpsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledUpsertStatement) ConnectFeedStatement(org.apache.asterix.lang.common.statement.ConnectFeedStatement) ICompiledDmlStatement(org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement) IndexDropStatement(org.apache.asterix.lang.common.statement.IndexDropStatement) CreateFunctionStatement(org.apache.asterix.lang.common.statement.CreateFunctionStatement) WriteStatement(org.apache.asterix.lang.common.statement.WriteStatement) IReturningStatement(org.apache.asterix.lang.common.base.IReturningStatement) DropDatasetStatement(org.apache.asterix.lang.common.statement.DropDatasetStatement) FeedDropStatement(org.apache.asterix.lang.common.statement.FeedDropStatement) FileSplit(org.apache.hyracks.api.io.FileSplit) UnmanagedFileSplit(org.apache.hyracks.api.io.UnmanagedFileSplit) DatasetNodegroupCardinalityHint(org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) ResultSetId(org.apache.hyracks.api.dataset.ResultSetId) IAWriterFactory(org.apache.hyracks.algebricks.data.IAWriterFactory)

Example 5 with InsertStatement

Use of org.apache.asterix.lang.common.statement.InsertStatement in the asterixdb project by Apache.

From the class SubscribeFeedStatement, method initialize.

/**
 * Builds an AQL "insert into dataset" statement that collects from the source feed,
 * applies any configured functions, parses it, and stores the resulting query.
 *
 * @param mdTxnCtx metadata transaction used to look up the feed, its output type,
 *                 and any applied functions
 * @throws MetadataException if the generated statement fails to parse
 * @throws IllegalStateException if the subscriber feed does not exist
 */
public void initialize(MetadataTransactionContext mdTxnCtx) throws MetadataException {
    this.query = new Query(false);
    EntityId sourceFeedId = connectionRequest.getReceivingFeedId();
    Feed subscriberFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, connectionRequest.getReceivingFeedId().getDataverse(), connectionRequest.getReceivingFeedId().getEntityName());
    if (subscriberFeed == null) {
        // Identify the missing feed by its id; the feed object itself is null here,
        // so concatenating it (as the old message did) always printed "null".
        throw new IllegalStateException(" Subscriber feed " + sourceFeedId + " not found.");
    }
    String feedOutputType = getOutputType(mdTxnCtx);
    StringBuilder builder = new StringBuilder();
    builder.append("use dataverse " + sourceFeedId.getDataverse() + ";\n");
    builder.append("set" + " " + FunctionUtil.IMPORT_PRIVATE_FUNCTIONS + " " + "'" + Boolean.TRUE + "'" + ";\n");
    builder.append("set" + " " + FeedActivityDetails.FEED_POLICY_NAME + " " + "'" + connectionRequest.getPolicy() + "'" + ";\n");
    builder.append("insert into dataset " + connectionRequest.getTargetDataset() + " ");
    builder.append(" (" + " for $x in feed-collect ('" + sourceFeedId.getDataverse() + "'" + "," + "'" + sourceFeedId.getEntityName() + "'" + "," + "'" + connectionRequest.getReceivingFeedId().getEntityName() + "'" + "," + "'" + connectionRequest.getSubscriptionLocation().name() + "'" + "," + "'" + connectionRequest.getTargetDataset() + "'" + "," + "'" + feedOutputType + "'" + ")");
    List<FunctionSignature> functionsToApply = connectionRequest.getFunctionsToApply();
    // Treat a null function list like an empty one. The previous check
    // ((functionsToApply != null) && functionsToApply.isEmpty()) fell through to the
    // else branch on null and hit a NullPointerException in the for-each below.
    if (functionsToApply == null || functionsToApply.isEmpty()) {
        builder.append(" return $x");
    } else {
        Function function;
        String rValueName = "x";
        String lValueName = "y";
        int variableIndex = 0;
        for (FunctionSignature appliedFunction : functionsToApply) {
            function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
            variableIndex++;
            switch(function.getLanguage().toUpperCase()) {
                case Function.LANGUAGE_AQL:
                case Function.LANGUAGE_JAVA:
                    // Both languages are applied identically in the generated query:
                    // chain each function's output as the next one's input variable.
                    builder.append(" let " + "$" + lValueName + variableIndex + ":=" + function.getName() + "(" + "$" + rValueName + ")");
                    rValueName = lValueName + variableIndex;
                    break;
            }
            builder.append("\n");
        }
        builder.append("return $" + lValueName + variableIndex);
    }
    builder.append(")");
    builder.append(";");
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Connect feed statement translated to\n" + builder.toString());
    }
    IParser parser = parserFactory.createParser(new StringReader(builder.toString()));
    List<Statement> statements;
    try {
        statements = parser.parse();
        query = ((InsertStatement) statements.get(INSERT_STATEMENT_POS)).getQuery();
    } catch (CompilationException pe) {
        // Wrap with cause preserved so the generated-statement parse failure is traceable.
        throw new MetadataException(pe);
    }
}
Also used : CompilationException(org.apache.asterix.common.exceptions.CompilationException) Query(org.apache.asterix.lang.common.statement.Query) InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement) Statement(org.apache.asterix.lang.common.base.Statement) FunctionSignature(org.apache.asterix.common.functions.FunctionSignature) MetadataException(org.apache.asterix.metadata.MetadataException) EntityId(org.apache.asterix.active.EntityId) Function(org.apache.asterix.metadata.entities.Function) StringReader(java.io.StringReader) Feed(org.apache.asterix.metadata.entities.Feed) IParser(org.apache.asterix.lang.common.base.IParser)

Aggregations

InsertStatement (org.apache.asterix.lang.common.statement.InsertStatement)5 CompilationException (org.apache.asterix.common.exceptions.CompilationException)3 CompiledInsertStatement (org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement)3 AsterixException (org.apache.asterix.common.exceptions.AsterixException)2 IReturningStatement (org.apache.asterix.lang.common.base.IReturningStatement)2 Statement (org.apache.asterix.lang.common.base.Statement)2 DataverseDropStatement (org.apache.asterix.lang.common.statement.DataverseDropStatement)2 DeleteStatement (org.apache.asterix.lang.common.statement.DeleteStatement)2 DropDatasetStatement (org.apache.asterix.lang.common.statement.DropDatasetStatement)2 MetadataException (org.apache.asterix.metadata.MetadataException)2 CompiledUpsertStatement (org.apache.asterix.translator.CompiledStatements.CompiledUpsertStatement)2 IOException (java.io.IOException)1 StringReader (java.io.StringReader)1 RemoteException (java.rmi.RemoteException)1 HashMap (java.util.HashMap)1 EntityId (org.apache.asterix.active.EntityId)1 IExtensionStatement (org.apache.asterix.algebra.extension.IExtensionStatement)1 ACIDException (org.apache.asterix.common.exceptions.ACIDException)1 FunctionSignature (org.apache.asterix.common.functions.FunctionSignature)1 IParser (org.apache.asterix.lang.common.base.IParser)1