
Example 26 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

In class QueryTranslator, method validateCompactionPolicy:

protected static void validateCompactionPolicy(String compactionPolicy, Map<String, String> compactionPolicyProperties, MetadataTransactionContext mdTxnCtx, boolean isExternalDataset) throws CompilationException, Exception {
    CompactionPolicy compactionPolicyEntity = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME, compactionPolicy);
    if (compactionPolicyEntity == null) {
        throw new CompilationException("Unknown compaction policy: " + compactionPolicy);
    }
    String compactionPolicyFactoryClassName = compactionPolicyEntity.getClassName();
    ILSMMergePolicyFactory mergePolicyFactory = (ILSMMergePolicyFactory) Class.forName(compactionPolicyFactoryClassName).newInstance();
    if (isExternalDataset && mergePolicyFactory.getName().compareTo("correlated-prefix") == 0) {
        throw new CompilationException("The correlated-prefix merge policy cannot be used with external dataset.");
    }
    if (compactionPolicyProperties == null) {
        if (mergePolicyFactory.getName().compareTo("no-merge") != 0) {
            throw new CompilationException("Compaction policy properties are missing.");
        }
    } else {
        for (Map.Entry<String, String> entry : compactionPolicyProperties.entrySet()) {
            if (!mergePolicyFactory.getPropertiesNames().contains(entry.getKey())) {
                throw new CompilationException("Invalid compaction policy property: " + entry.getKey());
            }
        }
        for (String p : mergePolicyFactory.getPropertiesNames()) {
            if (!compactionPolicyProperties.containsKey(p)) {
                throw new CompilationException("Missing compaction policy property: " + p);
            }
        }
    }
}
Also used : CompilationException(org.apache.asterix.common.exceptions.CompilationException) CompactionPolicy(org.apache.asterix.metadata.entities.CompactionPolicy) Map(java.util.Map) BuiltinTypeMap(org.apache.asterix.metadata.entities.BuiltinTypeMap) HashMap(java.util.HashMap) ILSMMergePolicyFactory(org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory)
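
The two loops at the end implement a two-way check: every supplied property must be one the merge policy declares, and every declared property must be supplied. Below is a minimal standalone sketch of the same check using only JDK types; the class name, method name, and sample property keys are illustrative, not AsterixDB API.

import java.util.Map;
import java.util.Set;

public final class PolicyPropertyCheck {

    static void validatePolicyProperties(Map<String, String> supplied, Set<String> knownProperties) {
        // Reject any property the merge policy does not define.
        for (String key : supplied.keySet()) {
            if (!knownProperties.contains(key)) {
                throw new IllegalArgumentException("Invalid compaction policy property: " + key);
            }
        }
        // Require every property the merge policy defines.
        for (String required : knownProperties) {
            if (!supplied.containsKey(required)) {
                throw new IllegalArgumentException("Missing compaction policy property: " + required);
            }
        }
    }

    public static void main(String[] args) {
        // Sample data only; the keys are illustrative property names.
        validatePolicyProperties(
                Map.of("max-mergable-component-size", "1073741824",
                       "max-tolerance-component-count", "5"),
                Set.of("max-mergable-component-size", "max-tolerance-component-count"));
        System.out.println("properties accepted");
    }
}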

Example 27 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

In class QueryTranslator, method compileAndExecute:

@Override
public void compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, ResultDelivery resultDelivery, ResultMetadata outMetadata, Stats stats, String clientContextId, IStatementExecutorContext ctx) throws Exception {
    int resultSetIdCounter = 0;
    FileSplit outputFile = null;
    IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
    IResultSerializerFactoryProvider resultSerializerFactoryProvider = ResultSerializerFactoryProvider.INSTANCE;
    Map<String, String> config = new HashMap<>();
    /*
     * Since the system runs a large number of threads, when HTTP requests don't return, it becomes
     * difficult to find the thread running the request to determine where it has stopped.
     * Setting the thread name helps make that easier.
     */
    String threadName = Thread.currentThread().getName();
    Thread.currentThread().setName(QueryTranslator.class.getSimpleName());
    try {
        for (Statement stmt : statements) {
            if (sessionConfig.is(SessionConfig.FORMAT_HTML)) {
                sessionOutput.out().println(ApiServlet.HTML_STATEMENT_SEPARATOR);
            }
            validateOperation(appCtx, activeDataverse, stmt);
            // Rewrite the statement's AST.
            rewriteStatement(stmt);
            MetadataProvider metadataProvider = new MetadataProvider(appCtx, activeDataverse, componentProvider);
            metadataProvider.setWriterFactory(writerFactory);
            metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
            metadataProvider.setOutputFile(outputFile);
            metadataProvider.setConfig(config);
            switch(stmt.getKind()) {
                case Statement.Kind.SET:
                    handleSetStatement(stmt, config);
                    break;
                case Statement.Kind.DATAVERSE_DECL:
                    activeDataverse = handleUseDataverseStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CREATE_DATAVERSE:
                    handleCreateDataverseStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DATASET_DECL:
                    handleCreateDatasetStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.CREATE_INDEX:
                    handleCreateIndexStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.TYPE_DECL:
                    handleCreateTypeStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.NODEGROUP_DECL:
                    handleCreateNodeGroupStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DATAVERSE_DROP:
                    handleDataverseDropStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.DATASET_DROP:
                    handleDatasetDropStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.INDEX_DROP:
                    handleIndexDropStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.TYPE_DROP:
                    handleTypeDropStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.NODEGROUP_DROP:
                    handleNodegroupDropStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CREATE_FUNCTION:
                    handleCreateFunctionStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.FUNCTION_DROP:
                    handleFunctionDropStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.LOAD:
                    handleLoadStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.INSERT:
                case Statement.Kind.UPSERT:
                    if (((InsertStatement) stmt).getReturnExpression() != null) {
                        metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
                        metadataProvider.setResultAsyncMode(resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.DEFERRED);
                    }
                    handleInsertUpsertStatement(metadataProvider, stmt, hcc, hdc, resultDelivery, outMetadata, stats, false, clientContextId, ctx);
                    break;
                case Statement.Kind.DELETE:
                    handleDeleteStatement(metadataProvider, stmt, hcc, false);
                    break;
                case Statement.Kind.CREATE_FEED:
                    handleCreateFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DROP_FEED:
                    handleDropFeedStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.DROP_FEED_POLICY:
                    handleDropFeedPolicyStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CONNECT_FEED:
                    handleConnectFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.DISCONNECT_FEED:
                    handleDisconnectFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.START_FEED:
                    handleStartFeedStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.STOP_FEED:
                    handleStopFeedStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.CREATE_FEED_POLICY:
                    handleCreateFeedPolicyStatement(metadataProvider, stmt);
                    break;
                case Statement.Kind.QUERY:
                    metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
                    metadataProvider.setResultAsyncMode(resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.DEFERRED);
                    handleQuery(metadataProvider, (Query) stmt, hcc, hdc, resultDelivery, outMetadata, stats, clientContextId, ctx);
                    break;
                case Statement.Kind.COMPACT:
                    handleCompactStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.EXTERNAL_DATASET_REFRESH:
                    handleExternalDatasetRefreshStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.WRITE:
                    Pair<IAWriterFactory, FileSplit> result = handleWriteStatement(stmt);
                    writerFactory = (result.first != null) ? result.first : writerFactory;
                    outputFile = result.second;
                    break;
                case Statement.Kind.RUN:
                    handleRunStatement(metadataProvider, stmt, hcc);
                    break;
                case Statement.Kind.FUNCTION_DECL:
                    // No op
                    break;
                case Statement.Kind.EXTENSION:
                    ((IExtensionStatement) stmt).handle(this, metadataProvider, hcc, hdc, resultDelivery, stats, resultSetIdCounter);
                    break;
                default:
                    throw new CompilationException("Unknown function");
            }
        }
    } finally {
        Thread.currentThread().setName(threadName);
    }
}
Also used : IExtensionStatement(org.apache.asterix.algebra.extension.IExtensionStatement) CompilationException(org.apache.asterix.common.exceptions.CompilationException) HashMap(java.util.HashMap) IResultSerializerFactoryProvider(org.apache.hyracks.algebricks.data.IResultSerializerFactoryProvider) StopFeedStatement(org.apache.asterix.lang.common.statement.StopFeedStatement) FunctionDropStatement(org.apache.asterix.lang.common.statement.FunctionDropStatement) LoadStatement(org.apache.asterix.lang.common.statement.LoadStatement) CompiledInsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement) CreateDataverseStatement(org.apache.asterix.lang.common.statement.CreateDataverseStatement) InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement) CompiledLoadFromFileStatement(org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement) CreateFeedPolicyStatement(org.apache.asterix.lang.common.statement.CreateFeedPolicyStatement) CreateIndexStatement(org.apache.asterix.lang.common.statement.CreateIndexStatement) RunStatement(org.apache.asterix.lang.common.statement.RunStatement) FeedPolicyDropStatement(org.apache.asterix.lang.common.statement.FeedPolicyDropStatement) Statement(org.apache.asterix.lang.common.base.Statement) DisconnectFeedStatement(org.apache.asterix.lang.common.statement.DisconnectFeedStatement) CompiledDeleteStatement(org.apache.asterix.translator.CompiledStatements.CompiledDeleteStatement) CreateFeedStatement(org.apache.asterix.lang.common.statement.CreateFeedStatement) DeleteStatement(org.apache.asterix.lang.common.statement.DeleteStatement) DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement) TypeDropStatement(org.apache.asterix.lang.common.statement.TypeDropStatement) CompactStatement(org.apache.asterix.lang.common.statement.CompactStatement) StartFeedStatement(org.apache.asterix.lang.common.statement.StartFeedStatement) NodeGroupDropStatement(org.apache.asterix.lang.common.statement.NodeGroupDropStatement) RefreshExternalDatasetStatement(org.apache.asterix.lang.common.statement.RefreshExternalDatasetStatement) SetStatement(org.apache.asterix.lang.common.statement.SetStatement) CompiledUpsertStatement(org.apache.asterix.translator.CompiledStatements.CompiledUpsertStatement) ConnectFeedStatement(org.apache.asterix.lang.common.statement.ConnectFeedStatement) ICompiledDmlStatement(org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement) IndexDropStatement(org.apache.asterix.lang.common.statement.IndexDropStatement) CreateFunctionStatement(org.apache.asterix.lang.common.statement.CreateFunctionStatement) WriteStatement(org.apache.asterix.lang.common.statement.WriteStatement) IReturningStatement(org.apache.asterix.lang.common.base.IReturningStatement) DropDatasetStatement(org.apache.asterix.lang.common.statement.DropDatasetStatement) FeedDropStatement(org.apache.asterix.lang.common.statement.FeedDropStatement) FileSplit(org.apache.hyracks.api.io.FileSplit) UnmanagedFileSplit(org.apache.hyracks.api.io.UnmanagedFileSplit) DatasetNodegroupCardinalityHint(org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) ResultSetId(org.apache.hyracks.api.dataset.ResultSetId) IAWriterFactory(org.apache.hyracks.algebricks.data.IAWriterFactory)
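
Note the thread-renaming pattern above: the method labels the current thread for the duration of the request and restores the original name in the finally block, so a stalled request shows up clearly in a thread dump without mislabeling pooled threads afterward. A minimal sketch of that pattern in isolation, with a hypothetical runRequest helper:

public final class ThreadNaming {

    static void runRequest(Runnable request, String label) {
        Thread current = Thread.currentThread();
        String originalName = current.getName();
        current.setName(label);
        try {
            request.run();
        } finally {
            // Restore even if the request throws, mirroring QueryTranslator.
            current.setName(originalName);
        }
    }

    public static void main(String[] args) {
        runRequest(() -> System.out.println(Thread.currentThread().getName()), "QueryTranslator");
    }
}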

Example 28 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

In class QueryTranslator, method handleCreateDatasetStatement:

public void handleCreateDatasetStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws CompilationException, Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    DatasetDecl dd = (DatasetDecl) stmt;
    String dataverseName = getActiveDataverse(dd.getDataverse());
    String datasetName = dd.getName().getValue();
    DatasetType dsType = dd.getDatasetType();
    String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
    String itemTypeName = dd.getItemTypeName().getValue();
    String metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
    String metaItemTypeName = dd.getMetaItemTypeName().getValue();
    Identifier ngNameId = dd.getNodegroupName();
    String nodegroupName = ngNameId == null ? null : ngNameId.getValue();
    String compactionPolicy = dd.getCompactionPolicy();
    Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
    boolean defaultCompactionPolicy = compactionPolicy == null;
    boolean temp = dd.getDatasetDetailsDecl().isTemp();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createDatasetBegin(metadataProvider.getLocks(), dataverseName, itemTypeDataverseName, itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName, metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
    Dataset dataset = null;
    try {
        IDatasetDetails datasetDetails = null;
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds != null) {
            if (dd.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
            }
        }
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), itemTypeDataverseName, itemTypeName);
        if (dt == null) {
            throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
        }
        String ngName = ngNameId != null ? ngNameId.getValue() : configureNodegroupForDataset(appCtx, dd.getHints(), dataverseName, datasetName, metadataProvider);
        if (compactionPolicy == null) {
            compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
            compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
        } else {
            validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
        }
        switch(dd.getDatasetType()) {
            case INTERNAL:
                IAType itemType = dt.getDatatype();
                if (itemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset type has to be a record type.");
                }
                IAType metaItemType = null;
                if (metaItemTypeDataverseName != null && metaItemTypeName != null) {
                    metaItemType = metadataProvider.findType(metaItemTypeDataverseName, metaItemTypeName);
                }
                if (metaItemType != null && metaItemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset meta type has to be a record type.");
                }
                ARecordType metaRecType = (ARecordType) metaItemType;
                List<List<String>> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs();
                List<Integer> keySourceIndicators = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getKeySourceIndicators();
                boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
                ARecordType aRecordType = (ARecordType) itemType;
                List<IAType> partitioningTypes = ValidateUtil.validatePartitioningExpressions(aRecordType, metaRecType, partitioningExprs, keySourceIndicators, autogenerated);
                List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
                if (filterField != null) {
                    ValidateUtil.validateFilterField(aRecordType, filterField);
                }
                if (compactionPolicy == null && filterField != null) {
                    // If the dataset has a filter and the user didn't specify a merge
                    // policy, then we will pick the
                    // correlated-prefix as the default merge policy.
                    compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
                    compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
                }
                datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE, InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs, keySourceIndicators, partitioningTypes, autogenerated, filterField, temp);
                break;
            case EXTERNAL:
                String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
                Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
                datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(), TransactionState.COMMIT);
                break;
            default:
                throw new CompilationException("Unknown datatype " + dd.getDatasetType());
        }
        // #. initialize DatasetIdFactory if it is not initialized.
        if (!DatasetIdFactory.isInitialized()) {
            DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
        }
        // #. add a new dataset with PendingAddOp
        dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName, metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy, compactionPolicyProperties, datasetDetails, dd.getHints(), dsType, DatasetIdFactory.generateDatasetId(), MetadataUtil.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        if (dd.getDatasetType() == DatasetType.INTERNAL) {
            JobSpecification jobSpec = DatasetUtil.createDatasetJobSpec(dataset, metadataProvider);
            // #. make metadataTxn commit before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
            // #. runJob
            JobUtils.runJob(hcc, jobSpec, true);
            // #. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
        }
        // #. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
        MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
        dataset.setPendingOp(MetadataUtil.PENDING_NO_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations
            // remove the index in NC
            // [Notice]
            // As long as we updated(and committed) metadata, we should remove any effect of the job
            // because an exception occurs during runJob.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                JobSpecification jobSpec = DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ExternalDetailsDecl(org.apache.asterix.lang.common.statement.ExternalDetailsDecl) DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType) IDatasetDetails(org.apache.asterix.metadata.IDatasetDetails) Datatype(org.apache.asterix.metadata.entities.Datatype) DatasetDecl(org.apache.asterix.lang.common.statement.DatasetDecl) Identifier(org.apache.asterix.lang.common.struct.Identifier) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) ArrayList(java.util.ArrayList) List(java.util.List) JobSpecification(org.apache.hyracks.api.job.JobSpecification) MutableObject(org.apache.commons.lang3.mutable.MutableObject) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) InternalDatasetDetails(org.apache.asterix.metadata.entities.InternalDatasetDetails) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) Date(java.util.Date) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) InternalDetailsDecl(org.apache.asterix.lang.common.statement.InternalDetailsDecl) ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType)
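
The create path above follows a pending-operation protocol: commit a metadata record flagged PENDING_ADD_OP, run the physical job outside any metadata transaction, then swap the record for a PENDING_NO_OP version; on failure, the compensation branch drops whatever the job built and removes the pending record. A condensed sketch of that control flow follows; MetadataStore, Txn, and the two Runnables are hypothetical stand-ins, and the error handling is simplified relative to the original:

// Condensed sketch of the pending-op protocol; all types here are hypothetical.
interface MetadataStore {
    Txn begin();
    void addPendingRecord(Txn txn);   // record flagged PENDING_ADD_OP
    void promoteRecord(Txn txn);      // re-add the record as PENDING_NO_OP
    void dropRecord(Txn txn);         // compensation path
}

interface Txn {
    void commit();
    void abort();
}

final class PendingOpProtocol {
    static void create(MetadataStore store, Runnable runJob, Runnable dropJob) throws Exception {
        Txn txn = store.begin();
        boolean addedPendingRecord = false;
        try {
            store.addPendingRecord(txn);
            txn.commit();                  // make the pending record durable before the job
            addedPendingRecord = true;
            runJob.run();                  // physical work happens outside a metadata txn
            Txn promote = store.begin();
            store.promoteRecord(promote);  // swap PENDING_ADD_OP for PENDING_NO_OP
            promote.commit();
        } catch (Exception e) {
            if (!addedPendingRecord) {
                txn.abort();
            } else {
                dropJob.run();             // compensation: undo the job's physical effects
                Txn cleanup = store.begin();
                store.dropRecord(cleanup); // then remove the pending record from metadata
                cleanup.commit();
            }
            throw e;
        }
    }
}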

Example 29 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

In class QueryTranslator, method prepareRunExternalRuntime:

// Prepares to run a program on external runtime.
protected void prepareRunExternalRuntime(MetadataProvider metadataProvider, IHyracksClientConnection hcc, RunStatement pregelixStmt, String dataverseNameFrom, String dataverseNameTo, String datasetNameFrom, String datasetNameTo, MetadataTransactionContext mdTxnCtx) throws Exception {
    // Validates the source/sink dataverses and datasets.
    Dataset fromDataset = metadataProvider.findDataset(dataverseNameFrom, datasetNameFrom);
    if (fromDataset == null) {
        throw new CompilationException("The source dataset " + datasetNameFrom + " in dataverse " + dataverseNameFrom + " could not be found for the Run command");
    }
    Dataset toDataset = metadataProvider.findDataset(dataverseNameTo, datasetNameTo);
    if (toDataset == null) {
        throw new CompilationException("The sink dataset " + datasetNameTo + " in dataverse " + dataverseNameTo + " could not be found for the Run command");
    }
    try {
        // Find the primary index of the sink dataset.
        Index toIndex = null;
        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseNameTo, pregelixStmt.getDatasetNameTo().getValue());
        for (Index index : indexes) {
            if (index.isPrimaryIndex()) {
                toIndex = index;
                break;
            }
        }
        if (toIndex == null) {
            throw new AlgebricksException("Tried to access non-existing dataset: " + datasetNameTo);
        }
        // Cleans up the sink dataset -- Drop and then Create.
        DropDatasetStatement dropStmt = new DropDatasetStatement(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(), true);
        this.handleDatasetDropStatement(metadataProvider, dropStmt, hcc);
        IDatasetDetailsDecl idd = new InternalDetailsDecl(toIndex.getKeyFieldNames(), toIndex.getKeyFieldSourceIndicators(), false, null, toDataset.getDatasetDetails().isTemp());
        DatasetDecl createToDataset = new DatasetDecl(new Identifier(dataverseNameTo), pregelixStmt.getDatasetNameTo(), new Identifier(toDataset.getItemTypeDataverseName()), new Identifier(toDataset.getItemTypeName()), new Identifier(toDataset.getMetaItemTypeDataverseName()), new Identifier(toDataset.getMetaItemTypeName()), new Identifier(toDataset.getNodeGroupName()), toDataset.getCompactionPolicy(), toDataset.getCompactionPolicyProperties(), toDataset.getHints(), toDataset.getDatasetType(), idd, false);
        this.handleCreateDatasetStatement(metadataProvider, createToDataset, hcc);
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, e.getMessage(), e);
        throw new AlgebricksException("Error cleaning the result dataset. This should not happen.");
    }
    // Flushes source dataset.
    FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseNameFrom, datasetNameFrom, datasetNameFrom);
}
Also used : IDatasetDetailsDecl(org.apache.asterix.lang.common.statement.IDatasetDetailsDecl) InternalDetailsDecl(org.apache.asterix.lang.common.statement.InternalDetailsDecl) CompilationException(org.apache.asterix.common.exceptions.CompilationException) DatasetDecl(org.apache.asterix.lang.common.statement.DatasetDecl) Identifier(org.apache.asterix.lang.common.struct.Identifier) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) DropDatasetStatement(org.apache.asterix.lang.common.statement.DropDatasetStatement) Index(org.apache.asterix.metadata.entities.Index) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException)
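
The primary-index lookup above is a linear scan over the dataset's indexes. The same lookup can be written as a stream pipeline; the Index record below is a hypothetical stand-in for org.apache.asterix.metadata.entities.Index, kept only to make the sketch self-contained:

import java.util.List;
import java.util.Optional;

// Hypothetical stand-in for org.apache.asterix.metadata.entities.Index.
record Index(String name, boolean isPrimaryIndex) { }

final class PrimaryIndexLookup {
    static Index findPrimary(List<Index> indexes, String datasetName) {
        Optional<Index> primary = indexes.stream().filter(Index::isPrimaryIndex).findFirst();
        // Mirrors the exception thrown above when no primary index exists.
        return primary.orElseThrow(() ->
                new IllegalStateException("Tried to access non-existing dataset: " + datasetName));
    }

    public static void main(String[] args) {
        List<Index> indexes = List.of(new Index("sec_idx", false), new Index("pk_idx", true));
        System.out.println(findPrimary(indexes, "ToDataset").name());
    }
}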

Example 30 with CompilationException

Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.

In class Dataset, method getResourceFactory:

/**
     * Create the index dataflow helper factory for a particular index on the dataset
     *
     * @param mdProvider
     *            metadata provider to get metadata information, components, and runtimes
     * @param index
     *            the index to get the dataflow helper factory for
     * @param recordType
     *            the record type for the dataset
     * @param metaType
     *            the meta type for the dataset
     * @param mergePolicyFactory
     *            the merge policy factory of the dataset
     * @param mergePolicyProperties
     *            the merge policy properties for the dataset
     * @return indexDataflowHelperFactory
     *         an instance of {@link org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory}
     * @throws AlgebricksException
     *             if dataflow helper factory could not be created
     */
public IResourceFactory getResourceFactory(MetadataProvider mdProvider, Index index, ARecordType recordType, ARecordType metaType, ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyProperties) throws AlgebricksException {
    ITypeTraits[] filterTypeTraits = DatasetUtil.computeFilterTypeTraits(this, recordType);
    IBinaryComparatorFactory[] filterCmpFactories = DatasetUtil.computeFilterBinaryComparatorFactories(this, recordType, mdProvider.getStorageComponentProvider().getComparatorFactoryProvider());
    IResourceFactory resourceFactory;
    switch(index.getIndexType()) {
        case BTREE:
            resourceFactory = bTreeResourceFactoryProvider.getResourceFactory(mdProvider, this, index, recordType, metaType, mergePolicyFactory, mergePolicyProperties, filterTypeTraits, filterCmpFactories);
            break;
        case RTREE:
            resourceFactory = rTreeResourceFactoryProvider.getResourceFactory(mdProvider, this, index, recordType, metaType, mergePolicyFactory, mergePolicyProperties, filterTypeTraits, filterCmpFactories);
            break;
        case LENGTH_PARTITIONED_NGRAM_INVIX:
        case LENGTH_PARTITIONED_WORD_INVIX:
        case SINGLE_PARTITION_NGRAM_INVIX:
        case SINGLE_PARTITION_WORD_INVIX:
            resourceFactory = invertedIndexResourceFactoryProvider.getResourceFactory(mdProvider, this, index, recordType, metaType, mergePolicyFactory, mergePolicyProperties, filterTypeTraits, filterCmpFactories);
            break;
        default:
            throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE, index.getIndexType().toString());
    }
    return new DatasetLocalResourceFactory(datasetId, resourceFactory);
}
Also used : CompilationException(org.apache.asterix.common.exceptions.CompilationException) DatasetLocalResourceFactory(org.apache.asterix.transaction.management.resource.DatasetLocalResourceFactory) ITypeTraits(org.apache.hyracks.api.dataflow.value.ITypeTraits) IBinaryComparatorFactory(org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory) IResourceFactory(org.apache.hyracks.storage.common.IResourceFactory)
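
getResourceFactory is a type-switch factory: one resource factory provider per index family, with the four inverted-index variants deliberately falling through to a shared provider. A reduced sketch of that dispatch shape follows; the IndexType constants mirror the enum values used above, while the returned strings are placeholders for the actual provider calls:

// Reduced sketch of the dispatch in getResourceFactory; the strings are placeholders.
enum IndexType {
    BTREE, RTREE,
    LENGTH_PARTITIONED_NGRAM_INVIX, LENGTH_PARTITIONED_WORD_INVIX,
    SINGLE_PARTITION_NGRAM_INVIX, SINGLE_PARTITION_WORD_INVIX
}

final class ResourceFactoryDispatch {
    static String select(IndexType type) {
        switch (type) {
            case BTREE:
                return "btree-resource-factory";
            case RTREE:
                return "rtree-resource-factory";
            case LENGTH_PARTITIONED_NGRAM_INVIX:
            case LENGTH_PARTITIONED_WORD_INVIX:
            case SINGLE_PARTITION_NGRAM_INVIX:
            case SINGLE_PARTITION_WORD_INVIX:
                // All four inverted-index variants share one provider, as above.
                return "inverted-index-resource-factory";
            default:
                throw new IllegalArgumentException("Unknown index type: " + type);
        }
    }

    public static void main(String[] args) {
        System.out.println(select(IndexType.SINGLE_PARTITION_WORD_INVIX));
    }
}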

Aggregations

CompilationException (org.apache.asterix.common.exceptions.CompilationException): 44 uses
ArrayList (java.util.ArrayList): 13 uses
IAType (org.apache.asterix.om.types.IAType): 12 uses
ARecordType (org.apache.asterix.om.types.ARecordType): 9 uses
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 9 uses
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 8 uses
IOException (java.io.IOException): 7 uses
List (java.util.List): 7 uses
FunctionSignature (org.apache.asterix.common.functions.FunctionSignature): 7 uses
MetadataException (org.apache.asterix.metadata.MetadataException): 7 uses
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 7 uses
ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits): 7 uses
RemoteException (java.rmi.RemoteException): 6 uses
ACIDException (org.apache.asterix.common.exceptions.ACIDException): 6 uses
Expression (org.apache.asterix.lang.common.base.Expression): 6 uses
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 6 uses
IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory): 6 uses
ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener): 5 uses
IDataset (org.apache.asterix.common.metadata.IDataset): 5 uses
Dataset (org.apache.asterix.metadata.entities.Dataset): 5 uses