Search in sources :

Example 36 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

In the class QueryTranslator, the method handlePregelixStatement.

/**
 * Runs an external Pregelix job that reads from one dataset and writes into another.
 * The metadata transaction only covers job preparation; it is committed before the
 * external process is launched, so a failing Pregelix run does not abort metadata.
 */
protected void handlePregelixStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    RunStatement runStmt = (RunStatement) stmt;
    String fromDataverse = getActiveDataverse(runStmt.getDataverseNameFrom());
    String toDataverse = getActiveDataverse(runStmt.getDataverseNameTo());
    String fromDataset = runStmt.getDatasetNameFrom().getValue();
    String toDataset = runStmt.getDatasetNameTo().getValue();
    // Qualify the target dataset name with its dataverse unless it already is qualified.
    String qualifiedTarget = DatasetUtil.isFullyQualifiedName(toDataset) ? toDataset : toDataverse + '.' + toDataset;
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean txnActive = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(metadataProvider.getLocks(), qualifiedTarget);
    try {
        prepareRunExternalRuntime(metadataProvider, hcc, runStmt, fromDataverse, toDataverse, fromDataset, toDataset, mdTxnCtx);
        // Resolve PREGELIX_HOME: environment variable first, then JVM system property,
        // finally the AsterixDB compiler configuration (which carries a default, so the
        // result is never null at this point).
        final String homeKey = "PREGELIX_HOME";
        String pregelixHome = System.getenv(homeKey);
        if (pregelixHome == null) {
            pregelixHome = System.getProperty(homeKey);
        }
        if (pregelixHome == null) {
            pregelixHome = appCtx.getCompilerProperties().getPregelixHome();
        }
        // Build the external command line and its working directory.
        List<String> cmd = constructPregelixCommand(runStmt, fromDataverse, fromDataset, toDataverse, toDataset);
        ProcessBuilder pb = new ProcessBuilder(cmd);
        pb.directory(new File(pregelixHome));
        pb.redirectErrorStream(true);
        // Commit before launching the external process; from here on a failure must
        // not trigger a metadata abort.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        txnActive = false;
        int exitCode = executeExternalShellProgram(pb);
        if (exitCode != 0) {
            throw new AlgebricksException("Something went wrong executing your Pregelix Job. Perhaps the Pregelix cluster " + "needs to be restarted. " + "Check the following things: Are the datatypes of Asterix and Pregelix matching? " + "Is the server configuration correct (node names, buffer sizes, framesize)? " + "Check the logfiles for more details.");
        }
    } catch (Exception e) {
        if (txnActive) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : RunStatement(org.apache.asterix.lang.common.statement.RunStatement) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ExternalFile(org.apache.asterix.external.indexing.ExternalFile) File(java.io.File) DatasetNodegroupCardinalityHint(org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException)

Example 37 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

In the class QueryTranslator, the method handleCompactStatement.

/**
 * Handles a COMPACT statement: builds and runs compaction jobs for a dataset and
 * all of its indexes. The metadata transaction is committed before the Hyracks
 * jobs run; a job failure after that point does not abort metadata.
 *
 * Fix: corrected the misspelled error message "extrenal dataset" -> "external dataset".
 */
protected void handleCompactStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    CompactStatement compactStatement = (CompactStatement) stmt;
    String dataverseName = getActiveDataverse(compactStatement.getDataverseName());
    String datasetName = compactStatement.getDatasetName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    List<JobSpecification> jobsToExecute = new ArrayList<>();
    MetadataLockManager.INSTANCE.compactBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
    try {
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName + ".");
        }
        // Prepare jobs to compact the dataset and its indexes.
        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
        if (indexes.isEmpty()) {
            // Only an external dataset can have no indexes at all; internal
            // datasets always carry at least their primary index.
            throw new AlgebricksException("Cannot compact the external dataset " + datasetName + " because it has no indexes");
        }
        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dataverseName);
        jobsToExecute.add(DatasetUtil.compactDatasetJobSpec(dataverse, datasetName, metadataProvider));
        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            // Internal dataset: one compaction job per secondary index.
            for (Index index : indexes) {
                if (index.isSecondaryIndex()) {
                    jobsToExecute.add(IndexUtil.buildSecondaryIndexCompactJobSpec(ds, index, metadataProvider));
                }
            }
        } else {
            prepareCompactJobsForExternalDataset(indexes, ds, jobsToExecute, metadataProvider);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        // #. run the jobs
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
        ExternalDatasetsRegistry.INSTANCE.releaseAcquiredLocks(metadataProvider);
    }
}
Also used : IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) ArrayList(java.util.ArrayList) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Index(org.apache.asterix.metadata.entities.Index) Dataverse(org.apache.asterix.metadata.entities.Dataverse) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) CompactStatement(org.apache.asterix.lang.common.statement.CompactStatement) JobSpecification(org.apache.hyracks.api.job.JobSpecification)

Example 38 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

In the class QueryTranslator, the method handleCreateTypeStatement.

/**
 * Handles a CREATE TYPE statement: registers a new datatype in the metadata,
 * honoring IF NOT EXISTS and refusing to shadow builtin types.
 */
protected void handleCreateTypeStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    TypeDecl createTypeStmt = (TypeDecl) stmt;
    String dataverseName = getActiveDataverse(createTypeStmt.getDataverseName());
    String typeName = createTypeStmt.getIdent().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createTypeBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + typeName);
    try {
        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
        if (dataverse == null) {
            throw new AlgebricksException("Unknown dataverse " + dataverseName);
        }
        Datatype existing = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
        if (existing != null) {
            // Already present: only an error when IF NOT EXISTS was not specified.
            if (!createTypeStmt.getIfNotExists()) {
                throw new AlgebricksException("A datatype with this name " + typeName + " already exists.");
            }
        } else if (BuiltinTypeMap.getBuiltinType(typeName) != null) {
            // Builtin type names are reserved.
            throw new AlgebricksException("Cannot redefine builtin type " + typeName + ".");
        } else {
            // Translate the declared type definition and store it in the metadata.
            Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, createTypeStmt.getTypeDef(), createTypeStmt.getIdent().getValue(), dataverseName);
            TypeSignature signature = new TypeSignature(dataverseName, typeName);
            IAType resolvedType = typeMap.get(signature);
            MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, resolvedType, false));
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : TypeSignature(org.apache.asterix.om.types.TypeSignature) TypeDecl(org.apache.asterix.lang.common.statement.TypeDecl) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Dataverse(org.apache.asterix.metadata.entities.Dataverse) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) Datatype(org.apache.asterix.metadata.entities.Datatype) IAType(org.apache.asterix.om.types.IAType)

Example 39 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

In the class RebalanceUtil, the method rebalanceSwitch.

/**
 * Makes the rebalance target dataset take the place of the source dataset in the
 * metadata, dropping the source's files and its now-unused node group. If the
 * source was dropped concurrently, the target's files are discarded instead.
 */
private static void rebalanceSwitch(Dataset source, Dataset target, MetadataProvider metadataProvider, IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
    // Take the metadata write lock on the source/target dataset.
    writeLockDataset(metadataProvider.getLocks(), source);
    Dataset currentSource = MetadataManagerUtil.findDataset(mdTxnCtx, source.getDataverseName(), source.getDatasetName());
    if (currentSource == null) {
        // The dataset was dropped in the meantime; the generated target files
        // are orphans and must be removed.
        dropDatasetFiles(target, metadataProvider, hcc);
        return;
    }
    // Remove the source dataset's files.
    dropDatasetFiles(source, metadataProvider, hcc);
    // Point the metadata entry at the rebalanced target.
    MetadataManager.INSTANCE.updateDataset(mdTxnCtx, target);
    // The source's node group is no longer referenced: drop its metadata entry.
    String sourceNodeGroup = source.getNodeGroupName();
    MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), sourceNodeGroup);
    MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, sourceNodeGroup, true);
}
Also used : Dataset(org.apache.asterix.metadata.entities.Dataset) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext)

Example 40 with MetadataTransactionContext

use of org.apache.asterix.metadata.MetadataTransactionContext in project asterixdb by apache.

In the class RebalanceUtil, the method rebalance.

/**
     * Rebalances an existing dataset onto a given set of target nodes.
     *
     * The work happens in two metadata transactions: the first creates the
     * rebalance target and copies the data into it; the second atomically
     * switches the target in as the new source dataset.
     *
     * @param dataverseName
     *            the dataverse name.
     * @param datasetName
     *            the dataset name.
     * @param targetNcNames
     *            the set of target node names.
     * @param metadataProvider
     *            the metadata provider.
     * @param hcc
     *            the reusable Hyracks connection.
     * @throws Exception
     */
public static void rebalance(String dataverseName, String datasetName, Set<String> targetNcNames, MetadataProvider metadataProvider, IHyracksClientConnection hcc) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    Dataset sourceDataset;
    Dataset targetDataset;
    // First transaction: build and populate the rebalance target while holding
    // locks so that no one can drop the rebalance source dataset.
    try {
        sourceDataset = metadataProvider.findDataset(dataverseName, datasetName);
        if (sourceDataset == null) {
            // The source dataset doesn't exist: nothing to rebalance.
            return;
        }
        Set<String> currentNodes = new HashSet<>(metadataProvider.findNodes(sourceDataset.getNodeGroupName()));
        if (currentNodes.equals(targetNcNames)) {
            // The dataset already lives on exactly the requested nodes.
            return;
        }
        // Create a dedicated node group for the rebalanced dataset.
        String newNodeGroup = DatasetUtil.createNodeGroupForNewDataset(sourceDataset.getDataverseName(), sourceDataset.getDatasetName(), sourceDataset.getRebalanceCount() + 1, targetNcNames, metadataProvider);
        targetDataset = new Dataset(sourceDataset, true, newNodeGroup);
        // Copy the source dataset's contents into the target.
        rebalance(sourceDataset, targetDataset, metadataProvider, hcc);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().reset();
    }
    // Second transaction: atomically switch the target in as the source dataset.
    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    try {
        rebalanceSwitch(sourceDataset, targetDataset, metadataProvider, hcc);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().reset();
    }
}
Also used : Dataset(org.apache.asterix.metadata.entities.Dataset) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) AsterixException(org.apache.asterix.common.exceptions.AsterixException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HashSet(java.util.HashSet)

Aggregations

MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)41 RemoteException (java.rmi.RemoteException)32 ACIDException (org.apache.asterix.common.exceptions.ACIDException)32 IOException (java.io.IOException)30 AsterixException (org.apache.asterix.common.exceptions.AsterixException)30 MetadataException (org.apache.asterix.metadata.MetadataException)30 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)29 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)28 CompilationException (org.apache.asterix.common.exceptions.CompilationException)27 Dataset (org.apache.asterix.metadata.entities.Dataset)14 JobSpecification (org.apache.hyracks.api.job.JobSpecification)12 ArrayList (java.util.ArrayList)11 Dataverse (org.apache.asterix.metadata.entities.Dataverse)11 IDataset (org.apache.asterix.common.metadata.IDataset)8 IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset)8 ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler)7 ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener)7 EntityId (org.apache.asterix.active.EntityId)6 Function (org.apache.asterix.metadata.entities.Function)6 ARecordType (org.apache.asterix.om.types.ARecordType)6