Example 6 with IStorageComponentProvider

Use of org.apache.asterix.common.context.IStorageComponentProvider in project asterixdb by apache.

In the class ExecuteStatementRequestMessage, the method handle:

@Override
public void handle(ICcApplicationContext ccAppCtx) throws HyracksDataException, InterruptedException {
    ICCServiceContext ccSrvContext = ccAppCtx.getServiceContext();
    ClusterControllerService ccSrv = (ClusterControllerService) ccSrvContext.getControllerService();
    CCApplication ccApp = (CCApplication) ccSrv.getApplication();
    CCMessageBroker messageBroker = (CCMessageBroker) ccSrvContext.getMessageBroker();
    CCExtensionManager ccExtMgr = (CCExtensionManager) ccAppCtx.getExtensionManager();
    ILangCompilationProvider compilationProvider = ccExtMgr.getCompilationProvider(lang);
    IStorageComponentProvider storageComponentProvider = ccAppCtx.getStorageComponentProvider();
    IStatementExecutorFactory statementExecutorFactory = ccApp.getStatementExecutorFactory();
    IStatementExecutorContext statementExecutorContext = ccApp.getStatementExecutorContext();
    ccSrv.getExecutor().submit(() -> {
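        // Compile and execute asynchronously on the CC's executor; the response is sent back to the requesting NC below.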
        ExecuteStatementResponseMessage responseMsg = new ExecuteStatementResponseMessage(requestMessageId);
        try {
            final IClusterManagementWork.ClusterState clusterState = ClusterStateManager.INSTANCE.getState();
            if (clusterState != IClusterManagementWork.ClusterState.ACTIVE) {
                throw new IllegalStateException("Cannot execute request, cluster is " + clusterState);
            }
            IParser parser = compilationProvider.getParserFactory().createParser(statementsText);
            List<Statement> statements = parser.parse();
            StringWriter outWriter = new StringWriter(256);
            PrintWriter outPrinter = new PrintWriter(outWriter);
            SessionOutput.ResultDecorator resultPrefix = ResultUtil.createPreResultDecorator();
            SessionOutput.ResultDecorator resultPostfix = ResultUtil.createPostResultDecorator();
            SessionOutput.ResultAppender appendHandle = ResultUtil.createResultHandleAppender(handleUrl);
            SessionOutput.ResultAppender appendStatus = ResultUtil.createResultStatusAppender();
            SessionOutput sessionOutput = new SessionOutput(sessionConfig, outPrinter, resultPrefix, resultPostfix, appendHandle, appendStatus);
            IStatementExecutor.ResultMetadata outMetadata = new IStatementExecutor.ResultMetadata();
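            // Ensure the metadata system is initialized before creating and running the statement executor.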
            MetadataManager.INSTANCE.init();
            IStatementExecutor translator = statementExecutorFactory.create(ccAppCtx, statements, sessionOutput, compilationProvider, storageComponentProvider);
            translator.compileAndExecute(ccAppCtx.getHcc(), null, delivery, outMetadata, new IStatementExecutor.Stats(), clientContextID, statementExecutorContext);
            outPrinter.close();
            responseMsg.setResult(outWriter.toString());
            responseMsg.setMetadata(outMetadata);
        } catch (AlgebricksException | HyracksException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
            // we trust that "our" exceptions are serializable and have a comprehensible error message
            GlobalConfig.ASTERIX_LOGGER.log(Level.WARNING, pe.getMessage(), pe);
            responseMsg.setError(pe);
        } catch (Exception e) {
            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Unexpected exception", e);
            responseMsg.setError(new Exception(e.toString()));
        }
        try {
            messageBroker.sendApplicationMessageToNC(responseMsg, requestNodeId);
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, e.toString(), e);
        }
    });
}
Also used:
ICCServiceContext (org.apache.hyracks.api.application.ICCServiceContext)
CCApplication (org.apache.asterix.hyracks.bootstrap.CCApplication)
HyracksException (org.apache.hyracks.api.exceptions.HyracksException)
StringWriter (java.io.StringWriter)
ILangCompilationProvider (org.apache.asterix.compiler.provider.ILangCompilationProvider)
IStatementExecutorFactory (org.apache.asterix.translator.IStatementExecutorFactory)
PrintWriter (java.io.PrintWriter)
IStorageComponentProvider (org.apache.asterix.common.context.IStorageComponentProvider)
IStatementExecutorContext (org.apache.asterix.translator.IStatementExecutorContext)
Statement (org.apache.asterix.lang.common.base.Statement)
CCMessageBroker (org.apache.asterix.messaging.CCMessageBroker)
IClusterManagementWork (org.apache.asterix.common.api.IClusterManagementWork)
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
TokenMgrError (org.apache.asterix.lang.aql.parser.TokenMgrError)
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
CCExtensionManager (org.apache.asterix.app.cc.CCExtensionManager)
IStatementExecutor (org.apache.asterix.translator.IStatementExecutor)
SessionOutput (org.apache.asterix.translator.SessionOutput)
ClusterControllerService (org.apache.hyracks.control.cc.ClusterControllerService)
IParser (org.apache.asterix.lang.common.base.IParser)
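Distilled, Example 6's dependency lookup reduces to a few calls on the long-lived CC application context. Below is a minimal sketch of that pattern, not code from the project: the helper name createTranslator is hypothetical, the type of lang (assumed to be ILangExtension.Language) is an assumption, and the fully qualified names of the remaining types appear in the list above.

// Hypothetical helper condensing the lookups performed in handle() above.
static IStatementExecutor createTranslator(ICcApplicationContext ccAppCtx, CCApplication ccApp,
        ILangExtension.Language lang, List<Statement> statements, SessionOutput sessionOutput)
        throws Exception {
    // The extension manager maps the statement language to its compilation provider.
    CCExtensionManager ccExtMgr = (CCExtensionManager) ccAppCtx.getExtensionManager();
    ILangCompilationProvider compilationProvider = ccExtMgr.getCompilationProvider(lang);
    // The storage component provider is shared state owned by the application context.
    IStorageComponentProvider storageComponentProvider = ccAppCtx.getStorageComponentProvider();
    // Same five-argument create(...) call as in the example.
    return ccApp.getStatementExecutorFactory()
            .create(ccAppCtx, statements, sessionOutput, compilationProvider, storageComponentProvider);
}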

Example 7 with IStorageComponentProvider

Use of org.apache.asterix.common.context.IStorageComponentProvider in project asterixdb by apache.

In the class QueryTranslator, the method handleStartFeedStatement:

private void handleStartFeedStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    StartFeedStatement sfs = (StartFeedStatement) stmt;
    String dataverseName = getActiveDataverse(sfs.getDataverseName());
    String feedName = sfs.getFeedName().getValue();
    // Transaction handler
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Runtime handler
    EntityId entityId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
    // Feed & Feed Connections
    Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName, metadataProvider.getMetadataTxnContext());
    List<FeedConnection> feedConnections = MetadataManager.INSTANCE.getFeedConections(metadataProvider.getMetadataTxnContext(), dataverseName, feedName);
    ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
    IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
    DefaultStatementExecutorFactory qtFactory = new DefaultStatementExecutorFactory();
    ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
    ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
    FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(entityId);
    if (listener != null) {
        throw new AlgebricksException("Feed " + feedName + " is started already.");
    }
    // Start
    MetadataLockManager.INSTANCE.startFeedBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + feedName, feedConnections);
    try {
        // Prepare policy
        List<IDataset> datasets = new ArrayList<>();
        for (FeedConnection connection : feedConnections) {
            Dataset ds = metadataProvider.findDataset(connection.getDataverseName(), connection.getDatasetName());
            datasets.add(ds);
        }
        org.apache.commons.lang3.tuple.Pair<JobSpecification, AlgebricksAbsolutePartitionConstraint> jobInfo = FeedOperations.buildStartFeedJob(sessionOutput, metadataProvider, feed, feedConnections, compilationProvider, storageComponentProvider, qtFactory, hcc);
        JobSpecification feedJob = jobInfo.getLeft();
        listener = new FeedEventsListener(appCtx, entityId, datasets, jobInfo.getRight().getLocations());
        activeEventHandler.registerListener(listener);
        IActiveEventSubscriber eventSubscriber = listener.subscribe(ActivityState.STARTED);
        feedJob.setProperty(ActiveJobNotificationHandler.ACTIVE_ENTITY_PROPERTY_NAME, entityId);
        JobUtils.runJob(hcc, feedJob, Boolean.valueOf(metadataProvider.getConfig().get(StartFeedStatement.WAIT_FOR_COMPLETION)));
        eventSubscriber.sync();
        LOGGER.log(Level.INFO, "Submitted");
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        if (listener != null) {
            activeEventHandler.unregisterListener(listener);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used:
FeedEventsListener (org.apache.asterix.external.feed.management.FeedEventsListener)
ArrayList (java.util.ArrayList)
MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext)
ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener)
ILangCompilationProvider (org.apache.asterix.compiler.provider.ILangCompilationProvider)
IDataset (org.apache.asterix.common.metadata.IDataset)
JobSpecification (org.apache.hyracks.api.job.JobSpecification)
ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler)
Feed (org.apache.asterix.metadata.entities.Feed)
AqlCompilationProvider (org.apache.asterix.compiler.provider.AqlCompilationProvider)
IStorageComponentProvider (org.apache.asterix.common.context.IStorageComponentProvider)
FeedConnection (org.apache.asterix.metadata.entities.FeedConnection)
IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset)
Dataset (org.apache.asterix.metadata.entities.Dataset)
IActiveEventSubscriber (org.apache.asterix.active.IActiveEventSubscriber)
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
StorageComponentProvider (org.apache.asterix.file.StorageComponentProvider)
ACIDException (org.apache.asterix.common.exceptions.ACIDException)
MetadataException (org.apache.asterix.metadata.MetadataException)
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
CompilationException (org.apache.asterix.common.exceptions.CompilationException)
IOException (java.io.IOException)
RemoteException (java.rmi.RemoteException)
AsterixException (org.apache.asterix.common.exceptions.AsterixException)
EntityId (org.apache.asterix.active.EntityId)
StartFeedStatement (org.apache.asterix.lang.common.statement.StartFeedStatement)
AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint)
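Note the contrast with Example 6: there the IStorageComponentProvider was looked up from the application context, while here it is constructed directly for a one-off feed job. A minimal sketch of the two acquisition styles, using only constructors and calls shown in these two examples (fromContext and constructed are illustrative names; ccAppCtx stands for the ICcApplicationContext of Example 6):

// (a) lookup style: reuse the provider owned by the application context
IStorageComponentProvider fromContext = ccAppCtx.getStorageComponentProvider();
// (b) construction style: build fresh default providers, as handleStartFeedStatement does
ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
IStorageComponentProvider constructed = new StorageComponentProvider();
DefaultStatementExecutorFactory qtFactory = new DefaultStatementExecutorFactory();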

Example 8 with IStorageComponentProvider

Use of org.apache.asterix.common.context.IStorageComponentProvider in project asterixdb by apache.

In the class DatasetUtil, the method createPrimaryIndexUpsertOp:

/**
     * Creates a primary index upsert operator for a given dataset.
     *
     * @param spec
     *            the job specification.
     * @param metadataProvider
     *            the metadata provider.
     * @param dataset
     *            the dataset to upsert.
     * @param inputRecordDesc
     *            the record descriptor for an input tuple.
     * @param fieldPermutation
     *            the field permutation according to the input.
     * @param missingWriterFactory
     *            the factory for customizing missing value serialization.
     * @return a primary index upsert operator and its location constraints.
     * @throws AlgebricksException
     */
public static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> createPrimaryIndexUpsertOp(JobSpecification spec, MetadataProvider metadataProvider, Dataset dataset, RecordDescriptor inputRecordDesc, int[] fieldPermutation, IMissingWriterFactory missingWriterFactory) throws AlgebricksException {
    int numKeys = dataset.getPrimaryKeys().size();
    int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
    ARecordType itemType = (ARecordType) metadataProvider.findType(dataset);
    ARecordType metaItemType = (ARecordType) metadataProvider.findMetaType(dataset);
    try {
        Index primaryIndex = metadataProvider.getIndex(dataset.getDataverseName(), dataset.getDatasetName(), dataset.getDatasetName());
        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider.getSplitProviderAndConstraints(dataset);
        // prepare callback
        JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
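        // the callbacks below treat the first numKeys fields of the input as the primary key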
        int[] primaryKeyFields = new int[numKeys];
        for (int i = 0; i < numKeys; i++) {
            primaryKeyFields[i] = i;
        }
        boolean hasSecondaries = metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName()).size() > 1;
        IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
        IModificationOperationCallbackFactory modificationCallbackFactory = dataset.getModificationCallbackFactory(storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
        ISearchOperationCallbackFactory searchCallbackFactory = dataset.getSearchCallbackFactory(storageComponentProvider, primaryIndex, jobId, IndexOperation.UPSERT, primaryKeyFields);
        IIndexDataflowHelperFactory idfh = new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
        LSMPrimaryUpsertOperatorDescriptor op;
        ITypeTraits[] outputTypeTraits = new ITypeTraits[inputRecordDesc.getFieldCount() + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
        ISerializerDeserializer<?>[] outputSerDes = new ISerializerDeserializer[inputRecordDesc.getFieldCount() + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
        // add the previous record first
        int f = 0;
        outputTypeTraits[f] = FormatUtils.getDefaultFormat().getTypeTraitProvider().getTypeTrait(itemType);
        outputSerDes[f] = FormatUtils.getDefaultFormat().getSerdeProvider().getSerializerDeserializer(itemType);
        f++;
        // add the previous meta second
        if (dataset.hasMetaPart()) {
            outputSerDes[f] = FormatUtils.getDefaultFormat().getSerdeProvider().getSerializerDeserializer(metaItemType);
            outputTypeTraits[f] = FormatUtils.getDefaultFormat().getTypeTraitProvider().getTypeTrait(metaItemType);
            f++;
        }
        // add the previous filter third
        int fieldIdx = -1;
        if (numFilterFields > 0) {
            String filterField = DatasetUtil.getFilterField(dataset).get(0);
            String[] fieldNames = itemType.getFieldNames();
            int i = 0;
            for (; i < fieldNames.length; i++) {
                if (fieldNames[i].equals(filterField)) {
                    break;
                }
            }
            fieldIdx = i;
            outputTypeTraits[f] = FormatUtils.getDefaultFormat().getTypeTraitProvider().getTypeTrait(itemType.getFieldTypes()[fieldIdx]);
            outputSerDes[f] = FormatUtils.getDefaultFormat().getSerdeProvider().getSerializerDeserializer(itemType.getFieldTypes()[fieldIdx]);
            f++;
        }
        for (int j = 0; j < inputRecordDesc.getFieldCount(); j++) {
            outputTypeTraits[j + f] = inputRecordDesc.getTypeTraits()[j];
            outputSerDes[j + f] = inputRecordDesc.getFields()[j];
        }
        RecordDescriptor outputRecordDesc = new RecordDescriptor(outputSerDes, outputTypeTraits);
        op = new LSMPrimaryUpsertOperatorDescriptor(spec, outputRecordDesc, fieldPermutation, idfh, missingWriterFactory, modificationCallbackFactory, searchCallbackFactory, dataset.getFrameOpCallbackFactory(), numKeys, itemType, fieldIdx, hasSecondaries);
        return new Pair<>(op, splitsAndConstraint.second);
    } catch (MetadataException me) {
        throw new AlgebricksException(me);
    }
}
Also used:
LSMPrimaryUpsertOperatorDescriptor (org.apache.asterix.runtime.operators.LSMPrimaryUpsertOperatorDescriptor)
IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider)
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor)
Index (org.apache.asterix.metadata.entities.Index)
AMutableString (org.apache.asterix.om.base.AMutableString)
AString (org.apache.asterix.om.base.AString)
MetadataException (org.apache.asterix.metadata.MetadataException)
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint)
IIndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory)
IndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory)
JobId (org.apache.asterix.common.transactions.JobId)
Pair (org.apache.hyracks.algebricks.common.utils.Pair)
IStorageComponentProvider (org.apache.asterix.common.context.IStorageComponentProvider)
ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits)
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)
JobEventListenerFactory (org.apache.asterix.runtime.job.listener.JobEventListenerFactory)
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer)
ISearchOperationCallbackFactory (org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory)
IModificationOperationCallbackFactory (org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory)
ARecordType (org.apache.asterix.om.types.ARecordType)
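The subtle part of the method above is the layout of the operator's output record. A short recap of the arithmetic, using only values computed in the example (outputWidth is an illustrative name):

// Field order produced by createPrimaryIndexUpsertOp:
//   [0]          the previous record (itemType)
//   [1]          the previous meta record (metaItemType), only if dataset.hasMetaPart()
//   [next]       the previous filter value, only if the dataset declares a filter field
//   [remaining]  the input tuple's fields, copied through unchanged
// Hence the width used to size outputTypeTraits and outputSerDes:
int outputWidth = inputRecordDesc.getFieldCount() + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields;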

Example 9 with IStorageComponentProvider

Use of org.apache.asterix.common.context.IStorageComponentProvider in project asterixdb by apache.

In the class RTreeResourceFactoryProvider, the method getResourceFactory:

@Override
public IResourceFactory getResourceFactory(MetadataProvider mdProvider, Dataset dataset, Index index, ARecordType recordType, ARecordType metaType, ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyProperties, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories) throws AlgebricksException {
    if (index.getKeyFieldNames().size() != 1) {
        throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_NUM_OF_FIELD, index.getKeyFieldNames().size(), index.getIndexType(), 1);
    }
    IAType spatialType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0), index.getKeyFieldNames().get(0), recordType).first;
    if (spatialType == null) {
        throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND, StringUtils.join(index.getKeyFieldNames().get(0), '.'));
    }
    List<List<String>> primaryKeyFields = dataset.getPrimaryKeys();
    int numPrimaryKeys = primaryKeyFields.size();
    ITypeTraits[] primaryTypeTraits = null;
    IBinaryComparatorFactory[] primaryComparatorFactories = null;
    IStorageComponentProvider storageComponentProvider = mdProvider.getStorageComponentProvider();
    if (dataset.getDatasetType() == DatasetType.INTERNAL) {
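        // internal datasets derive primary-key comparators and type traits from the record (or meta) type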
        primaryTypeTraits = new ITypeTraits[numPrimaryKeys + 1 + (dataset.hasMetaPart() ? 1 : 0)];
        primaryComparatorFactories = new IBinaryComparatorFactory[numPrimaryKeys];
        List<Integer> indicators = null;
        if (dataset.hasMetaPart()) {
            indicators = ((InternalDatasetDetails) dataset.getDatasetDetails()).getKeySourceIndicator();
        }
        for (int i = 0; i < numPrimaryKeys; i++) {
            IAType keyType = (indicators == null || indicators.get(i) == 0) ? recordType.getSubFieldType(primaryKeyFields.get(i)) : metaType.getSubFieldType(primaryKeyFields.get(i));
            primaryComparatorFactories[i] = storageComponentProvider.getComparatorFactoryProvider().getBinaryComparatorFactory(keyType, true);
            primaryTypeTraits[i] = storageComponentProvider.getTypeTraitProvider().getTypeTrait(keyType);
        }
        primaryTypeTraits[numPrimaryKeys] = storageComponentProvider.getTypeTraitProvider().getTypeTrait(recordType);
        if (dataset.hasMetaPart()) {
            primaryTypeTraits[numPrimaryKeys + 1] = storageComponentProvider.getTypeTraitProvider().getTypeTrait(recordType);
        }
    }
    boolean isPointMBR = spatialType.getTypeTag() == ATypeTag.POINT || spatialType.getTypeTag() == ATypeTag.POINT3D;
    int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
    int numNestedSecondaryKeyFields = numDimensions * 2;
    IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
    IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
    ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
    IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
    ATypeTag keyType = nestedKeyType.getTypeTag();
    for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
        secondaryComparatorFactories[i] = storageComponentProvider.getComparatorFactoryProvider().getBinaryComparatorFactory(nestedKeyType, true);
        secondaryTypeTraits[i] = storageComponentProvider.getTypeTraitProvider().getTypeTrait(nestedKeyType);
        valueProviderFactories[i] = storageComponentProvider.getPrimitiveValueProviderFactory();
    }
    for (int i = 0; i < numPrimaryKeys; i++) {
        secondaryTypeTraits[numNestedSecondaryKeyFields + i] = (dataset.getDatasetType() == DatasetType.INTERNAL) ? primaryTypeTraits[i] : IndexingConstants.getTypeTraits(i);
    }
    int[] rtreeFields = null;
    if (filterTypeTraits != null && filterTypeTraits.length > 0) {
        rtreeFields = new int[numNestedSecondaryKeyFields + numPrimaryKeys];
        for (int i = 0; i < rtreeFields.length; i++) {
            rtreeFields[i] = i;
        }
    }
    IStorageManager storageManager = storageComponentProvider.getStorageManager();
    ILSMOperationTrackerFactory opTrackerFactory = dataset.getIndexOperationTrackerFactory(index);
    ILSMIOOperationCallbackFactory ioOpCallbackFactory = dataset.getIoOperationCallbackFactory(index);
    IMetadataPageManagerFactory metadataPageManagerFactory = storageComponentProvider.getMetadataPageManagerFactory();
    ILSMIOOperationSchedulerProvider ioSchedulerProvider = storageComponentProvider.getIoOperationSchedulerProvider();
    boolean durable = !dataset.isTemp();
    ILinearizeComparatorFactory linearizeCmpFactory = MetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length);
    ITypeTraits[] typeTraits = getTypeTraits(mdProvider, dataset, index, recordType, metaType);
    IBinaryComparatorFactory[] rtreeCmpFactories = getCmpFactories(mdProvider, index, recordType, metaType);
    int[] secondaryFilterFields = (filterTypeTraits != null && filterTypeTraits.length > 0) ? new int[] { numNestedSecondaryKeyFields + numPrimaryKeys } : null;
    IBinaryComparatorFactory[] btreeCompFactories = dataset.getDatasetType() == DatasetType.EXTERNAL ? IndexingConstants.getBuddyBtreeComparatorFactories() : getComparatorFactoriesForDeletedKeyBTree(secondaryTypeTraits, primaryComparatorFactories, secondaryComparatorFactories);
    if (dataset.getDatasetType() == DatasetType.INTERNAL) {
        AsterixVirtualBufferCacheProvider vbcProvider = new AsterixVirtualBufferCacheProvider(dataset.getDatasetId());
        return new LSMRTreeWithAntiMatterLocalResourceFactory(storageManager, typeTraits, rtreeCmpFactories, filterTypeTraits, filterCmpFactories, secondaryFilterFields, opTrackerFactory, ioOpCallbackFactory, metadataPageManagerFactory, vbcProvider, ioSchedulerProvider, mergePolicyFactory, mergePolicyProperties, durable, valueProviderFactories, rTreePolicyType, linearizeCmpFactory, rtreeFields, isPointMBR, btreeCompFactories);
    } else {
        return new ExternalRTreeLocalResourceFactory(storageManager, typeTraits, rtreeCmpFactories, filterTypeTraits, filterCmpFactories, secondaryFilterFields, opTrackerFactory, ioOpCallbackFactory, metadataPageManagerFactory, ioSchedulerProvider, mergePolicyFactory, mergePolicyProperties, durable, btreeCompFactories, valueProviderFactories, rTreePolicyType, linearizeCmpFactory, rtreeFields, new int[] { numNestedSecondaryKeyFields }, isPointMBR, mdProvider.getStorageProperties().getBloomFilterFalsePositiveRate());
    }
}
Also used:
AsterixVirtualBufferCacheProvider (org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider)
ILSMIOOperationSchedulerProvider (org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider)
List (java.util.List)
CompilationException (org.apache.asterix.common.exceptions.CompilationException)
ILSMIOOperationCallbackFactory (org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory)
IStorageComponentProvider (org.apache.asterix.common.context.IStorageComponentProvider)
ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits)
IPrimitiveValueProviderFactory (org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory)
ILinearizeComparatorFactory (org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory)
IMetadataPageManagerFactory (org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory)
IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory)
ILSMOperationTrackerFactory (org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory)
IStorageManager (org.apache.hyracks.storage.common.IStorageManager)
ATypeTag (org.apache.asterix.om.types.ATypeTag)
LSMRTreeWithAntiMatterLocalResourceFactory (org.apache.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeWithAntiMatterLocalResourceFactory)
ExternalRTreeLocalResourceFactory (org.apache.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeLocalResourceFactory)
IAType (org.apache.asterix.om.types.IAType)
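The key-width arithmetic above follows from R-tree geometry: a secondary key is a minimum bounding rectangle, one (min, max) coordinate pair per spatial dimension. A short recap using only values computed in the example:

// POINT has 2 dimensions, POINT3D has 3
int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
// an MBR stores a low and a high coordinate per dimension: 4 key fields for POINT, 6 for POINT3D
int numNestedSecondaryKeyFields = numDimensions * 2;
// the comparator and value-provider arrays are sized to this; the type-trait array
// additionally appends slots for the numPrimaryKeys primary-key fields
ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];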

Example 10 with IStorageComponentProvider

Use of org.apache.asterix.common.context.IStorageComponentProvider in project asterixdb by apache.

In the class ExternalIndexingOperations, the method buildFilesIndexUpdateJobSpec:

public static JobSpecification buildFilesIndexUpdateJobSpec(Dataset dataset, List<ExternalFile> externalFilesSnapshot, MetadataProvider metadataProvider) throws AlgebricksException {
    IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider.getSplitProviderAndConstraints(dataset, IndexingConstants.getFilesIndexName(dataset.getDatasetName()));
    IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
    IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), secondaryFileSplitProvider);
    ExternalFilesIndexModificationOperatorDescriptor externalFilesOp = new ExternalFilesIndexModificationOperatorDescriptor(spec, dataflowHelperFactory, externalFilesSnapshot);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp, secondarySplitsAndConstraint.second);
    spec.addRoot(externalFilesOp);
    spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return spec;
}
Also used:
IStorageComponentProvider (org.apache.asterix.common.context.IStorageComponentProvider)
ConnectorPolicyAssignmentPolicy (org.apache.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy)
IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider)
IIndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory)
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint)
JobSpecification (org.apache.hyracks.api.job.JobSpecification)
IndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory)
ExternalFilesIndexModificationOperatorDescriptor (org.apache.asterix.external.operators.ExternalFilesIndexModificationOperatorDescriptor)
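The method above is also a compact illustration of the standard Hyracks job-wiring recipe for a single-operator job. A condensed recap; every call appears verbatim in the example:

// 1. create an empty job specification from the application context
JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
// 2. pin the operator to the same partitions as the index's file splits
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp,
        secondarySplitsAndConstraint.second);
// 3. the operator has no downstream consumer, so it is the root of the job
spec.addRoot(externalFilesOp);
// 4. choose connector policies for the operator graph
spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());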

Aggregations

IStorageComponentProvider (org.apache.asterix.common.context.IStorageComponentProvider): 12 uses
ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits): 6 uses
IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory): 5 uses
List (java.util.List): 4 uses
AsterixVirtualBufferCacheProvider (org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider): 4 uses
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 4 uses
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 4 uses
ILSMIOOperationCallbackFactory (org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory): 4 uses
ILSMOperationTrackerFactory (org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory): 4 uses
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint): 3 uses
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 3 uses
ConnectorPolicyAssignmentPolicy (org.apache.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy): 3 uses
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 3 uses
IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider): 3 uses
IMetadataPageManagerFactory (org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory): 3 uses
IIndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory): 3 uses
IndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory): 3 uses
ILSMIOOperationSchedulerProvider (org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider): 3 uses
IStorageManager (org.apache.hyracks.storage.common.IStorageManager): 3 uses
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 2 uses