
Example 46 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

From the class DatasetUtil, the method createDatasetJobSpec:

public static JobSpecification createDatasetJobSpec(Dataset dataset, MetadataProvider metadataProvider) throws AlgebricksException {
    Index index = IndexUtil.getPrimaryIndex(dataset);
    ARecordType itemType = (ARecordType) metadataProvider.findType(dataset);
    // get meta item type
    ARecordType metaItemType = null;
    if (dataset.hasMetaPart()) {
        metaItemType = (ARecordType) metadataProvider.findMetaType(dataset);
    }
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider.getSplitProviderAndConstraints(dataset);
    FileSplit[] fs = splitsAndConstraint.first.getFileSplits();
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < fs.length; i++) {
        sb.append(fs[i] + " ");
    }
    LOGGER.info("CREATING File Splits: " + sb.toString());
    Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
    //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
    IResourceFactory resourceFactory = dataset.getResourceFactory(metadataProvider, index, itemType, metaItemType, compactionInfo.first, compactionInfo.second);
    IndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first, resourceFactory, !dataset.isTemp());
    IndexCreateOperatorDescriptor indexCreateOp = new IndexCreateOperatorDescriptor(spec, indexBuilderFactory);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp, splitsAndConstraint.second);
    spec.addRoot(indexCreateOp);
    return spec;
}
Also used: IFileSplitProvider(org.apache.hyracks.dataflow.std.file.IFileSplitProvider) IndexBuilderFactory(org.apache.hyracks.storage.am.common.build.IndexBuilderFactory) Index(org.apache.asterix.metadata.entities.Index) FileSplit(org.apache.hyracks.api.io.FileSplit) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) JobSpecification(org.apache.hyracks.api.job.JobSpecification) ARecordType(org.apache.asterix.om.types.ARecordType) Map(java.util.Map) IResourceFactory(org.apache.hyracks.storage.common.IResourceFactory) ILSMMergePolicyFactory(org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory) IndexCreateOperatorDescriptor(org.apache.hyracks.storage.am.common.dataflow.IndexCreateOperatorDescriptor)
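
For context, the JobSpecification returned above still has to be submitted to the cluster to take effect. The following driver is only a sketch, not code from the project: the Hyracks client connection (host and port) is a placeholder and error handling is omitted.

public static void runCreateDataset(Dataset dataset, MetadataProvider metadataProvider) throws Exception {
    // Sketch only: connect to the cluster controller (host/port are placeholders),
    // submit the dataset-creation job, and block until the primary-index resources
    // have been built on every partition.
    IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
    JobSpecification spec = DatasetUtil.createDatasetJobSpec(dataset, metadataProvider);
    JobId jobId = hcc.startJob(spec);
    hcc.waitForCompletion(jobId);
}
Also used: IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) HyracksConnection(org.apache.hyracks.api.client.HyracksConnection) JobId(org.apache.hyracks.api.job.JobId)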

Example 47 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

From the class ExternalIndexingOperations, the method buildRecoverOp:

public static JobSpecification buildRecoverOp(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider) throws AlgebricksException {
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    IStorageManager storageMgr = metadataProvider.getStorageComponentProvider().getStorageManager();
    ArrayList<IIndexDataflowHelperFactory> treeDataflowHelperFactories = new ArrayList<>();
    AlgebricksPartitionConstraint constraints = null;
    for (Index index : indexes) {
        IFileSplitProvider indexSplitProvider;
        if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> sAndConstraints = metadataProvider.getSplitProviderAndConstraints(ds, index.getIndexName());
            indexSplitProvider = sAndConstraints.first;
            constraints = sAndConstraints.second;
        } else {
            indexSplitProvider = metadataProvider.getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName())).first;
        }
        IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(storageMgr, indexSplitProvider);
        treeDataflowHelperFactories.add(indexDataflowHelperFactory);
    }
    ExternalDatasetIndexesRecoverOperatorDescriptor op = new ExternalDatasetIndexesRecoverOperatorDescriptor(spec, treeDataflowHelperFactories);
    spec.addRoot(op);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op, constraints);
    spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return spec;
}
Also used: IStorageManager(org.apache.hyracks.storage.common.IStorageManager) ExternalDatasetIndexesRecoverOperatorDescriptor(org.apache.asterix.external.operators.ExternalDatasetIndexesRecoverOperatorDescriptor) ConnectorPolicyAssignmentPolicy(org.apache.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy) IIndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory) IFileSplitProvider(org.apache.hyracks.dataflow.std.file.IFileSplitProvider) ArrayList(java.util.ArrayList) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) Index(org.apache.asterix.metadata.entities.Index) JobSpecification(org.apache.hyracks.api.job.JobSpecification) IndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory)
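
Before calling buildRecoverOp, a caller has to assemble the List<Index> for the external dataset. The fragment below is a sketch, not project code; it assumes an open metadata transaction on the MetadataProvider and an existing Hyracks client connection hcc.

// Sketch only: collect the dataset's indexes from the metadata manager and run the recovery job.
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(
        metadataProvider.getMetadataTxnContext(), ds.getDataverseName(), ds.getDatasetName());
JobSpecification recoverSpec = ExternalIndexingOperations.buildRecoverOp(ds, indexes, metadataProvider);
hcc.waitForCompletion(hcc.startJob(recoverSpec));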

Example 48 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

From the class ExternalIndexingOperations, the method buildCommitJob:

public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider) throws AlgebricksException {
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    IStorageManager storageMgr = metadataProvider.getStorageComponentProvider().getStorageManager();
    ArrayList<IIndexDataflowHelperFactory> treeDataflowHelperFactories = new ArrayList<>();
    AlgebricksPartitionConstraint constraints = null;
    for (Index index : indexes) {
        IFileSplitProvider indexSplitProvider;
        if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> sAndConstraints = metadataProvider.getSplitProviderAndConstraints(ds, index.getIndexName());
            indexSplitProvider = sAndConstraints.first;
            constraints = sAndConstraints.second;
        } else {
            indexSplitProvider = metadataProvider.getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName())).first;
        }
        IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(storageMgr, indexSplitProvider);
        treeDataflowHelperFactories.add(indexDataflowHelperFactory);
    }
    ExternalDatasetIndexesCommitOperatorDescriptor op = new ExternalDatasetIndexesCommitOperatorDescriptor(spec, treeDataflowHelperFactories);
    spec.addRoot(op);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op, constraints);
    spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return spec;
}
Also used: IStorageManager(org.apache.hyracks.storage.common.IStorageManager) ExternalDatasetIndexesCommitOperatorDescriptor(org.apache.asterix.external.operators.ExternalDatasetIndexesCommitOperatorDescriptor) ConnectorPolicyAssignmentPolicy(org.apache.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy) IIndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory) IFileSplitProvider(org.apache.hyracks.dataflow.std.file.IFileSplitProvider) ArrayList(java.util.ArrayList) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) Index(org.apache.asterix.metadata.entities.Index) JobSpecification(org.apache.hyracks.api.job.JobSpecification) IndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory)
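
buildRecoverOp and buildCommitJob are identical except for the operator descriptor they instantiate. Purely as an illustration (this helper does not exist in the project), the shared per-index loop could be factored into a private method of ExternalIndexingOperations:

// Hypothetical refactoring sketch: build one IndexDataflowHelperFactory per index of the
// external dataset and remember the partition constraint of the last index whose name
// passes isValidIndexName, exactly as the two methods above do inline.
private static Pair<List<IIndexDataflowHelperFactory>, AlgebricksPartitionConstraint> collectIndexHelpers(
        Dataset ds, List<Index> indexes, MetadataProvider metadataProvider) throws AlgebricksException {
    IStorageManager storageMgr = metadataProvider.getStorageComponentProvider().getStorageManager();
    List<IIndexDataflowHelperFactory> factories = new ArrayList<>();
    AlgebricksPartitionConstraint constraints = null;
    for (Index index : indexes) {
        IFileSplitProvider indexSplitProvider;
        if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> sAndConstraints =
                    metadataProvider.getSplitProviderAndConstraints(ds, index.getIndexName());
            indexSplitProvider = sAndConstraints.first;
            constraints = sAndConstraints.second;
        } else {
            indexSplitProvider = metadataProvider.getSplitProviderAndConstraints(ds,
                    IndexingConstants.getFilesIndexName(ds.getDatasetName())).first;
        }
        factories.add(new IndexDataflowHelperFactory(storageMgr, indexSplitProvider));
    }
    return new Pair<>(factories, constraints);
}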

Example 49 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

From the class ExternalIndexingOperations, the method buildFilesIndexCreateJobSpec:

public static JobSpecification buildFilesIndexCreateJobSpec(Dataset dataset, List<ExternalFile> externalFilesSnapshot, MetadataProvider metadataProvider) throws AlgebricksException {
    IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
    ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
    Map<String, String> mergePolicyProperties = compactionInfo.second;
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider.getSplitProviderAndConstraints(dataset, IndexingConstants.getFilesIndexName(dataset.getDatasetName()));
    IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
    String fileIndexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
    Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataset.getDataverseName(), dataset.getDatasetName(), fileIndexName);
    ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    IResourceFactory resourceFactory = dataset.getResourceFactory(metadataProvider, fileIndex, recordType, null, mergePolicyFactory, mergePolicyProperties);
    IIndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(storageComponentProvider.getStorageManager(), secondaryFileSplitProvider, resourceFactory, !dataset.isTemp());
    IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), secondaryFileSplitProvider);
    ExternalFilesIndexCreateOperatorDescriptor externalFilesOp = new ExternalFilesIndexCreateOperatorDescriptor(spec, indexBuilderFactory, dataflowHelperFactory, externalFilesSnapshot);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp, secondarySplitsAndConstraint.second);
    spec.addRoot(externalFilesOp);
    spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return spec;
}
Also used: IStorageComponentProvider(org.apache.asterix.common.context.IStorageComponentProvider) ConnectorPolicyAssignmentPolicy(org.apache.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy) IFileSplitProvider(org.apache.hyracks.dataflow.std.file.IFileSplitProvider) IIndexBuilderFactory(org.apache.hyracks.storage.am.common.api.IIndexBuilderFactory) IndexBuilderFactory(org.apache.hyracks.storage.am.common.build.IndexBuilderFactory) Index(org.apache.asterix.metadata.entities.Index) ExternalFilesIndexCreateOperatorDescriptor(org.apache.asterix.external.operators.ExternalFilesIndexCreateOperatorDescriptor) IIndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) JobSpecification(org.apache.hyracks.api.job.JobSpecification) Map(java.util.Map) ARecordType(org.apache.asterix.om.types.ARecordType) IResourceFactory(org.apache.hyracks.storage.common.IResourceFactory) IndexDataflowHelperFactory(org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory) ILSMMergePolicyFactory(org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory)
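
This job is typically built right after taking a snapshot of the files currently in the external source, so that the files index reflects a consistent view. The fragment below sketches that flow; the snapshot call (assumed here to be ExternalIndexingOperations.getSnapshotFromExternalFileSystem) and the client connection hcc are assumptions, not shown in the method above.

// Sketch only: snapshot the external files, build the files-index creation job, and run it.
List<ExternalFile> externalFilesSnapshot = ExternalIndexingOperations.getSnapshotFromExternalFileSystem(dataset);
JobSpecification filesIndexSpec =
        ExternalIndexingOperations.buildFilesIndexCreateJobSpec(dataset, externalFilesSnapshot, metadataProvider);
hcc.waitForCompletion(hcc.startJob(filesIndexSpec));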

Example 50 with Index

Use of org.apache.asterix.metadata.entities.Index in project asterixdb by apache.

From the class BTreeSearchPOperator, the method getRequiredPropertiesForChildren:

@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op, IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    if (requiresBroadcast) {
        // For primary indexes optimizing an equality condition we can reduce the broadcast requirement to hash partitioning.
        if (isPrimaryIndex && isEqCondition) {
            // If this is a composite primary index, then all of the keys should be provided.
            Index searchIndex = ((DataSourceIndex) idx).getIndex();
            int numberOfKeyFields = searchIndex.getKeyFieldNames().size();
            if (numberOfKeyFields < 2 || (lowKeyVarList.size() == numberOfKeyFields && highKeyVarList.size() == numberOfKeyFields)) {
                StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
                ListSet<LogicalVariable> searchKeyVars = new ListSet<>();
                searchKeyVars.addAll(lowKeyVarList);
                searchKeyVars.addAll(highKeyVarList);
                // Also, add a local sorting property to enforce a sort before the primary-index operator.
                List<ILocalStructuralProperty> propsLocal = new ArrayList<>();
                List<OrderColumn> orderColumns = new ArrayList<>();
                for (LogicalVariable orderVar : searchKeyVars) {
                    orderColumns.add(new OrderColumn(orderVar, OrderKind.ASC));
                }
                propsLocal.add(new LocalOrderProperty(orderColumns));
                pv[0] = new StructuralPropertiesVector(new UnorderedPartitionedProperty(searchKeyVars, domain), propsLocal);
                return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
            }
        }
        StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
        pv[0] = new StructuralPropertiesVector(new BroadcastPartitioningProperty(domain), null);
        return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
    } else {
        return super.getRequiredPropertiesForChildren(op, reqdByParent, context);
    }
}
Also used: StructuralPropertiesVector(org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector) LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) UnorderedPartitionedProperty(org.apache.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty) BroadcastPartitioningProperty(org.apache.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty) OrderColumn(org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn) DataSourceIndex(org.apache.asterix.metadata.declared.DataSourceIndex) IDataSourceIndex(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex) ArrayList(java.util.ArrayList) Index(org.apache.asterix.metadata.entities.Index) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) PhysicalRequirements(org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements) ListSet(org.apache.hyracks.algebricks.common.utils.ListSet) LocalOrderProperty(org.apache.hyracks.algebricks.core.algebra.properties.LocalOrderProperty) ILocalStructuralProperty(org.apache.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty)
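
To make the key-count check concrete: suppose the dataset's primary key is the composite (c_id, o_id), so numberOfKeyFields is 2. The snippet below is a hypothetical worked example of that condition only, with made-up variables; it is not part of the operator.

// Hypothetical worked example of the check in getRequiredPropertiesForChildren.
LogicalVariable cId = new LogicalVariable(1);
LogicalVariable oId = new LogicalVariable(2);
int numberOfKeyFields = 2;
// An equality condition that binds both key fields: low and high key lists are complete and identical.
List<LogicalVariable> lowKeyVarList = Arrays.asList(cId, oId);
List<LogicalVariable> highKeyVarList = Arrays.asList(cId, oId);
boolean hashPartitioningSuffices = numberOfKeyFields < 2
        || (lowKeyVarList.size() == numberOfKeyFields && highKeyVarList.size() == numberOfKeyFields);
// true here: the operator requires UnorderedPartitionedProperty on {c_id, o_id} plus an ASC
// LocalOrderProperty instead of broadcasting the probe input.
// If only c_id were bound, the list sizes would be 1 != 2 and the broadcast requirement would remain.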

Aggregations: types co-occurring with Index across all matched usages, with occurrence counts.

Index (org.apache.asterix.metadata.entities.Index) 53
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) 26
Dataset (org.apache.asterix.metadata.entities.Dataset) 25
ArrayList (java.util.ArrayList) 24
MetadataException (org.apache.asterix.metadata.MetadataException) 20
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) 16
ARecordType (org.apache.asterix.om.types.ARecordType) 15
IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider) 15
Pair (org.apache.hyracks.algebricks.common.utils.Pair) 14
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) 14
JobSpecification (org.apache.hyracks.api.job.JobSpecification) 13
IIndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory) 13
IndexDataflowHelperFactory (org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory) 13
AsterixException (org.apache.asterix.common.exceptions.AsterixException) 12
IAType (org.apache.asterix.om.types.IAType) 12
IDataSourceIndex (org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex) 12
IOException (java.io.IOException) 11
CompilationException (org.apache.asterix.common.exceptions.CompilationException) 11
List (java.util.List) 10
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException) 10