
Example 6 with AlgebricksAbsolutePartitionConstraint

use of org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint in project asterixdb by apache.

the class ClusterStateManager method resetClusterPartitionConstraint.

private synchronized void resetClusterPartitionConstraint() {
    // Collect the node ID of every currently active cluster partition.
    ArrayList<String> clusterActiveLocations = new ArrayList<>();
    for (ClusterPartition p : clusterPartitions.values()) {
        if (p.isActive()) {
            clusterActiveLocations.add(p.getActiveNodeId());
        }
    }
    // One location entry per active partition.
    clusterPartitionConstraint =
            new AlgebricksAbsolutePartitionConstraint(clusterActiveLocations.toArray(new String[0]));
}
Also used : AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint), ArrayList (java.util.ArrayList), ClusterPartition (org.apache.asterix.common.cluster.ClusterPartition)
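For reference, a minimal sketch of the same pattern in isolation: building an AlgebricksAbsolutePartitionConstraint from a list of node IDs. The class and node names below are hypothetical; only the constructor and getLocations() come from the examples on this page.

import java.util.ArrayList;
import java.util.List;

import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;

public class ConstraintFromNodesSketch {
    // Builds an absolute constraint that pins one partition per node ID.
    static AlgebricksAbsolutePartitionConstraint fromNodes(List<String> nodeIds) {
        return new AlgebricksAbsolutePartitionConstraint(nodeIds.toArray(new String[0]));
    }

    public static void main(String[] args) {
        List<String> nodes = new ArrayList<>();
        nodes.add("nc1"); // hypothetical node controller IDs
        nodes.add("nc2");
        AlgebricksAbsolutePartitionConstraint constraint = fromNodes(nodes);
        System.out.println(constraint.getLocations().length + " locations");
    }
}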

Example 7 with AlgebricksAbsolutePartitionConstraint

use of org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint in project asterixdb by apache.

the class PigletMetadataProvider method getScannerRuntime.

@Override
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(IDataSource<String> dataSource, List<LogicalVariable> scanVariables, List<LogicalVariable> projectVariables, boolean projectPushed, List<LogicalVariable> minFilterVars, List<LogicalVariable> maxFilterVars, IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv, JobGenContext context, JobSpecification jobSpec, Object implConfig) throws AlgebricksException {
    PigletFileDataSource ds = (PigletFileDataSource) dataSource;
    FileSplit[] fileSplits = ds.getFileSplits();
    String[] locations = new String[fileSplits.length];
    for (int i = 0; i < fileSplits.length; ++i) {
        locations[i] = fileSplits[i].getNodeName();
    }
    IFileSplitProvider fsp = new ConstantFileSplitProvider(fileSplits);
    Object[] colTypes = ds.getSchemaTypes();
    IValueParserFactory[] vpfs = new IValueParserFactory[colTypes.length];
    ISerializerDeserializer[] serDesers = new ISerializerDeserializer[colTypes.length];
    for (int i = 0; i < colTypes.length; ++i) {
        Type colType = (Type) colTypes[i];
        IValueParserFactory vpf;
        ISerializerDeserializer serDeser;
        switch(colType.getTag()) {
            case INTEGER:
                vpf = IntegerParserFactory.INSTANCE;
                serDeser = IntegerSerializerDeserializer.INSTANCE;
                break;
            case CHAR_ARRAY:
                vpf = UTF8StringParserFactory.INSTANCE;
                serDeser = new UTF8StringSerializerDeserializer();
                break;
            case FLOAT:
                vpf = FloatParserFactory.INSTANCE;
                serDeser = FloatSerializerDeserializer.INSTANCE;
                break;
            default:
                throw new UnsupportedOperationException();
        }
        vpfs[i] = vpf;
        serDesers[i] = serDeser;
    }
    ITupleParserFactory tpf = new DelimitedDataTupleParserFactory(vpfs, ',');
    RecordDescriptor rDesc = new RecordDescriptor(serDesers);
    IOperatorDescriptor scanner = new FileScanOperatorDescriptor(jobSpec, fsp, tpf, rDesc);
    AlgebricksAbsolutePartitionConstraint constraint = new AlgebricksAbsolutePartitionConstraint(locations);
    return new Pair<>(scanner, constraint);
}
Also used : IValueParserFactory (org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory), IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider), RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), ConstantFileSplitProvider (org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider), DelimitedDataTupleParserFactory (org.apache.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory), FileSplit (org.apache.hyracks.api.io.FileSplit), UTF8StringSerializerDeserializer (org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer), AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint), AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint), ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer), Type (org.apache.hyracks.algebricks.examples.piglet.types.Type), ITupleParserFactory (org.apache.hyracks.dataflow.std.file.ITupleParserFactory), IOperatorDescriptor (org.apache.hyracks.api.dataflow.IOperatorDescriptor), FileScanOperatorDescriptor (org.apache.hyracks.dataflow.std.file.FileScanOperatorDescriptor), Pair (org.apache.hyracks.algebricks.common.utils.Pair)
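The locality step above is the part worth isolating: each FileSplit names the node that hosts it, and the scan is constrained to run there. A sketch of just that mapping (class name hypothetical; getNodeName() is taken from the example above):

import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.api.io.FileSplit;

public class SplitLocalitySketch {
    // One location per split: partition i of the scan runs on the node hosting split i.
    static AlgebricksAbsolutePartitionConstraint constraintFor(FileSplit[] splits) {
        String[] locations = new String[splits.length];
        for (int i = 0; i < splits.length; i++) {
            locations[i] = splits[i].getNodeName();
        }
        return new AlgebricksAbsolutePartitionConstraint(locations);
    }
}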

Example 8 with AlgebricksAbsolutePartitionConstraint

use of org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint in project asterixdb by apache.

the class HDFSUtils method getPartitionConstraints.

public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(IApplicationContext appCtx,
        AlgebricksAbsolutePartitionConstraint clusterLocations) {
    if (clusterLocations == null) {
        // Fall back to one location per I/O device on every storage node.
        ArrayList<String> locs = new ArrayList<>();
        Map<String, String[]> stores = appCtx.getMetadataProperties().getStores();
        for (String node : stores.keySet()) {
            int numIODevices = ClusterStateManager.INSTANCE.getIODevices(node).length;
            for (int k = 0; k < numIODevices; k++) {
                locs.add(node);
            }
        }
        return new AlgebricksAbsolutePartitionConstraint(locs.toArray(new String[0]));
    }
    return clusterLocations;
}
Also used : AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint), ArrayList (java.util.ArrayList)
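The fallback repeats each node ID once per I/O device, so the constraint schedules one partition per device rather than per node. A sketch of that expansion with the cluster lookup replaced by a plain map (assumption: the map mirrors what ClusterStateManager reports):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;

public class PerDeviceConstraintSketch {
    // nodeToIoDeviceCount stands in for ClusterStateManager.INSTANCE.getIODevices(node).length.
    static AlgebricksAbsolutePartitionConstraint perDevice(Map<String, Integer> nodeToIoDeviceCount) {
        List<String> locs = new ArrayList<>();
        for (Map.Entry<String, Integer> e : nodeToIoDeviceCount.entrySet()) {
            for (int k = 0; k < e.getValue(); k++) {
                locs.add(e.getKey()); // one entry per I/O device on this node
            }
        }
        return new AlgebricksAbsolutePartitionConstraint(locs.toArray(new String[0]));
    }
}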

Example 9 with AlgebricksAbsolutePartitionConstraint

use of org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint in project asterixdb by apache.

the class APIFramework method chooseLocations.

// Chooses the location constraints, i.e., whether to use storage parallelism or a user-specified number
// of cores.
private static AlgebricksAbsolutePartitionConstraint chooseLocations(IClusterInfoCollector clusterInfoCollector,
        int parallelismHint, AlgebricksAbsolutePartitionConstraint storageLocations) throws AlgebricksException {
    try {
        Map<String, NodeControllerInfo> ncMap = clusterInfoCollector.getNodeControllerInfos();
        // Gets the total number of cores in the cluster.
        int totalNumCores = getTotalNumCores(ncMap);
        // Uses the storage locations if the hint asks for storage parallelism
        // and the cluster has enough cores to host them.
        if (parallelismHint == CompilerProperties.COMPILER_PARALLELISM_AS_STORAGE
                && storageLocations.getLocations().length <= totalNumCores) {
            return storageLocations;
        }
        // Otherwise, computes locations from the parallelism hint.
        return getComputationLocations(ncMap, parallelismHint);
    } catch (HyracksException e) {
        throw new AlgebricksException(e);
    }
}
Also used : NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), HyracksException (org.apache.hyracks.api.exceptions.HyracksException), AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint), AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint)
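The choice reduces to one comparison: reuse the storage partition locations only when the hint asks for storage parallelism and the cluster has at least as many cores as storage partitions. A sketch of the rule with the cluster lookup stubbed out (the sentinel value 0 for COMPILER_PARALLELISM_AS_STORAGE is an assumption; only the comparison itself comes from the example):

public class LocationChoiceSketch {
    // Assumed sentinel meaning "use storage parallelism"; in AsterixDB this is
    // CompilerProperties.COMPILER_PARALLELISM_AS_STORAGE.
    static final int PARALLELISM_AS_STORAGE = 0;

    // True when the plan should reuse the storage partition locations.
    static boolean useStorageLocations(int parallelismHint, int numStorageLocations, int totalNumCores) {
        return parallelismHint == PARALLELISM_AS_STORAGE && numStorageLocations <= totalNumCores;
    }

    public static void main(String[] args) {
        System.out.println(useStorageLocations(PARALLELISM_AS_STORAGE, 4, 8)); // true: 4 partitions fit in 8 cores
        System.out.println(useStorageLocations(16, 4, 8)); // false: an explicit hint wins
    }
}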

Example 10 with AlgebricksAbsolutePartitionConstraint

use of org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint in project asterixdb by apache.

the class QueryTranslator method handleStartFeedStatement.

private void handleStartFeedStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    StartFeedStatement sfs = (StartFeedStatement) stmt;
    String dataverseName = getActiveDataverse(sfs.getDataverseName());
    String feedName = sfs.getFeedName().getValue();
    // Transaction handler
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Runtime handler
    EntityId entityId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
    // Feed & Feed Connections
    Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName, metadataProvider.getMetadataTxnContext());
    List<FeedConnection> feedConnections = MetadataManager.INSTANCE.getFeedConections(metadataProvider.getMetadataTxnContext(), dataverseName, feedName);
    ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
    IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
    DefaultStatementExecutorFactory qtFactory = new DefaultStatementExecutorFactory();
    ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
    ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
    FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(entityId);
    if (listener != null) {
        throw new AlgebricksException("Feed " + feedName + " is started already.");
    }
    // Start
    MetadataLockManager.INSTANCE.startFeedBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + feedName, feedConnections);
    try {
        // Prepare policy
        List<IDataset> datasets = new ArrayList<>();
        for (FeedConnection connection : feedConnections) {
            Dataset ds = metadataProvider.findDataset(connection.getDataverseName(), connection.getDatasetName());
            datasets.add(ds);
        }
        org.apache.commons.lang3.tuple.Pair<JobSpecification, AlgebricksAbsolutePartitionConstraint> jobInfo = FeedOperations.buildStartFeedJob(sessionOutput, metadataProvider, feed, feedConnections, compilationProvider, storageComponentProvider, qtFactory, hcc);
        JobSpecification feedJob = jobInfo.getLeft();
        listener = new FeedEventsListener(appCtx, entityId, datasets, jobInfo.getRight().getLocations());
        activeEventHandler.registerListener(listener);
        IActiveEventSubscriber eventSubscriber = listener.subscribe(ActivityState.STARTED);
        feedJob.setProperty(ActiveJobNotificationHandler.ACTIVE_ENTITY_PROPERTY_NAME, entityId);
        JobUtils.runJob(hcc, feedJob, Boolean.valueOf(metadataProvider.getConfig().get(StartFeedStatement.WAIT_FOR_COMPLETION)));
        eventSubscriber.sync();
        LOGGER.log(Level.INFO, "Submitted");
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        if (listener != null) {
            activeEventHandler.unregisterListener(listener);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : FeedEventsListener (org.apache.asterix.external.feed.management.FeedEventsListener), ArrayList (java.util.ArrayList), MetadataTransactionContext (org.apache.asterix.metadata.MetadataTransactionContext), ActiveLifecycleListener (org.apache.asterix.active.ActiveLifecycleListener), ILangCompilationProvider (org.apache.asterix.compiler.provider.ILangCompilationProvider), IDataset (org.apache.asterix.common.metadata.IDataset), JobSpecification (org.apache.hyracks.api.job.JobSpecification), ActiveJobNotificationHandler (org.apache.asterix.active.ActiveJobNotificationHandler), Feed (org.apache.asterix.metadata.entities.Feed), AqlCompilationProvider (org.apache.asterix.compiler.provider.AqlCompilationProvider), IStorageComponentProvider (org.apache.asterix.common.context.IStorageComponentProvider), FeedConnection (org.apache.asterix.metadata.entities.FeedConnection), IHyracksDataset (org.apache.hyracks.api.dataset.IHyracksDataset), Dataset (org.apache.asterix.metadata.entities.Dataset), IActiveEventSubscriber (org.apache.asterix.active.IActiveEventSubscriber), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), StorageComponentProvider (org.apache.asterix.file.StorageComponentProvider), ACIDException (org.apache.asterix.common.exceptions.ACIDException), MetadataException (org.apache.asterix.metadata.MetadataException), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), CompilationException (org.apache.asterix.common.exceptions.CompilationException), IOException (java.io.IOException), RemoteException (java.rmi.RemoteException), AsterixException (org.apache.asterix.common.exceptions.AsterixException), EntityId (org.apache.asterix.active.EntityId), StartFeedStatement (org.apache.asterix.lang.common.statement.StartFeedStatement), AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint)
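Here the constraint travels with the compiled job: FeedOperations returns a (job, constraint) pair, and the listener keeps the node IDs via getLocations(). A sketch of that unpacking (class and method names hypothetical; getLeft()/getRight() are the standard commons-lang3 Pair accessors):

import org.apache.commons.lang3.tuple.Pair;

import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.api.job.JobSpecification;

public class FeedJobInfoSketch {
    // Extracts the node IDs the feed job will be pinned to.
    static String[] feedLocations(Pair<JobSpecification, AlgebricksAbsolutePartitionConstraint> jobInfo) {
        return jobInfo.getRight().getLocations();
    }
}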

Aggregations

AlgebricksAbsolutePartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint): 18 usages
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint): 8 usages
ArrayList (java.util.ArrayList): 7 usages
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 6 usages
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 5 usages
NodeControllerInfo (org.apache.hyracks.api.client.NodeControllerInfo): 4 usages
FileSplit (org.apache.hyracks.api.io.FileSplit): 4 usages
ILangCompilationProvider (org.apache.asterix.compiler.provider.ILangCompilationProvider): 3 usages
IOException (java.io.IOException): 2 usages
HashMap (java.util.HashMap): 2 usages
TreeSet (java.util.TreeSet): 2 usages
SinkWriterRuntimeFactory (org.apache.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory): 2 usages
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer): 2 usages
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor): 2 usages
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 2 usages
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 2 usages
IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider): 2 usages
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1 usage
File (java.io.File): 1 usage
RemoteException (java.rmi.RemoteException): 1 usage