Use of org.apache.hyracks.dataflow.std.file.FileRemoveOperatorDescriptor in project asterixdb by apache.
From the class FeedOperations, method buildRemoveFeedStorageJob:
public static JobSpecification buildRemoveFeedStorageJob(MetadataProvider metadataProvider, Feed feed)
        throws AsterixException {
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    // Collect the locations of every node in the cluster; a TreeSet deduplicates and sorts them.
    AlgebricksAbsolutePartitionConstraint allCluster = ClusterStateManager.INSTANCE.getClusterLocations();
    Set<String> nodes = new TreeSet<>();
    for (String node : allCluster.getLocations()) {
        nodes.add(node);
    }
    AlgebricksAbsolutePartitionConstraint locations =
            new AlgebricksAbsolutePartitionConstraint(nodes.toArray(new String[nodes.size()]));
    // Compute the file splits that hold the feed's log files on each node.
    FileSplit[] feedLogFileSplits =
            FeedUtils.splitsForAdapter(feed.getDataverseName(), feed.getFeedName(), locations);
    org.apache.hyracks.algebricks.common.utils.Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spC =
            StoragePathUtil.splitProviderAndPartitionConstraints(feedLogFileSplits);
    // Remove quietly (third argument true): log files that no longer exist are ignored.
    FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(spec, spC.first, true);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, frod, spC.second);
    spec.addRoot(frod);
    return spec;
}
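The same pattern can be reduced to its essentials: build a JobSpecification, wrap the target files in a split provider, and add a FileRemoveOperatorDescriptor as a root operator. The following is a minimal sketch, not AsterixDB code; it assumes Hyracks' ManagedFileSplit, ConstantFileSplitProvider, and PartitionConstraintHelper, and the class and method names (RemoveFilesJobSketch, buildRemoveJob) and the nodeId/relativePath inputs are illustrative.

import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
import org.apache.hyracks.api.io.FileSplit;
import org.apache.hyracks.api.io.ManagedFileSplit;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
import org.apache.hyracks.dataflow.std.file.FileRemoveOperatorDescriptor;
import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;

public class RemoveFilesJobSketch {
    // Builds a job that deletes one managed file on the given node.
    // nodeId and relativePath are hypothetical inputs used only for illustration.
    public static JobSpecification buildRemoveJob(String nodeId, String relativePath, boolean quietly) {
        JobSpecification spec = new JobSpecification();
        FileSplit[] splits = { new ManagedFileSplit(nodeId, relativePath) };
        IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
        // quietly = true ignores files that are already gone; false fails the job on a missing file.
        FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(spec, splitProvider, quietly);
        // Pin the operator to the node that owns the file, mirroring the
        // AlgebricksPartitionConstraintHelper calls in the snippets above.
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, frod, nodeId);
        spec.addRoot(frod);
        return spec;
    }
}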
Use of org.apache.hyracks.dataflow.std.file.FileRemoveOperatorDescriptor in project asterixdb by apache.
From the class DataverseUtil, method dropDataverseJobSpec:
public static JobSpecification dropDataverseJobSpec(Dataverse dataverse, MetadataProvider metadata) {
    JobSpecification jobSpec = RuntimeUtils.createJobSpecification(metadata.getApplicationContext());
    // Resolve the dataverse's storage splits and the partition constraint that pins them to nodes.
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadata.splitAndConstraints(dataverse.getDataverseName());
    // Remove the dataverse's files; third argument false, so a missing file fails the job.
    FileRemoveOperatorDescriptor frod =
            new FileRemoveOperatorDescriptor(jobSpec, splitsAndConstraint.first, false);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, frod, splitsAndConstraint.second);
    jobSpec.addRoot(frod);
    return jobSpec;
}
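Note the contrast between the two call sites: buildRemoveFeedStorageJob passes true as the descriptor's final argument, so feed log files that are already gone are silently skipped, while dropDataverseJobSpec passes false, so a missing dataverse file causes the remove job to fail. This matches the flag's apparent role in Hyracks as a "quiet deletion" switch.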