Use of org.apache.asterix.metadata.entities.FeedPolicyEntity in project asterixdb by apache.
From the class MetadataNode, method getDataversePolicies:
@Override
public List<FeedPolicyEntity> getDataversePolicies(JobId jobId, String dataverse)
        throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverse);
        FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(false);
        IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<FeedPolicyEntity> results = new ArrayList<>();
        searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
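Callers receive the matching tuples already translated into FeedPolicyEntity objects. A minimal caller sketch (hypothetical: assumes a live MetadataNode instance and a JobId already allocated by the transaction subsystem):

// Hypothetical caller; metadataNode and jobId are assumed to exist already.
List<FeedPolicyEntity> policies = metadataNode.getDataversePolicies(jobId, "MyDataverse");
for (FeedPolicyEntity policy : policies) {
    // getPolicyName() is the same accessor used by dropDataverse below.
    System.out.println(policy.getPolicyName());
}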
Use of org.apache.asterix.metadata.entities.FeedPolicyEntity in project asterixdb by apache.
From the class MetadataManager, method dropFeedPolicy:
@Override
public void dropFeedPolicy(MetadataTransactionContext mdTxnCtx, String dataverseName, String policyName)
        throws MetadataException {
    FeedPolicyEntity feedPolicy;
    try {
        feedPolicy = metadataNode.getFeedPolicy(mdTxnCtx.getJobId(), dataverseName, policyName);
        metadataNode.dropFeedPolicy(mdTxnCtx.getJobId(), dataverseName, policyName);
    } catch (RemoteException e) {
        throw new MetadataException(e);
    }
    mdTxnCtx.dropFeedPolicy(feedPolicy);
}
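A typical call site wraps the drop in a metadata transaction. A hedged sketch of that pattern (the transaction calls mirror QueryTranslator below; the dataverse and policy names are illustrative):

// Hypothetical usage; error handling abbreviated.
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
try {
    MetadataManager.INSTANCE.dropFeedPolicy(mdTxnCtx, "MyDataverse", "MyPolicy");
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
    // A real caller aborts the transaction here, as QueryTranslator does.
    throw new MetadataException(e);
}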
Use of org.apache.asterix.metadata.entities.FeedPolicyEntity in project asterixdb by apache.
From the class QueryTranslator, method handleCreateFeedPolicyStatement:
protected void handleCreateFeedPolicyStatement(MetadataProvider metadataProvider, Statement stmt)
        throws AlgebricksException, HyracksDataException {
    String dataverse;
    String policy;
    FeedPolicyEntity newPolicy = null;
    MetadataTransactionContext mdTxnCtx = null;
    CreateFeedPolicyStatement cfps = (CreateFeedPolicyStatement) stmt;
    dataverse = getActiveDataverse(null);
    policy = cfps.getPolicyName();
    MetadataLockManager.INSTANCE.createFeedPolicyBegin(metadataProvider.getLocks(), dataverse,
            dataverse + "." + policy);
    try {
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        FeedPolicyEntity feedPolicy =
                MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, policy);
        if (feedPolicy != null) {
            if (cfps.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A policy named '" + policy + "' already exists.");
            }
        }
        boolean extendingExisting = cfps.getSourcePolicyName() != null;
        String description = cfps.getDescription() == null ? "" : cfps.getDescription();
        if (extendingExisting) {
            FeedPolicyEntity sourceFeedPolicy = MetadataManager.INSTANCE
                    .getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, cfps.getSourcePolicyName());
            if (sourceFeedPolicy == null) {
                sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(),
                        MetadataConstants.METADATA_DATAVERSE_NAME, cfps.getSourcePolicyName());
                if (sourceFeedPolicy == null) {
                    throw new AlgebricksException("Unknown policy " + cfps.getSourcePolicyName());
                }
            }
            Map<String, String> policyProperties = sourceFeedPolicy.getProperties();
            policyProperties.putAll(cfps.getProperties());
            newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
        } else {
            Properties prop = new Properties();
            // Use try-with-resources so the policy file stream is closed even on failure.
            try (InputStream stream = new FileInputStream(cfps.getSourcePolicyFile())) {
                prop.load(stream);
            } catch (Exception e) {
                throw new AlgebricksException("Unable to read policy file " + cfps.getSourcePolicyFile(), e);
            }
            Map<String, String> policyProperties = new HashMap<>();
            for (Entry<Object, Object> entry : prop.entrySet()) {
                policyProperties.put((String) entry.getKey(), (String) entry.getValue());
            }
            newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
        }
        MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, newPolicy);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (RemoteException | ACIDException e) {
        abort(e, e, mdTxnCtx);
        throw new HyracksDataException(e);
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
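The branch worth noting is the extending-existing case: the new policy starts from the source policy's property map and then overlays the statement's properties, so statement-level keys win. A self-contained sketch of that merge with plain maps (the keys and values are hypothetical, not real policy parameters):

import java.util.HashMap;
import java.util.Map;

public class PolicyMergeSketch {
    public static void main(String[] args) {
        // Properties inherited from the source policy (hypothetical values).
        Map<String, String> sourceProps = new HashMap<>();
        sourceProps.put("flowcontrol.enabled", "true");
        sourceProps.put("max.spill.size", "1024");

        // Overrides supplied with the new policy statement.
        Map<String, String> overrides = new HashMap<>();
        overrides.put("max.spill.size", "4096");

        // Same merge order as handleCreateFeedPolicyStatement: source first, overrides win.
        // Copying into a fresh map avoids mutating the source map in place.
        Map<String, String> merged = new HashMap<>(sourceProps);
        merged.putAll(overrides);
        System.out.println(merged); // {max.spill.size=4096, flowcontrol.enabled=true}
    }
}

One difference from the method above: it calls putAll directly on sourceFeedPolicy.getProperties(), so if getProperties() returns a live reference, the in-memory source entity is mutated as a side effect of creating the derived policy.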
Use of org.apache.asterix.metadata.entities.FeedPolicyEntity in project asterixdb by apache.
From the class MetadataNode, method getFeedPolicy:
@Override
public FeedPolicyEntity getFeedPolicy(JobId jobId, String dataverse, String policyName)
        throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverse, policyName);
        FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(false);
        List<FeedPolicyEntity> results = new ArrayList<>();
        IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
        if (!results.isEmpty()) {
            return results.get(0);
        }
        return null;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
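Because getFeedPolicy returns null rather than throwing when no tuple matches the key, callers can use it as an existence probe, exactly as handleCreateFeedPolicyStatement does above. A minimal sketch (hypothetical names):

// Hypothetical existence check before creating a policy.
FeedPolicyEntity existing = metadataNode.getFeedPolicy(jobId, "MyDataverse", "MyPolicy");
if (existing == null) {
    // Safe to create the policy.
} else {
    // Honor IF NOT EXISTS or report a duplicate, as QueryTranslator does.
}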
Use of org.apache.asterix.metadata.entities.FeedPolicyEntity in project asterixdb by apache.
From the class MetadataNode, method dropDataverse:
@Override
public void dropDataverse(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
    try {
        confirmDataverseCanBeDeleted(jobId, dataverseName);
        List<Dataset> dataverseDatasets;
        Dataset ds;
        dataverseDatasets = getDataverseDatasets(jobId, dataverseName);
        // Drop all datasets in this dataverse.
        for (int i = 0; i < dataverseDatasets.size(); i++) {
            ds = dataverseDatasets.get(i);
            dropDataset(jobId, dataverseName, ds.getDatasetName());
        }
        // After dropping datasets, drop datatypes.
        List<Datatype> dataverseDatatypes;
        // As a side effect, acquires an S lock on the 'datatype' dataset
        // on behalf of txnId.
        dataverseDatatypes = getDataverseDatatypes(jobId, dataverseName);
        // Drop all types in this dataverse.
        for (int i = 0; i < dataverseDatatypes.size(); i++) {
            forceDropDatatype(jobId, dataverseName, dataverseDatatypes.get(i).getDatatypeName());
        }
        // As a side effect, acquires an S lock on the 'Function' dataset
        // on behalf of txnId.
        List<Function> dataverseFunctions = getDataverseFunctions(jobId, dataverseName);
        // Drop all functions in this dataverse.
        for (Function function : dataverseFunctions) {
            dropFunction(jobId, new FunctionSignature(dataverseName, function.getName(), function.getArity()));
        }
        // As a side effect, acquires an S lock on the 'Adapter' dataset
        // on behalf of txnId.
        List<DatasourceAdapter> dataverseAdapters = getDataverseAdapters(jobId, dataverseName);
        // Drop all adapters in this dataverse.
        for (DatasourceAdapter adapter : dataverseAdapters) {
            dropAdapter(jobId, dataverseName, adapter.getAdapterIdentifier().getName());
        }
        List<Feed> dataverseFeeds;
        List<FeedConnection> feedConnections;
        Feed feed;
        dataverseFeeds = getDataverseFeeds(jobId, dataverseName);
        // Drop all feeds and their connections in this dataverse.
        for (int i = 0; i < dataverseFeeds.size(); i++) {
            feed = dataverseFeeds.get(i);
            feedConnections = getFeedConnections(jobId, dataverseName, feed.getFeedName());
            for (FeedConnection feedConnection : feedConnections) {
                dropFeedConnection(jobId, dataverseName, feed.getFeedName(), feedConnection.getDatasetName());
            }
            dropFeed(jobId, dataverseName, feed.getFeedName());
        }
        List<FeedPolicyEntity> feedPolicies = getDataversePolicies(jobId, dataverseName);
        if (feedPolicies != null && !feedPolicies.isEmpty()) {
            // Drop all feed ingestion policies in this dataverse.
            for (FeedPolicyEntity feedPolicy : feedPolicies) {
                dropFeedPolicy(jobId, dataverseName, feedPolicy.getPolicyName());
            }
        }
        // Delete the dataverse entry from the 'dataverse' dataset.
        ITupleReference searchKey = createTuple(dataverseName);
        // As a side effect, acquires an S lock on the 'dataverse' dataset
        // on behalf of txnId.
        ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
        // TODO: Change this to be a BTree-specific exception, e.g.,
        // BTreeKeyDoesNotExistException.
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
            throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
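Taken together, dropDataverse cascades in dependency order: datasets first, then datatypes, functions, adapters, feeds (each with its feed connections), and finally the feed policies, before the dataverse's own tuple is deleted. Only once every dependent entity is gone does the method remove the 'dataverse' record; the HYRACKS/UPDATE_OR_DELETE_NON_EXISTENT_KEY check in the catch block then translates a missing-key storage error into the user-facing "doesn't exist" message.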