Example usage of org.apache.asterix.active.EntityId in the Apache AsterixDB project:
class QueryTranslator, method handleDropFeedStatement.
/**
 * Handles a DROP FEED statement: verifies the feed exists (honoring IF EXISTS),
 * refuses to drop a feed that is currently active, otherwise removes the feed's
 * storage via a Hyracks job and deletes its metadata entry.
 *
 * @param metadataProvider provider bound to the metadata transaction created here
 * @param stmt             the parsed statement; must be a {@link FeedDropStatement}
 * @param hcc              connection used to run the storage-removal job
 * @throws Exception if the feed is active, does not exist (without IF EXISTS),
 *                   or the metadata/job operations fail
 */
protected void handleDropFeedStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    FeedDropStatement stmtFeedDrop = (FeedDropStatement) stmt;
    String dataverseName = getActiveDataverse(stmtFeedDrop.getDataverseName());
    String feedName = stmtFeedDrop.getFeedName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Acquire the metadata locks before touching feed metadata; released in finally.
    MetadataLockManager.INSTANCE.dropFeedBegin(metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + feedName);
    try {
        Feed feed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, feedName);
        if (feed == null) {
            if (!stmtFeedDrop.getIfExists()) {
                throw new AlgebricksException("There is no feed with this name " + feedName + ".");
            }
            // IF EXISTS: nothing to drop, commit the (empty) transaction and leave.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return;
        }
        EntityId feedId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
        ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
        ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
        FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(feedId);
        if (listener != null) {
            // A registered listener means the feed is running; dropping it now would
            // orphan the connected datasets' ingestion pipelines.
            throw new AlgebricksException("Feed " + feedId
                    + " is currently active and connected to the following dataset(s) \n" + listener.toString());
        } else {
            // Reuse the feed already fetched above instead of re-reading it from metadata.
            JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(metadataProvider, feed);
            JobUtils.runJob(hcc, spec, true);
            MetadataManager.INSTANCE.dropFeed(mdTxnCtx, dataverseName, feedName);
        }
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Removed feed " + feedId);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // Roll back the metadata transaction, then surface the original failure.
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Example usage of org.apache.asterix.active.EntityId in the Apache AsterixDB project:
class QueryTranslator, method handleStartFeedStatement.
/**
 * Handles a START FEED statement: validates the feed and its connections,
 * builds the feed ingestion job, registers an active-entity listener, and
 * submits the job, optionally waiting for the STARTED state.
 *
 * @param metadataProvider provider bound to the metadata transaction created here
 * @param stmt             the parsed statement; must be a {@link StartFeedStatement}
 * @param hcc              connection used to submit the feed job
 * @throws Exception if the feed is already started, validation fails, or job
 *                   compilation/submission fails
 */
private void handleStartFeedStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    StartFeedStatement sfs = (StartFeedStatement) stmt;
    String dataverseName = getActiveDataverse(sfs.getDataverseName());
    String feedName = sfs.getFeedName().getValue();
    // Transaction handler
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // Runtime handler
    EntityId entityId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
    // Feed & Feed Connections
    Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName,
            metadataProvider.getMetadataTxnContext());
    List<FeedConnection> feedConnections = MetadataManager.INSTANCE
            .getFeedConections(metadataProvider.getMetadataTxnContext(), dataverseName, feedName);
    ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
    IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
    DefaultStatementExecutorFactory qtFactory = new DefaultStatementExecutorFactory();
    ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
    ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
    // A registered listener means the feed is already running.
    // NOTE(review): this check happens before the feed locks are acquired below —
    // presumably a benign race; confirm against the lock manager's contract.
    FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(entityId);
    if (listener != null) {
        throw new AlgebricksException("Feed " + feedName + " is started already.");
    }
    // Start: lock the feed and every connected dataset for the duration of job setup.
    MetadataLockManager.INSTANCE.startFeedBegin(metadataProvider.getLocks(), dataverseName,
            dataverseName + "." + feedName, feedConnections);
    try {
        // Prepare policy: collect the target datasets of all feed connections.
        List<IDataset> datasets = new ArrayList<>();
        for (FeedConnection connection : feedConnections) {
            Dataset ds = metadataProvider.findDataset(connection.getDataverseName(), connection.getDatasetName());
            datasets.add(ds);
        }
        // Compile the combined ingestion job for the feed and all its connections.
        org.apache.commons.lang3.tuple.Pair<JobSpecification, AlgebricksAbsolutePartitionConstraint> jobInfo =
                FeedOperations.buildStartFeedJob(sessionOutput, metadataProvider, feed, feedConnections,
                        compilationProvider, storageComponentProvider, qtFactory, hcc);
        JobSpecification feedJob = jobInfo.getLeft();
        // Register the listener before submitting so lifecycle events are not missed.
        listener = new FeedEventsListener(appCtx, entityId, datasets, jobInfo.getRight().getLocations());
        activeEventHandler.registerListener(listener);
        IActiveEventSubscriber eventSubscriber = listener.subscribe(ActivityState.STARTED);
        // Tag the job so the notification handler can route its events to this entity.
        feedJob.setProperty(ActiveJobNotificationHandler.ACTIVE_ENTITY_PROPERTY_NAME, entityId);
        JobUtils.runJob(hcc, feedJob,
                Boolean.valueOf(metadataProvider.getConfig().get(StartFeedStatement.WAIT_FOR_COMPLETION)));
        // Block until the feed reaches STARTED (or the subscriber is otherwise released).
        eventSubscriber.sync();
        LOGGER.log(Level.INFO, "Submitted");
    } catch (Exception e) {
        // Roll back the metadata transaction and undo the listener registration.
        abort(e, e, mdTxnCtx);
        if (listener != null) {
            activeEventHandler.unregisterListener(listener);
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Example usage of org.apache.asterix.active.EntityId in the Apache AsterixDB project:
class InputHandlerTest, method createInputHandler.
/**
 * Builds a {@link FeedRuntimeInputHandler} wired to a mocked tuple accessor,
 * using the test fixture's DATAVERSE/FEED/DATASET identifiers.
 *
 * @param ctx       task context for the handler
 * @param writer    downstream frame writer
 * @param fpa       feed policy accessor controlling spill/discard behavior
 * @param framePool shared frame memory pool
 * @return a fully constructed input handler for the COLLECT runtime, partition 0
 * @throws HyracksDataException if the handler cannot be created
 */
private FeedRuntimeInputHandler createInputHandler(IHyracksTaskContext ctx, IFrameWriter writer, FeedPolicyAccessor fpa, ConcurrentFramePool framePool) throws HyracksDataException {
    // Identify the test feed and its connection to the test dataset.
    EntityId entityId = new EntityId(FeedUtils.FEED_EXTENSION_NAME, DATAVERSE, FEED);
    ActiveRuntimeId collectRuntimeId = new ActiveRuntimeId(entityId, FeedRuntimeType.COLLECT.toString(), 0);
    FeedConnectionId feedConnectionId = new FeedConnectionId(entityId, DATASET);
    // The accessor is irrelevant to these tests, so a bare mock suffices.
    FrameTupleAccessor mockAccessor = Mockito.mock(FrameTupleAccessor.class);
    return new FeedRuntimeInputHandler(ctx, feedConnectionId, collectRuntimeId, writer, fpa, mockAccessor, framePool);
}
Example usage of org.apache.asterix.active.EntityId in the Apache AsterixDB project:
class SubscribeFeedStatement, method initialize.
/**
 * Initializes this statement by synthesizing and parsing the AQL that connects
 * the receiving feed to its target dataset: a {@code feed-collect} source, an
 * optional chain of applied functions, and an {@code insert into} sink. The
 * resulting {@link Query} is stored in {@code this.query}.
 *
 * @param mdTxnCtx active metadata transaction used to look up the feed,
 *                 its output type, and any applied functions
 * @throws MetadataException if the feed cannot be found or the generated
 *                           statement fails to parse
 */
public void initialize(MetadataTransactionContext mdTxnCtx) throws MetadataException {
    this.query = new Query(false);
    EntityId sourceFeedId = connectionRequest.getReceivingFeedId();
    Feed subscriberFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx,
            connectionRequest.getReceivingFeedId().getDataverse(),
            connectionRequest.getReceivingFeedId().getEntityName());
    if (subscriberFeed == null) {
        // Report the id that was looked up; subscriberFeed is null here, so
        // printing it (as the original code did) always rendered "null".
        throw new IllegalStateException(" Subscriber feed " + sourceFeedId + " not found.");
    }
    String feedOutputType = getOutputType(mdTxnCtx);
    // Build the connect statement as AQL text, then parse it below.
    StringBuilder builder = new StringBuilder();
    builder.append("use dataverse " + sourceFeedId.getDataverse() + ";\n");
    builder.append("set" + " " + FunctionUtil.IMPORT_PRIVATE_FUNCTIONS + " " + "'" + Boolean.TRUE + "'" + ";\n");
    builder.append("set" + " " + FeedActivityDetails.FEED_POLICY_NAME + " " + "'"
            + connectionRequest.getPolicy() + "'" + ";\n");
    builder.append("insert into dataset " + connectionRequest.getTargetDataset() + " ");
    builder.append(" (" + " for $x in feed-collect ('" + sourceFeedId.getDataverse() + "'" + "," + "'"
            + sourceFeedId.getEntityName() + "'" + "," + "'"
            + connectionRequest.getReceivingFeedId().getEntityName() + "'" + "," + "'"
            + connectionRequest.getSubscriptionLocation().name() + "'" + "," + "'"
            + connectionRequest.getTargetDataset() + "'" + "," + "'" + feedOutputType + "'" + ")");
    List<FunctionSignature> functionsToApply = connectionRequest.getFunctionsToApply();
    // A null list previously fell into the else-branch and raised an NPE in the
    // for-loop; treat "no functions" (null or empty) uniformly.
    if ((functionsToApply == null) || functionsToApply.isEmpty()) {
        builder.append(" return $x");
    } else {
        // Chain the applied functions: each one binds $y<i> := f($y<i-1>),
        // starting from the collected record $x.
        Function function;
        String rValueName = "x";
        String lValueName = "y";
        int variableIndex = 0;
        for (FunctionSignature appliedFunction : functionsToApply) {
            function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
            variableIndex++;
            switch (function.getLanguage().toUpperCase()) {
                // AQL and Java functions are emitted identically; the original
                // duplicated this body across two cases.
                case Function.LANGUAGE_AQL:
                case Function.LANGUAGE_JAVA:
                    builder.append(" let " + "$" + lValueName + variableIndex + ":=" + function.getName() + "("
                            + "$" + rValueName + ")");
                    rValueName = lValueName + variableIndex;
                    break;
                default:
                    // NOTE(review): other languages are silently skipped, yet
                    // variableIndex still advances, so the final "return"
                    // references an unbound variable — TODO confirm intended.
                    break;
            }
            builder.append("\n");
        }
        builder.append("return $" + lValueName + variableIndex);
    }
    builder.append(")");
    builder.append(";");
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Connect feed statement translated to\n" + builder.toString());
    }
    IParser parser = parserFactory.createParser(new StringReader(builder.toString()));
    List<Statement> statements;
    try {
        statements = parser.parse();
        query = ((InsertStatement) statements.get(INSERT_STATEMENT_POS)).getQuery();
    } catch (CompilationException pe) {
        // Preserve the parse failure as the cause.
        throw new MetadataException(pe);
    }
}
Aggregations of EntityId usages end here.