use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
the class LangExpressionToPlanTranslator method visit.
@Override
public Pair<ILogicalOperator, LogicalVariable> visit(LetClause lc, Mutable<ILogicalOperator> tupSource)
        throws CompilationException {
    LogicalVariable v;
    ILogicalOperator returnedOp;
    if (lc.getBindingExpr().getKind() == Kind.VARIABLE_EXPRESSION) {
        v = context.newVarFromExpression(lc.getVarExpr());
        LogicalVariable prev = context.getVar(((VariableExpr) lc.getBindingExpr()).getVar().getId());
        returnedOp = new AssignOperator(v, new MutableObject<>(new VariableReferenceExpression(prev)));
        returnedOp.getInputs().add(tupSource);
    } else {
        v = context.newVarFromExpression(lc.getVarExpr());
        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo =
                langExprToAlgExpression(lc.getBindingExpr(), tupSource);
        returnedOp = new AssignOperator(v, new MutableObject<>(eo.first));
        returnedOp.getInputs().add(eo.second);
    }
    return new Pair<>(returnedOp, v);
}
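The Beam-repackaged MutableObject is a copy of org.apache.commons.lang3.mutable.MutableObject, so its behavior can be shown with plain commons-lang3. Below is a minimal sketch (the class name and string values are illustrative, not AsterixDB code) of why Algebricks keeps expressions and operators behind Mutable references: every holder of the reference observes an in-place replacement, which is what lets rewrite rules swap subexpressions without rebuilding the plan.

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

public class MutableRefDemo {
    public static void main(String[] args) {
        // The translator above wraps each expression in a MutableObject like this.
        Mutable<String> exprRef = new MutableObject<>("varRef(prev)");
        // An operator keeps the Mutable holder, not the value itself.
        Mutable<String> heldByOperator = exprRef;
        // A rewrite "rule" replaces the expression through the shared reference.
        exprRef.setValue("constant(42)");
        System.out.println(heldByOperator.getValue()); // prints constant(42)
    }
}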
use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
the class QueryTranslator method handleCreateDatasetStatement.
public void handleCreateDatasetStatement(MetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws CompilationException, Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    DatasetDecl dd = (DatasetDecl) stmt;
    String dataverseName = getActiveDataverse(dd.getDataverse());
    String datasetName = dd.getName().getValue();
    DatasetType dsType = dd.getDatasetType();
    String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
    String itemTypeName = dd.getItemTypeName().getValue();
    String metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
    String metaItemTypeName = dd.getMetaItemTypeName().getValue();
    Identifier ngNameId = dd.getNodegroupName();
    String nodegroupName = ngNameId == null ? null : ngNameId.getValue();
    String compactionPolicy = dd.getCompactionPolicy();
    Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
    boolean defaultCompactionPolicy = compactionPolicy == null;
    boolean temp = dd.getDatasetDetailsDecl().isTemp();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createDatasetBegin(metadataProvider.getLocks(), dataverseName,
            itemTypeDataverseName, itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
            metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy,
            dataverseName + "." + datasetName, defaultCompactionPolicy);
    Dataset dataset = null;
    try {
        IDatasetDetails datasetDetails = null;
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds != null) {
            if (dd.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A dataset with name " + datasetName + " already exists.");
            }
        }
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                itemTypeDataverseName, itemTypeName);
        if (dt == null) {
            throw new AlgebricksException("Type " + itemTypeName + " could not be found.");
        }
        String ngName = ngNameId != null ? ngNameId.getValue()
                : configureNodegroupForDataset(appCtx, dd.getHints(), dataverseName, datasetName, metadataProvider);
        if (compactionPolicy == null) {
            compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
            compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
        } else {
            validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
        }
        switch (dd.getDatasetType()) {
            case INTERNAL:
                IAType itemType = dt.getDatatype();
                if (itemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset type has to be a record type.");
                }
                IAType metaItemType = null;
                if (metaItemTypeDataverseName != null && metaItemTypeName != null) {
                    metaItemType = metadataProvider.findType(metaItemTypeDataverseName, metaItemTypeName);
                }
                if (metaItemType != null && metaItemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset meta type has to be a record type.");
                }
                ARecordType metaRecType = (ARecordType) metaItemType;
                List<List<String>> partitioningExprs =
                        ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs();
                List<Integer> keySourceIndicators =
                        ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getKeySourceIndicators();
                boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
                ARecordType aRecordType = (ARecordType) itemType;
                List<IAType> partitioningTypes = ValidateUtil.validatePartitioningExpressions(aRecordType,
                        metaRecType, partitioningExprs, keySourceIndicators, autogenerated);
                List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
                if (filterField != null) {
                    ValidateUtil.validateFilterField(aRecordType, filterField);
                }
                // If the dataset has a filter and the user didn't specify a merge policy,
                // pick correlated-prefix as the default merge policy. (compactionPolicy has
                // already been defaulted above, so test the defaultCompactionPolicy flag:
                // compactionPolicy == null can no longer be true at this point.)
                if (defaultCompactionPolicy && filterField != null) {
                    compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
                    compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
                }
                datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
                        InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
                        keySourceIndicators, partitioningTypes, autogenerated, filterField, temp);
                break;
            case EXTERNAL:
                String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
                Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
                datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(), TransactionState.COMMIT);
                break;
            default:
                throw new CompilationException("Unknown dataset type " + dd.getDatasetType());
        }
        // #. initialize DatasetIdFactory if it is not initialized.
        if (!DatasetIdFactory.isInitialized()) {
            DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
        }
        // #. add a new dataset with PendingAddOp
        dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName,
                metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy, compactionPolicyProperties,
                datasetDetails, dd.getHints(), dsType, DatasetIdFactory.generateDatasetId(),
                MetadataUtil.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        if (dd.getDatasetType() == DatasetType.INTERNAL) {
            JobSpecification jobSpec = DatasetUtil.createDatasetJobSpec(dataset, metadataProvider);
            // #. make metadataTxn commit before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
            // #. runJob
            JobUtils.runJob(hcc, jobSpec, true);
            // #. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
        }
        // #. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
        MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
        dataset.setPendingOp(MetadataUtil.PENDING_NO_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations
            // remove the index in NC
            // [Notice]
            // As long as we updated (and committed) metadata, we should remove any effect of the job
            // because an exception occurred during runJob.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                JobSpecification jobSpec = DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in an inconsistent state: pending dataset ("
                        + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
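The MutableObject<ProgressState> above serves as a progress marker: the happy path advances it, and the catch block consults it to decide which compensating actions are needed. A hedged, self-contained sketch of that pattern follows; Step and the printed actions are illustrative stand-ins for the metadata and job operations, not AsterixDB APIs.

import org.apache.commons.lang3.mutable.MutableObject;

public class ProgressMarkerDemo {
    enum Step { NO_PROGRESS, METADATA_RECORD_ADDED }

    public static void main(String[] args) {
        MutableObject<Step> progress = new MutableObject<>(Step.NO_PROGRESS);
        try {
            System.out.println("adding pending metadata record");
            progress.setValue(Step.METADATA_RECORD_ADDED);
            throw new RuntimeException("runJob failed"); // simulate the failure path
        } catch (RuntimeException e) {
            // Only compensate for work that actually happened.
            if (progress.getValue() == Step.METADATA_RECORD_ADDED) {
                System.out.println("compensating: dropping pending record");
            }
        }
    }
}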
use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
the class QueryTranslator method asyncCreateAndRunJob.
private void asyncCreateAndRunJob(IHyracksClientConnection hcc, IStatementCompiler compiler, IMetadataLocker locker,
        ResultDelivery resultDelivery, String clientContextId, IStatementExecutorContext ctx,
        ResultSetId resultSetId, MutableBoolean printed) {
    Mutable<JobId> jobId = new MutableObject<>(JobId.INVALID);
    try {
        createAndRunJob(hcc, jobId, compiler, locker, resultDelivery, id -> {
            final ResultHandle handle = new ResultHandle(id, resultSetId);
            ResultUtil.printStatus(sessionOutput, AbstractQueryApiServlet.ResultStatus.RUNNING);
            ResultUtil.printResultHandle(sessionOutput, handle);
            synchronized (printed) {
                printed.setTrue();
                printed.notify();
            }
        }, clientContextId, ctx);
    } catch (Exception e) {
        if (JobId.INVALID.equals(jobId.getValue())) {
            // compilation failed
            ResultUtil.printStatus(sessionOutput, AbstractQueryApiServlet.ResultStatus.FAILED);
            ResultUtil.printError(sessionOutput.out(), e);
        } else {
            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE,
                    resultDelivery.name() + " job with id " + jobId.getValue() + " failed", e);
        }
    } finally {
        synchronized (printed) {
            if (printed.isFalse()) {
                printed.setTrue();
                printed.notify();
            }
        }
    }
}
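Here the Mutable<JobId> acts as an out-parameter: createAndRunJob publishes the job id through it, so the catch block can distinguish a compilation failure (the JobId.INVALID sentinel is still in place) from a failure of an already-submitted job. A small sketch of the idiom, with a long sentinel standing in for JobId; all names are illustrative.

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

public class OutParamDemo {
    static final long INVALID = -1L;

    static void compileAndRun(Mutable<Long> jobIdOut) {
        jobIdOut.setValue(42L); // a job id exists from here on
        throw new IllegalStateException("execution failed");
    }

    public static void main(String[] args) {
        Mutable<Long> jobId = new MutableObject<>(INVALID);
        try {
            compileAndRun(jobId);
        } catch (IllegalStateException e) {
            if (jobId.getValue() == INVALID) {
                System.out.println("failed before job submission: " + e.getMessage());
            } else {
                System.out.println("job " + jobId.getValue() + " failed: " + e.getMessage());
            }
        }
    }
}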
use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
the class MergeAggregationExpressionFactory method createMergeAggregation.
@Override
public ILogicalExpression createMergeAggregation(LogicalVariable originalProducedVar, ILogicalExpression expr,
        IOptimizationContext env) throws AlgebricksException {
    AggregateFunctionCallExpression agg = (AggregateFunctionCallExpression) expr;
    FunctionIdentifier fid = agg.getFunctionIdentifier();
    VariableReferenceExpression tempVarExpr = new VariableReferenceExpression(originalProducedVar);
    List<Mutable<ILogicalExpression>> arguments = new ArrayList<>();
    Mutable<ILogicalExpression> mutableExpression = new MutableObject<>(tempVarExpr);
    arguments.add(mutableExpression);
    // For a global aggregate, the merge function is ALWAYS the same as the original aggregate function.
    FunctionIdentifier mergeFid = BuiltinFunctions.isGlobalAggregateFunction(fid) ? fid
            : BuiltinFunctions.getIntermediateAggregateFunction(fid);
    if (mergeFid == null) {
        // No merge function has been implemented for this local-side aggregate function.
        return null;
    }
    return BuiltinFunctions.makeAggregateFunctionExpression(mergeFid, arguments);
}
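Algebricks function-call expressions take their arguments as List<Mutable<ILogicalExpression>>, which is why the variable reference above is boxed in a MutableObject before being handed to makeAggregateFunctionExpression. A minimal sketch of that wrapping step, with String standing in for ILogicalExpression to keep the example self-contained:

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

public class WrapArgsDemo {
    public static void main(String[] args) {
        List<Mutable<String>> arguments = new ArrayList<>();
        // Box each argument before adding it to the argument list.
        arguments.add(new MutableObject<>("varRef(originalProducedVar)"));
        // A later rewrite can still replace an argument in place.
        arguments.get(0).setValue("varRef(mergedVar)");
        System.out.println(arguments.get(0).getValue());
    }
}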
use of org.apache.beam.repackaged.core.org.apache.commons.lang3.mutable.MutableObject in project asterixdb by apache.
the class AbstractIntroduceGroupByCombinerRule method opToPush.
private GroupByOperator opToPush(GroupByOperator gbyOp, BookkeepingInfo bi, IOptimizationContext context)
        throws AlgebricksException {
    // Hook up input to new group-by.
    Mutable<ILogicalOperator> opRef3 = gbyOp.getInputs().get(0);
    ILogicalOperator op3 = opRef3.getValue();
    GroupByOperator newGbyOp = new GroupByOperator();
    newGbyOp.getInputs().add(new MutableObject<ILogicalOperator>(op3));
    // Copy annotations.
    Map<String, Object> annotations = newGbyOp.getAnnotations();
    annotations.putAll(gbyOp.getAnnotations());
    List<LogicalVariable> gbyVars = gbyOp.getGbyVarList();
    // Backup nested plans since tryToPushSubplan(...) may mutate them.
    List<ILogicalPlan> copiedNestedPlans = new ArrayList<>();
    for (ILogicalPlan nestedPlan : gbyOp.getNestedPlans()) {
        ILogicalPlan copiedNestedPlan = OperatorManipulationUtil.deepCopy(nestedPlan, gbyOp);
        OperatorManipulationUtil.computeTypeEnvironment(copiedNestedPlan, context);
        copiedNestedPlans.add(copiedNestedPlan);
    }
    for (ILogicalPlan p : gbyOp.getNestedPlans()) {
        // NOTE: tryToPushSubplan(...) can mutate the nested subplan p.
        Pair<Boolean, ILogicalPlan> bip = tryToPushSubplan(p, gbyOp, newGbyOp, bi, gbyVars, context);
        if (!bip.first) {
            // For now, if we cannot push everything, give up.
            // Reset the group-by operator with the backed-up nested plans.
            gbyOp.getNestedPlans().clear();
            gbyOp.getNestedPlans().addAll(copiedNestedPlans);
            return null;
        }
        ILogicalPlan pushedSubplan = bip.second;
        if (pushedSubplan != null) {
            newGbyOp.getNestedPlans().add(pushedSubplan);
        }
    }
    ArrayList<LogicalVariable> newOpGbyList = new ArrayList<>();
    ArrayList<LogicalVariable> replGbyList = new ArrayList<>();
    // Find the maximal sequence of variables; each list must extend the accumulated prefix.
    for (Map.Entry<GroupByOperator, List<LogicalVariable>> e : bi.modifyGbyMap.entrySet()) {
        List<LogicalVariable> varList = e.getValue();
        boolean see1 = true;
        int sz1 = newOpGbyList.size();
        int i = 0;
        for (LogicalVariable v : varList) {
            if (see1) {
                if (i < sz1) {
                    LogicalVariable v2 = newOpGbyList.get(i);
                    if (v != v2) {
                        // cannot linearize
                        return null;
                    }
                } else {
                    see1 = false;
                    newOpGbyList.add(v);
                    replGbyList.add(context.newVar());
                }
                i++;
            } else {
                newOpGbyList.add(v);
                replGbyList.add(context.newVar());
            }
        }
    }
    // Set the group-by variables in the new operator.
    int n = newOpGbyList.size();
    for (int i = 0; i < n; i++) {
        newGbyOp.addGbyExpression(replGbyList.get(i), new VariableReferenceExpression(newOpGbyList.get(i)));
        VariableUtilities.substituteVariables(gbyOp, newOpGbyList.get(i), replGbyList.get(i), false, context);
    }
    // Set the global flag to false.
    newGbyOp.setGlobal(false);
    // Set the group-all flag.
    newGbyOp.setGroupAll(gbyOp.isGroupAll());
    return newGbyOp;
}
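The combiner rule works because operator inputs are Mutable references: a new local group-by can be spliced between the global group-by and its child by rewriting a single reference, exactly the kind of edit new MutableObject<ILogicalOperator>(op3) above enables. A self-contained sketch of that splice, where Op is an illustrative stand-in for ILogicalOperator:

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

public class SpliceDemo {
    static class Op {
        final String name;
        final List<Mutable<Op>> inputs = new ArrayList<>();
        Op(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        Op scan = new Op("scan");
        Op globalGby = new Op("global-group-by");
        globalGby.inputs.add(new MutableObject<>(scan));

        // Introduce a local (combiner) group-by under the global one.
        Op localGby = new Op("local-group-by");
        Mutable<Op> childRef = globalGby.inputs.get(0);
        localGby.inputs.add(new MutableObject<>(childRef.getValue()));
        childRef.setValue(localGby); // the global group-by now reads from the combiner
        System.out.println(globalGby.inputs.get(0).getValue().name); // prints local-group-by
    }
}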