use of org.apache.asterix.metadata.declared.MetadataProvider in project asterixdb by apache.
the class FuzzyEqRule method rewritePost.
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
// current operator is INNERJOIN or LEFTOUTERJOIN or SELECT
Mutable<ILogicalExpression> expRef;
if (op.getOperatorTag() == LogicalOperatorTag.INNERJOIN || op.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) op;
expRef = joinOp.getCondition();
} else if (op.getOperatorTag() == LogicalOperatorTag.SELECT) {
SelectOperator selectOp = (SelectOperator) op;
expRef = selectOp.getCondition();
} else {
return false;
}
MetadataProvider metadataProvider = ((MetadataProvider) context.getMetadataProvider());
IVariableTypeEnvironment env = context.getOutputTypeEnvironment(op);
if (expandFuzzyEq(expRef, context, env, metadataProvider)) {
context.computeAndSetTypeEnvironmentForOperator(op);
return true;
}
return false;
}
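This snippet shows the access pattern shared by every optimizer rule in this listing: the MetadataProvider is reached by casting the provider held by the IOptimizationContext, and the operator's type environment is recomputed once the condition has been rewritten. A minimal sketch of that skeleton, reusing only the calls shown above (the actual rewrite of the condition is elided):
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
        return false; // this sketch only looks at SELECT conditions
    }
    MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
    // ... rewrite the SELECT condition using metadataProvider ...
    context.computeAndSetTypeEnvironmentForOperator(op);
    return true; // by convention, return true only when the plan was actually changed
}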
use of org.apache.asterix.metadata.declared.MetadataProvider in project asterixdb by apache.
the class GlobalRecoveryManager method recoverDataset.
private MetadataTransactionContext recoverDataset(ICcApplicationContext appCtx, MetadataTransactionContext mdTxnCtx, Dataverse dataverse) throws Exception {
if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
MetadataProvider metadataProvider = new MetadataProvider(appCtx, dataverse, componentProvider);
try {
List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverse.getDataverseName());
for (Dataset dataset : datasets) {
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
// External dataset
// Get indexes
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataset.getDataverseName(), dataset.getDatasetName());
// Get the state of the dataset
ExternalDatasetDetails dsd = (ExternalDatasetDetails) dataset.getDatasetDetails();
TransactionState datasetState = dsd.getState();
if (!indexes.isEmpty()) {
if (datasetState == TransactionState.BEGIN) {
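// The dataset was caught in the middle of a files-index operation: drop every file still marked
// as pending before the dataset is moved back to COMMIT state below.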
List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
// 1. delete all pending files
for (ExternalFile file : files) {
if (file.getPendingOp() != ExternalFilePendingOp.NO_OP) {
MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
}
}
}
// 2. clean artifacts in NCs
metadataProvider.setMetadataTxnContext(mdTxnCtx);
JobSpecification jobSpec = ExternalIndexingOperations.buildAbortOp(dataset, indexes, metadataProvider);
executeHyracksJob(jobSpec);
// 3. correct the dataset state
((ExternalDatasetDetails) dataset.getDatasetDetails()).setState(TransactionState.COMMIT);
MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
} else if (datasetState == TransactionState.READY_TO_COMMIT) {
List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
// if ready to commit, roll forward
// 1. commit indexes in NCs
metadataProvider.setMetadataTxnContext(mdTxnCtx);
JobSpecification jobSpec = ExternalIndexingOperations.buildRecoverOp(dataset, indexes, metadataProvider);
executeHyracksJob(jobSpec);
// 2. add pending files in metadata
for (ExternalFile file : files) {
if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
file.setPendingOp(ExternalFilePendingOp.NO_OP);
MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
} else if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) {
// find original file
for (ExternalFile originalFile : files) {
if (originalFile.getFileName().equals(file.getFileName())) {
MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, originalFile);
break;
}
}
} else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
// find original file
for (ExternalFile originalFile : files) {
if (originalFile.getFileName().equals(file.getFileName())) {
MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, originalFile);
originalFile.setSize(file.getSize());
MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, originalFile);
}
}
}
// 3. correct the dataset state
((ExternalDatasetDetails) dataset.getDatasetDetails()).setState(TransactionState.COMMIT);
MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
}
}
}
}
} finally {
metadataProvider.getLocks().unlock();
}
}
return mdTxnCtx;
}
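recoverDataset repeatedly binds the current metadata transaction to the MetadataProvider, runs a Hyracks job, updates the dataset, and then commits and opens a fresh metadata transaction before moving to the next dataset. A hedged sketch of that commit-and-renew step, using only calls that appear above (commitAndRenew is a hypothetical helper name):
// Hypothetical helper: commit the current metadata transaction and start a new one,
// mirroring the inline commit/begin pair that follows each dataset correction above.
private MetadataTransactionContext commitAndRenew(MetadataTransactionContext mdTxnCtx) throws Exception {
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    return MetadataManager.INSTANCE.beginTransaction();
}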
use of org.apache.asterix.metadata.declared.MetadataProvider in project asterixdb by apache.
the class IntroduceSecondaryIndexInsertDeleteRule method rewritePost.
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
if (op0.getOperatorTag() != LogicalOperatorTag.DELEGATE_OPERATOR && op0.getOperatorTag() != LogicalOperatorTag.SINK) {
return false;
}
if (op0.getOperatorTag() == LogicalOperatorTag.DELEGATE_OPERATOR) {
DelegateOperator eOp = (DelegateOperator) op0;
if (!(eOp.getDelegate() instanceof CommitOperator)) {
return false;
}
}
AbstractLogicalOperator op1 = (AbstractLogicalOperator) op0.getInputs().get(0).getValue();
if (op1.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT) {
return false;
}
/** find the record variable */
InsertDeleteUpsertOperator primaryIndexModificationOp = (InsertDeleteUpsertOperator) op0.getInputs().get(0).getValue();
boolean isBulkload = primaryIndexModificationOp.isBulkload();
ILogicalExpression newRecordExpr = primaryIndexModificationOp.getPayloadExpression().getValue();
List<Mutable<ILogicalExpression>> newMetaExprs = primaryIndexModificationOp.getAdditionalNonFilteringExpressions();
LogicalVariable newRecordVar;
LogicalVariable newMetaVar = null;
/**
* inputOp is the assign operator which extracts primary keys from the input
* variables (record or meta)
*/
AbstractLogicalOperator inputOp = (AbstractLogicalOperator) primaryIndexModificationOp.getInputs().get(0).getValue();
newRecordVar = getRecordVar(context, inputOp, newRecordExpr, 0);
if (newMetaExprs != null && !newMetaExprs.isEmpty()) {
if (newMetaExprs.size() > 1) {
throw new AlgebricksException("Number of meta records can't be more than 1. Number of meta records found = " + newMetaExprs.size());
}
newMetaVar = getRecordVar(context, inputOp, newMetaExprs.get(0).getValue(), 1);
}
/*
* At this point, we have the record variable and the insert/delete/upsert operator
* Note: We have two operators:
* 1. An InsertDeleteOperator (primary)
* 2. An IndexInsertDeleteOperator (secondary)
* The current primaryIndexModificationOp is of the first type
*/
DataSource datasetSource = (DataSource) primaryIndexModificationOp.getDataSource();
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
String dataverseName = datasetSource.getId().getDataverseName();
String datasetName = datasetSource.getId().getDatasourceName();
Dataset dataset = mp.findDataset(dataverseName, datasetName);
if (dataset == null) {
throw new AlgebricksException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
}
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
return false;
}
// Create operators for secondary index insert / delete.
String itemTypeName = dataset.getItemTypeName();
IAType itemType = mp.findType(dataset.getItemTypeDataverseName(), itemTypeName);
if (itemType.getTypeTag() != ATypeTag.OBJECT) {
throw new AlgebricksException("Only record types can be indexed.");
}
ARecordType recType = (ARecordType) itemType;
// meta type
ARecordType metaType = null;
if (dataset.hasMetaPart()) {
metaType = (ARecordType) mp.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
}
List<Index> indexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
// Set the top operator pointer to the primary InsertDeleteUpsertOperator
ILogicalOperator currentTop = primaryIndexModificationOp;
boolean hasSecondaryIndex = false;
// Put an n-gram or a keyword index in the later stage of index-update,
// since TokenizeOperator needs to be involved.
Collections.sort(indexes, (o1, o2) -> o1.getIndexType().ordinal() - o2.getIndexType().ordinal());
// At this point, we have the data type info, and the indexes info as well
int secondaryIndexTotalCnt = indexes.size() - 1;
if (secondaryIndexTotalCnt > 0) {
op0.getInputs().clear();
} else {
return false;
}
// Initialize inputs to the SINK operator; op0 (the SINK) is now without input.
// Prepare filtering field information (this is the filter created using the "filter with" keyword in the
// create dataset DDL).
List<String> filteringFields = ((InternalDatasetDetails) dataset.getDatasetDetails()).getFilterField();
List<LogicalVariable> filteringVars;
List<Mutable<ILogicalExpression>> filteringExpressions = null;
if (filteringFields != null) {
// The filter field var already exists. we can simply get it from the insert op
filteringVars = new ArrayList<>();
filteringExpressions = new ArrayList<>();
for (Mutable<ILogicalExpression> filteringExpression : primaryIndexModificationOp.getAdditionalFilteringExpressions()) {
filteringExpression.getValue().getUsedVariables(filteringVars);
for (LogicalVariable var : filteringVars) {
filteringExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
}
}
}
// Replicate Operator is applied only when doing the bulk-load.
ReplicateOperator replicateOp = null;
if (secondaryIndexTotalCnt > 1 && primaryIndexModificationOp.isBulkload()) {
// Split the logical plan into "each secondary index update branch"
// to replicate each <PK,OBJECT> pair.
replicateOp = new ReplicateOperator(secondaryIndexTotalCnt);
replicateOp.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
replicateOp.setExecutionMode(ExecutionMode.PARTITIONED);
context.computeAndSetTypeEnvironmentForOperator(replicateOp);
currentTop = replicateOp;
}
/*
* The two maps are used to store variables to which [casted] field access is assigned.
* One for the beforeOp record and the other for the new record.
* There are two uses for these maps:
* 1. used for shared fields in indexes with overlapping keys.
* 2. used for setting variables of secondary keys for each secondary index operator.
*/
Map<IndexFieldId, LogicalVariable> fieldVarsForBeforeOperation = new HashMap<>();
Map<IndexFieldId, LogicalVariable> fieldVarsForNewRecord = new HashMap<>();
/*
* if the index is enforcing field types (For open indexes), We add a cast
* operator to ensure type safety
*/
try {
if (primaryIndexModificationOp.getOperation() == Kind.INSERT || primaryIndexModificationOp.getOperation() == Kind.UPSERT || /* Actually, delete should not be here but it is now until issue
* https://issues.apache.org/jira/browse/ASTERIXDB-1507
* is solved
*/
primaryIndexModificationOp.getOperation() == Kind.DELETE) {
injectFieldAccessesForIndexes(context, dataset, indexes, fieldVarsForNewRecord, recType, metaType, newRecordVar, newMetaVar, primaryIndexModificationOp, false);
if (replicateOp != null) {
context.computeAndSetTypeEnvironmentForOperator(replicateOp);
}
}
if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) /* Actually, delete should be here but it is not until issue
* https://issues.apache.org/jira/browse/ASTERIXDB-1507
* is solved
*/
{
List<LogicalVariable> beforeOpMetaVars = primaryIndexModificationOp.getBeforeOpAdditionalNonFilteringVars();
LogicalVariable beforeOpMetaVar = beforeOpMetaVars == null ? null : beforeOpMetaVars.get(0);
currentTop = injectFieldAccessesForIndexes(context, dataset, indexes, fieldVarsForBeforeOperation, recType, metaType, primaryIndexModificationOp.getBeforeOpRecordVar(), beforeOpMetaVar, currentTop, true);
}
} catch (AsterixException e) {
throw new AlgebricksException(e);
}
// Build one index update operator per secondary index.
for (Index index : indexes) {
if (!index.isSecondaryIndex()) {
continue;
}
hasSecondaryIndex = true;
// Get the secondary fields names and types
List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
List<LogicalVariable> secondaryKeyVars = new ArrayList<>();
List<Mutable<ILogicalExpression>> secondaryExpressions = new ArrayList<>();
List<Mutable<ILogicalExpression>> beforeOpSecondaryExpressions = new ArrayList<>();
ILogicalOperator replicateOutput;
for (int i = 0; i < secondaryKeyFields.size(); i++) {
IndexFieldId indexFieldId = new IndexFieldId(index.getKeyFieldSourceIndicators().get(i), secondaryKeyFields.get(i));
LogicalVariable skVar = fieldVarsForNewRecord.get(indexFieldId);
secondaryKeyVars.add(skVar);
secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(skVar)));
if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) {
beforeOpSecondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(fieldVarsForBeforeOperation.get(indexFieldId))));
}
}
IndexInsertDeleteUpsertOperator indexUpdate;
if (index.getIndexType() != IndexType.RTREE) {
// Create an expression per key
Mutable<ILogicalExpression> filterExpression = (primaryIndexModificationOp.getOperation() == Kind.UPSERT) ? null : createFilterExpression(secondaryKeyVars, context.getOutputTypeEnvironment(currentTop), index.isEnforcingKeyFileds());
DataSourceIndex dataSourceIndex = new DataSourceIndex(index, dataverseName, datasetName, mp);
// Introduce the TokenizeOperator only when bulk-loading and the index type is keyword or n-gram (i.e., not a B-tree).
if (index.getIndexType() != IndexType.BTREE && primaryIndexModificationOp.isBulkload()) {
// Note: this is the bulk-load case, so the upsert operation does not need to be handled here.
// Check whether the index is length-partitioned or not.
// If partitioned, [input variables to TokenizeOperator,
// token, number of token] pairs will be generated and
// fed into the IndexInsertDeleteOperator.
// If not, [input variables, token] pairs will be generated
// and fed into the IndexInsertDeleteOperator.
// Input variables are passed since TokenizeOperator is not a
// filtering operator.
boolean isPartitioned = index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX || index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX;
// Create a new logical variable - token
List<LogicalVariable> tokenizeKeyVars = new ArrayList<>();
List<Mutable<ILogicalExpression>> tokenizeKeyExprs = new ArrayList<>();
LogicalVariable tokenVar = context.newVar();
tokenizeKeyVars.add(tokenVar);
tokenizeKeyExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(tokenVar)));
// Check the field type of the secondary key.
IAType secondaryKeyType;
Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0), secondaryKeyFields.get(0), recType);
secondaryKeyType = keyPairType.first;
List<Object> varTypes = new ArrayList<>();
varTypes.add(NonTaggedFormatUtil.getTokenType(secondaryKeyType));
// For a length-partitioned index, add a token-length variable; its type is a short that carries no type info.
if (isPartitioned) {
LogicalVariable lengthVar = context.newVar();
tokenizeKeyVars.add(lengthVar);
tokenizeKeyExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(lengthVar)));
varTypes.add(BuiltinType.SHORTWITHOUTTYPEINFO);
}
// TokenizeOperator to tokenize [SK, PK] pairs
TokenizeOperator tokenUpdate = new TokenizeOperator(dataSourceIndex, primaryIndexModificationOp.getPrimaryKeyExpressions(), secondaryExpressions, tokenizeKeyVars, filterExpression, primaryIndexModificationOp.getOperation(), primaryIndexModificationOp.isBulkload(), isPartitioned, varTypes);
tokenUpdate.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
context.computeAndSetTypeEnvironmentForOperator(tokenUpdate);
replicateOutput = tokenUpdate;
indexUpdate = new IndexInsertDeleteUpsertOperator(dataSourceIndex, primaryIndexModificationOp.getPrimaryKeyExpressions(), tokenizeKeyExprs, filterExpression, primaryIndexModificationOp.getOperation(), primaryIndexModificationOp.isBulkload(), primaryIndexModificationOp.getAdditionalNonFilteringExpressions() == null ? 0 : primaryIndexModificationOp.getAdditionalNonFilteringExpressions().size());
indexUpdate.setAdditionalFilteringExpressions(filteringExpressions);
indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(tokenUpdate));
} else {
// When TokenizeOperator is not needed
indexUpdate = new IndexInsertDeleteUpsertOperator(dataSourceIndex, primaryIndexModificationOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression, primaryIndexModificationOp.getOperation(), primaryIndexModificationOp.isBulkload(), primaryIndexModificationOp.getAdditionalNonFilteringExpressions() == null ? 0 : primaryIndexModificationOp.getAdditionalNonFilteringExpressions().size());
indexUpdate.setAdditionalFilteringExpressions(filteringExpressions);
replicateOutput = indexUpdate;
// We add the necessary expressions for upsert
if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) {
indexUpdate.setBeforeOpSecondaryKeyExprs(beforeOpSecondaryExpressions);
if (filteringFields != null) {
indexUpdate.setBeforeOpAdditionalFilteringExpression(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(primaryIndexModificationOp.getBeforeOpFilterVar())));
}
}
indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
}
} else {
// Get type, dimensions and number of keys
Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0), secondaryKeyFields.get(0), recType);
IAType spatialType = keyPairType.first;
boolean isPointMBR = spatialType.getTypeTag() == ATypeTag.POINT || spatialType.getTypeTag() == ATypeTag.POINT3D;
int dimension = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
int numKeys = (isPointMBR && isBulkload) ? dimension : dimension * 2;
// Get variables and expressions
List<LogicalVariable> keyVarList = new ArrayList<>();
List<Mutable<ILogicalExpression>> keyExprList = new ArrayList<>();
for (int i = 0; i < numKeys; i++) {
LogicalVariable keyVar = context.newVar();
keyVarList.add(keyVar);
AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVars.get(0))));
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(dimension)))));
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
}
secondaryExpressions.clear();
for (LogicalVariable secondaryKeyVar : keyVarList) {
secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
}
if (isPointMBR && isBulkload) {
// For a point MBR during bulk load, the key expressions are added a second time; see createFieldPermutationForBulkLoadOp(int) for more details.
for (LogicalVariable secondaryKeyVar : keyVarList) {
secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
}
}
AssignOperator assignCoordinates = new AssignOperator(keyVarList, keyExprList);
assignCoordinates.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
context.computeAndSetTypeEnvironmentForOperator(assignCoordinates);
replicateOutput = assignCoordinates;
Mutable<ILogicalExpression> filterExpression = null;
AssignOperator originalAssignCoordinates = null;
// We do something similar for beforeOp key if the operation is an upsert
if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) {
List<LogicalVariable> originalKeyVarList = new ArrayList<>();
List<Mutable<ILogicalExpression>> originalKeyExprList = new ArrayList<>();
// we don't do any filtering since nulls are expected here and there
for (int i = 0; i < numKeys; i++) {
LogicalVariable keyVar = context.newVar();
originalKeyVarList.add(keyVar);
AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
createMBR.getArguments().add(beforeOpSecondaryExpressions.get(0));
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(dimension)))));
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
originalKeyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
}
beforeOpSecondaryExpressions.clear();
for (LogicalVariable secondaryKeyVar : originalKeyVarList) {
beforeOpSecondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
}
originalAssignCoordinates = new AssignOperator(originalKeyVarList, originalKeyExprList);
originalAssignCoordinates.getInputs().add(new MutableObject<ILogicalOperator>(assignCoordinates));
context.computeAndSetTypeEnvironmentForOperator(originalAssignCoordinates);
} else {
// We must enforce the filter if the originating spatial type is
// nullable.
boolean forceFilter = keyPairType.second;
filterExpression = createFilterExpression(keyVarList, context.getOutputTypeEnvironment(assignCoordinates), forceFilter);
}
DataSourceIndex dataSourceIndex = new DataSourceIndex(index, dataverseName, datasetName, mp);
indexUpdate = new IndexInsertDeleteUpsertOperator(dataSourceIndex, primaryIndexModificationOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression, primaryIndexModificationOp.getOperation(), primaryIndexModificationOp.isBulkload(), primaryIndexModificationOp.getAdditionalNonFilteringExpressions() == null ? 0 : primaryIndexModificationOp.getAdditionalNonFilteringExpressions().size());
indexUpdate.setAdditionalFilteringExpressions(filteringExpressions);
if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) {
// set before op secondary key expressions
if (filteringFields != null) {
indexUpdate.setBeforeOpAdditionalFilteringExpression(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(primaryIndexModificationOp.getBeforeOpFilterVar())));
}
// set filtering expressions
indexUpdate.setBeforeOpSecondaryKeyExprs(beforeOpSecondaryExpressions);
// assign --> assign beforeOp values --> secondary index upsert
indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(originalAssignCoordinates));
} else {
indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(assignCoordinates));
}
}
context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
if (!primaryIndexModificationOp.isBulkload() || secondaryIndexTotalCnt == 1) {
currentTop = indexUpdate;
} else {
replicateOp.getOutputs().add(new MutableObject<>(replicateOutput));
}
if (primaryIndexModificationOp.isBulkload()) {
// For bulk load, we connect all fanned out insert operator to a single SINK operator
op0.getInputs().add(new MutableObject<ILogicalOperator>(indexUpdate));
}
}
if (!hasSecondaryIndex) {
return false;
}
if (!primaryIndexModificationOp.isBulkload()) {
// Remove the current input to the SINK operator (it was actually already cleared above)
op0.getInputs().clear();
// Connect the last index update to the SINK
op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
}
return true;
}
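Before building any secondary-index update operators, the rule resolves the dataset, its record type, and its index list through the MetadataProvider. A condensed sketch of just those lookups, reusing the variable names from the method above:
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
Dataset dataset = mp.findDataset(dataverseName, datasetName);          // null if the dataset is unknown
IAType itemType = mp.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
List<Index> indexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());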
use of org.apache.asterix.metadata.declared.MetadataProvider in project asterixdb by apache.
the class PushFieldAccessRule method propagateFieldAccessRec.
@SuppressWarnings("unchecked")
private boolean propagateFieldAccessRec(Mutable<ILogicalOperator> opRef, IOptimizationContext context, String finalAnnot) throws AlgebricksException {
AssignOperator access = (AssignOperator) opRef.getValue();
Mutable<ILogicalOperator> opRef2 = access.getInputs().get(0);
AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
// Do not push through a PROJECT, or through an operator already compared against, unless op2 is a
// SELECT over an indexed field (such a SELECT may later be rewritten into an index search).
if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT || context.checkAndAddToAlreadyCompared(access, op2) && !(op2.getOperatorTag() == LogicalOperatorTag.SELECT && isAccessToIndexedField(access, context))) {
return false;
}
Object annotation = op2.getAnnotations().get(IS_MOVABLE);
if (annotation != null && !((Boolean) annotation)) {
return false;
}
if (tryingToPushThroughSelectionWithSameDataSource(access, op2)) {
return false;
}
if (testAndModifyRedundantOp(access, op2)) {
propagateFieldAccessRec(opRef2, context, finalAnnot);
return true;
}
List<LogicalVariable> usedInAccess = new LinkedList<>();
VariableUtilities.getUsedVariables(access, usedInAccess);
List<LogicalVariable> produced2 = new LinkedList<>();
if (op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
VariableUtilities.getLiveVariables(op2, produced2);
} else {
VariableUtilities.getProducedVariables(op2, produced2);
}
boolean pushItDown = false;
List<LogicalVariable> inter = new ArrayList<>(usedInAccess);
if (inter.isEmpty()) {
// ground value
return false;
}
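// Keep only the variables of the field access that op2 itself produces; if none remain,
// the access does not depend on op2 and can be pushed below it.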
inter.retainAll(produced2);
if (inter.isEmpty()) {
pushItDown = true;
} else if (op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
GroupByOperator g = (GroupByOperator) op2;
List<Pair<LogicalVariable, LogicalVariable>> varMappings = new ArrayList<>();
for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : g.getDecorList()) {
ILogicalExpression e = p.second.getValue();
if (e.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
LogicalVariable decorVar = GroupByOperator.getDecorVariable(p);
if (inter.contains(decorVar)) {
inter.remove(decorVar);
LogicalVariable v1 = ((VariableReferenceExpression) e).getVariableReference();
varMappings.add(new Pair<>(decorVar, v1));
}
}
}
if (inter.isEmpty()) {
boolean changed = false;
for (Pair<LogicalVariable, LogicalVariable> m : varMappings) {
LogicalVariable v2 = context.newVar();
LogicalVariable oldVar = access.getVariables().get(0);
g.getDecorList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(oldVar, new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v2))));
changed = true;
access.getVariables().set(0, v2);
VariableUtilities.substituteVariables(access, m.first, m.second, context);
}
if (changed) {
context.computeAndSetTypeEnvironmentForOperator(g);
}
usedInAccess.clear();
VariableUtilities.getUsedVariables(access, usedInAccess);
pushItDown = true;
}
}
if (pushItDown) {
if (op2.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
Mutable<ILogicalOperator> childOfSubplan = ((NestedTupleSourceOperator) op2).getDataSourceReference().getValue().getInputs().get(0);
pushAccessDown(opRef, op2, childOfSubplan, context, finalAnnot);
return true;
}
if (op2.getInputs().size() == 1 && !op2.hasNestedPlans()) {
pushAccessDown(opRef, op2, op2.getInputs().get(0), context, finalAnnot);
return true;
} else {
for (Mutable<ILogicalOperator> inp : op2.getInputs()) {
HashSet<LogicalVariable> v2 = new HashSet<>();
VariableUtilities.getLiveVariables(inp.getValue(), v2);
if (v2.containsAll(usedInAccess)) {
pushAccessDown(opRef, op2, inp, context, finalAnnot);
return true;
}
}
}
if (op2.hasNestedPlans()) {
AbstractOperatorWithNestedPlans nestedOp = (AbstractOperatorWithNestedPlans) op2;
for (ILogicalPlan plan : nestedOp.getNestedPlans()) {
for (Mutable<ILogicalOperator> root : plan.getRoots()) {
HashSet<LogicalVariable> v2 = new HashSet<>();
VariableUtilities.getLiveVariables(root.getValue(), v2);
if (v2.containsAll(usedInAccess)) {
pushAccessDown(opRef, op2, root, context, finalAnnot);
return true;
}
}
}
}
throw new AlgebricksException("Field access " + access.getExpressions().get(0).getValue() + " does not correspond to any input of operator " + op2);
} else {
// Check whether the accessed field is one of the partitioning key fields. If yes, we can equate the two variables.
if (op2.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
DataSourceScanOperator scan = (DataSourceScanOperator) op2;
int n = scan.getVariables().size();
LogicalVariable scanRecordVar = scan.getVariables().get(n - 1);
AbstractFunctionCallExpression accessFun = (AbstractFunctionCallExpression) access.getExpressions().get(0).getValue();
ILogicalExpression e0 = accessFun.getArguments().get(0).getValue();
LogicalExpressionTag tag = e0.getExpressionTag();
if (tag == LogicalExpressionTag.VARIABLE) {
VariableReferenceExpression varRef = (VariableReferenceExpression) e0;
if (varRef.getVariableReference() == scanRecordVar) {
ILogicalExpression e1 = accessFun.getArguments().get(1).getValue();
if (e1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
IDataSource<DataSourceId> dataSource = (IDataSource<DataSourceId>) scan.getDataSource();
byte dsType = ((DataSource) dataSource).getDatasourceType();
if (dsType == DataSource.Type.FEED || dsType == DataSource.Type.LOADABLE) {
return false;
}
DataSourceId asid = dataSource.getId();
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasourceName());
if (dataset == null) {
throw new AlgebricksException("Dataset " + asid.getDatasourceName() + " not found.");
}
if (dataset.getDatasetType() != DatasetType.INTERNAL) {
setAsFinal(access, context, finalAnnot);
return false;
}
ConstantExpression ce = (ConstantExpression) e1;
IAObject obj = ((AsterixConstantValue) ce.getValue()).getObject();
String fldName;
if (obj.getType().getTypeTag() == ATypeTag.STRING) {
fldName = ((AString) obj).getStringValue();
} else {
int pos = ((AInt32) obj).getIntegerValue();
String tName = dataset.getItemTypeName();
IAType t = mp.findType(dataset.getItemTypeDataverseName(), tName);
if (t.getTypeTag() != ATypeTag.OBJECT) {
return false;
}
ARecordType rt = (ARecordType) t;
if (pos >= rt.getFieldNames().length) {
setAsFinal(access, context, finalAnnot);
return false;
}
fldName = rt.getFieldNames()[pos];
}
int p = DatasetUtil.getPositionOfPartitioningKeyField(dataset, fldName);
if (p < 0) {
// not one of the partitioning fields
setAsFinal(access, context, finalAnnot);
return false;
}
LogicalVariable keyVar = scan.getVariables().get(p);
access.getExpressions().get(0).setValue(new VariableReferenceExpression(keyVar));
return true;
}
}
}
}
setAsFinal(access, context, finalAnnot);
return false;
}
}
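In the DATASOURCESCAN branch, the MetadataProvider is used to decide whether the accessed field is a partitioning key of an internal dataset; if it is, the field access is replaced by the scan's existing key variable. A condensed sketch of that final substitution, reusing the names from the method above (fldName is resolved from the constant argument as shown in the snippet):
int p = DatasetUtil.getPositionOfPartitioningKeyField(dataset, fldName);
if (p >= 0) {
    // the accessed field is a partitioning key, so reuse the primary-key variable produced by the scan
    LogicalVariable keyVar = scan.getVariables().get(p);
    access.getExpressions().get(0).setValue(new VariableReferenceExpression(keyVar));
}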
use of org.apache.asterix.metadata.declared.MetadataProvider in project asterixdb by apache.
the class SetupCommitExtensionOpRule method rewritePost.
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
if (op.getOperatorTag() != LogicalOperatorTag.DELEGATE_OPERATOR) {
return false;
}
DelegateOperator eOp = (DelegateOperator) op;
if (!(eOp.getDelegate() instanceof CommitOperator)) {
return false;
}
boolean isSink = ((CommitOperator) eOp.getDelegate()).isSink();
List<Mutable<ILogicalExpression>> primaryKeyExprs = null;
Dataset dataset = null;
AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) eOp.getInputs().get(0).getValue();
while (descendantOp != null) {
if (descendantOp.getOperatorTag() == LogicalOperatorTag.INDEX_INSERT_DELETE_UPSERT) {
IndexInsertDeleteUpsertOperator operator = (IndexInsertDeleteUpsertOperator) descendantOp;
if (!operator.isBulkload() && operator.getPrevSecondaryKeyExprs() == null) {
primaryKeyExprs = operator.getPrimaryKeyExpressions();
dataset = ((DatasetDataSource) operator.getDataSourceIndex().getDataSource()).getDataset();
break;
}
} else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE_UPSERT) {
InsertDeleteUpsertOperator insertDeleteUpsertOperator = (InsertDeleteUpsertOperator) descendantOp;
if (!insertDeleteUpsertOperator.isBulkload()) {
primaryKeyExprs = insertDeleteUpsertOperator.getPrimaryKeyExpressions();
dataset = ((DatasetDataSource) insertDeleteUpsertOperator.getDataSource()).getDataset();
break;
}
}
if (descendantOp.getInputs().isEmpty()) {
break;
}
descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
}
if (primaryKeyExprs == null) {
return false;
}
//copy primaryKeyExprs
List<LogicalVariable> primaryKeyLogicalVars = new ArrayList<>();
for (Mutable<ILogicalExpression> expr : primaryKeyExprs) {
VariableReferenceExpression varRefExpr = (VariableReferenceExpression) expr.getValue();
primaryKeyLogicalVars.add(new LogicalVariable(varRefExpr.getVariableReference().getId()));
}
// get the JobId (transaction id)
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
JobId jobId = mp.getJobId();
//create the logical and physical operator
CommitOperator commitOperator = new CommitOperator(primaryKeyLogicalVars, isSink);
CommitPOperator commitPOperator = new CommitPOperator(jobId, dataset, primaryKeyLogicalVars, isSink);
commitOperator.setPhysicalOperator(commitPOperator);
// create a DelegateOperator and put the commitOperator in it.
DelegateOperator extensionOperator = new DelegateOperator(commitOperator);
extensionOperator.setPhysicalOperator(commitPOperator);
//update plan link
extensionOperator.getInputs().add(eOp.getInputs().get(0));
context.computeAndSetTypeEnvironmentForOperator(extensionOperator);
opRef.setValue(extensionOperator);
return true;
}
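As in the other rules in this listing, the MetadataProvider is obtained from the optimization context; here it is used only to supply the transaction's JobId for the physical commit operator. A condensed sketch of that step, reusing the names from the method above:
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
JobId jobId = mp.getJobId();  // id of the job/transaction this commit belongs to
CommitPOperator commitPOperator = new CommitPOperator(jobId, dataset, primaryKeyLogicalVars, isSink);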