Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by Apache.
The class QueryTranslator, method handleConnectFeedStatement.
private void handleConnectFeedStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
FeedConnection fc;
ConnectFeedStatement cfs = (ConnectFeedStatement) stmt;
String dataverseName = getActiveDataverse(cfs.getDataverseName());
String feedName = cfs.getFeedName();
String datasetName = cfs.getDatasetName().getValue();
String policyName = cfs.getPolicy();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// Check whether the feed is alive
ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
if (activeEventHandler.getActiveEntityListener(new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName)) != null) {
throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, feedName);
}
// Transaction handling
MetadataLockManager.INSTANCE.connectFeedBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName, dataverseName + "." + feedName);
try {
// validation
FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName, datasetName, mdTxnCtx);
Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName, metadataProvider.getMetadataTxnContext());
ARecordType outputType = FeedMetadataUtil.getOutputType(feed, feed.getAdapterConfiguration(), ExternalDataConstants.KEY_TYPE_NAME);
List<FunctionSignature> appliedFunctions = cfs.getAppliedFunctions();
for (FunctionSignature func : appliedFunctions) {
if (MetadataManager.INSTANCE.getFunction(mdTxnCtx, func) == null) {
throw new CompilationException(ErrorCode.FEED_CONNECT_FEED_APPLIED_INVALID_FUNCTION, func.getName());
}
}
fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(), dataverseName, feedName, datasetName);
if (fc != null) {
throw new AlgebricksException("Feed" + feedName + " is already connected dataset " + datasetName);
}
fc = new FeedConnection(dataverseName, feedName, datasetName, appliedFunctions, policyName, outputType.toString());
MetadataManager.INSTANCE.addFeedConnection(metadataProvider.getMetadataTxnContext(), fc);
// Increase function reference count.
for (FunctionSignature funcSig : appliedFunctions) {
// The function should be cached in the metadata manager, so this operation is not that expensive.
Function func = MetadataManager.INSTANCE.getFunction(mdTxnCtx, funcSig);
func.reference();
MetadataManager.INSTANCE.updateFunction(mdTxnCtx, func);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
abort(e, e, mdTxnCtx);
throw e;
} finally {
metadataProvider.getLocks().unlock();
}
}
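The skeleton to take away from this example is the metadata-transaction discipline: begin a transaction, validate and write metadata, commit, abort and rethrow on failure, and release locks unconditionally. Below is a minimal, self-contained sketch of that shape; the Tx interface and the lock are hypothetical stand-ins, not the real MetadataManager or MetadataLockManager API.

import java.util.concurrent.locks.ReentrantLock;

public final class MetadataTxPattern {

    // Hypothetical stand-in for a metadata transaction context.
    interface Tx {
        void commit() throws Exception;
        void abort() throws Exception;
    }

    static void execute(Tx tx, ReentrantLock lock, Runnable metadataWork) throws Exception {
        lock.lock();              // connectFeedBegin(...): acquire the metadata locks
        try {
            metadataWork.run();   // validate the feed/dataset, add the FeedConnection record
            tx.commit();          // commitTransaction(mdTxnCtx)
        } catch (Exception e) {
            tx.abort();           // abort(e, e, mdTxnCtx): undo the metadata changes
            throw e;              // rethrow so the caller observes the failure
        } finally {
            lock.unlock();        // getLocks().unlock(): runs whether or not the commit succeeded
        }
    }
}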
Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by Apache.
The class QueryTranslator, method handleCreateIndexStatement.
protected void handleCreateIndexStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
ProgressState progress = ProgressState.NO_PROGRESS;
CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
String dataverseName = getActiveDataverse(stmtCreateIndex.getDataverseName());
String datasetName = stmtCreateIndex.getDatasetName().getValue();
List<Integer> keySourceIndicators = stmtCreateIndex.getFieldSourceIndicators();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.createIndexBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
String indexName = null;
JobSpecification spec = null;
Dataset ds = null;
// For external datasets
List<ExternalFile> externalFilesSnapshot = null;
boolean firstExternalDatasetIndex = false;
boolean filesIndexReplicated = false;
Index filesIndex = null;
boolean datasetLocked = false;
Index index = null;
try {
ds = metadataProvider.findDataset(dataverseName, datasetName);
if (ds == null) {
throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
}
indexName = stmtCreateIndex.getIndexName().getValue();
index = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), ds.getItemTypeDataverseName(), ds.getItemTypeName());
ARecordType aRecordType = (ARecordType) dt.getDatatype();
ARecordType metaRecordType = null;
if (ds.hasMetaPart()) {
Datatype metaDt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), ds.getMetaItemTypeDataverseName(), ds.getMetaItemTypeName());
metaRecordType = (ARecordType) metaDt.getDatatype();
}
List<List<String>> indexFields = new ArrayList<>();
List<IAType> indexFieldTypes = new ArrayList<>();
int keyIndex = 0;
for (Pair<List<String>, TypeExpression> fieldExpr : stmtCreateIndex.getFieldExprs()) {
IAType fieldType = null;
ARecordType subType = KeyFieldTypeUtil.chooseSource(keySourceIndicators, keyIndex, aRecordType, metaRecordType);
boolean isOpen = subType.isOpen();
int i = 0;
if (fieldExpr.first.size() > 1 && !isOpen) {
while (i < fieldExpr.first.size() - 1 && !isOpen) {
subType = (ARecordType) subType.getFieldType(fieldExpr.first.get(i));
i++;
isOpen = subType.isOpen();
}
}
if (fieldExpr.second == null) {
fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
} else {
if (!stmtCreateIndex.isEnforced()) {
throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first + "\" field without enforcing it's type");
}
if (!isOpen) {
throw new AlgebricksException("Typed index on \"" + fieldExpr.first + "\" field could be created only for open datatype");
}
if (stmtCreateIndex.hasMetaField()) {
throw new AlgebricksException("Typed open index can only be created on the record part");
}
Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second, indexName, dataverseName);
TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
fieldType = typeMap.get(typeSignature);
}
if (fieldType == null) {
throw new AlgebricksException("Unknown type " + (fieldExpr.second == null ? fieldExpr.first : fieldExpr.second));
}
indexFields.add(fieldExpr.first);
indexFieldTypes.add(fieldType);
++keyIndex;
}
ValidateUtil.validateKeyFields(aRecordType, metaRecordType, indexFields, keySourceIndicators, indexFieldTypes, stmtCreateIndex.getIndexType());
if (index != null) {
if (stmtCreateIndex.getIfNotExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("An index with this name " + indexName + " already exists.");
}
}
// If the index is an inverted (keyword or ngram) index and the dataset has a variable-length primary key, give an error message and stop.
if (stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
List<List<String>> partitioningKeys = ds.getPrimaryKeys();
for (List<String> partitioningKey : partitioningKeys) {
IAType keyType = aRecordType.getSubFieldType(partitioningKey);
ITypeTraits typeTrait = TypeTraitProvider.INSTANCE.getTypeTrait(keyType);
// If the primary key field is not of a fixed-length type
if (typeTrait.getFixedLength() < 0) {
throw new AlgebricksException("The keyword or ngram index -" + indexName + " cannot be created on the dataset -" + datasetName + " due to its variable-length primary key field - " + partitioningKey);
}
}
}
if (ds.getDatasetType() == DatasetType.INTERNAL) {
validateIfResourceIsActiveInFeed(ds);
} else {
// Check if the dataset is indexable
if (!ExternalIndexingOperations.isIndexible((ExternalDatasetDetails) ds.getDatasetDetails())) {
throw new AlgebricksException("dataset using " + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter() + " Adapter can't be indexed");
}
// Check if the name of the index is valid
if (!ExternalIndexingOperations.isValidIndexName(datasetName, indexName)) {
throw new AlgebricksException("external dataset index name is invalid");
}
// Check if the files index exists
filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
firstExternalDatasetIndex = filesIndex == null;
// Lock external dataset
ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
datasetLocked = true;
if (firstExternalDatasetIndex) {
// Re-check that no one created the files index while we were acquiring the lock
filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
if (filesIndex != null) {
ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
firstExternalDatasetIndex = false;
ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
}
}
if (firstExternalDatasetIndex) {
// Get snapshot from External File System
externalFilesSnapshot = ExternalIndexingOperations.getSnapshotFromExternalFileSystem(ds);
// Add an entry for the files index
filesIndex = new Index(dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName), IndexType.BTREE, ExternalIndexingOperations.FILE_INDEX_FIELD_NAMES, null, ExternalIndexingOperations.FILE_INDEX_FIELD_TYPES, false, false, MetadataUtil.PENDING_ADD_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
// Add files to the external files index
for (ExternalFile file : externalFilesSnapshot) {
MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
}
// This is the first index for the external dataset; replicate the files index
spec = ExternalIndexingOperations.buildFilesIndexCreateJobSpec(ds, externalFilesSnapshot, metadataProvider);
if (spec == null) {
throw new CompilationException("Failed to create job spec for replicating Files Index For external dataset");
}
filesIndexReplicated = true;
JobUtils.runJob(hcc, spec, true);
}
}
// check whether there exists another enforced index on the same field
if (stmtCreateIndex.isEnforced()) {
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
for (Index existingIndex : indexes) {
if (existingIndex.getKeyFieldNames().equals(indexFields) && !existingIndex.getKeyFieldTypes().equals(indexFieldTypes) && existingIndex.isEnforcingKeyFileds()) {
throw new CompilationException("Cannot create index " + indexName + " , enforced index " + existingIndex.getIndexName() + " on field \"" + StringUtils.join(indexFields, ',') + "\" is already defined with type \"" + existingIndex.getKeyFieldTypes() + "\"");
}
}
}
// #. add a new index with PendingAddOp
index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(), indexFields, keySourceIndicators, indexFieldTypes, stmtCreateIndex.getGramLength(), stmtCreateIndex.isEnforced(), false, MetadataUtil.PENDING_ADD_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
// #. prepare to create the index artifact in NC.
spec = IndexUtil.buildSecondaryIndexCreationJobSpec(ds, index, metadataProvider);
if (spec == null) {
throw new CompilationException("Failed to create job spec for creating index '" + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
// #. create the index artifact in NC.
JobUtils.runJob(hcc, spec, true);
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. load data into the index in NC.
spec = IndexUtil.buildSecondaryIndexLoadingJobSpec(ds, index, metadataProvider);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
JobUtils.runJob(hcc, spec, true);
// #. begin new metadataTxn
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. add another new index with PendingNoOp after deleting the index with PendingAddOp
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
index.setPendingOp(MetadataUtil.PENDING_NO_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
// Add the files index back with PendingNoOp after deleting the one with PendingAddOp
if (firstExternalDatasetIndex) {
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, filesIndex.getIndexName());
filesIndex.setPendingOp(MetadataUtil.PENDING_NO_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
// update transaction timestamp
((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(new Date());
MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
// If the files index was replicated for the external dataset, it should be cleaned up on the NC side
if (filesIndexReplicated) {
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
try {
JobSpecification jobSpec = ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, ds);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
JobUtils.runJob(hcc, jobSpec, true);
} catch (Exception e2) {
e.addSuppressed(e2);
if (bActiveTxn) {
abort(e, e2, mdTxnCtx);
}
}
}
if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
// #. execute compensation operations
// remove the index in NC
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
JobSpecification jobSpec = IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
JobUtils.runJob(hcc, jobSpec, true);
} catch (Exception e2) {
e.addSuppressed(e2);
if (bActiveTxn) {
abort(e, e2, mdTxnCtx);
}
}
if (firstExternalDatasetIndex) {
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
// Drop External Files from metadata
MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is inconsistent state: pending files for(" + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
}
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
// Drop the files index from metadata
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName + "." + datasetName + "." + IndexingConstants.getFilesIndexName(datasetName) + ") couldn't be removed from the metadata", e);
}
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is in inconsistent state: pending index(" + dataverseName + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
}
}
throw e;
} finally {
metadataProvider.getLocks().unlock();
if (datasetLocked) {
ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
}
}
}
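The part of this example worth isolating is the pending-op protocol: the index record is committed with PENDING_ADD_OP before the long-running build job executes outside any metadata transaction, and only afterwards is it replaced by a PENDING_NO_OP record, so a crash mid-build leaves a pending record that recovery (or the compensation code above) can clean up. The sketch below models that sequence with hypothetical helpers; it is not the real MetadataManager or JobUtils API.

public final class PendingOpSketch {

    enum PendingOp { PENDING_ADD_OP, PENDING_NO_OP }

    static final class IndexRecord {
        final String name;
        final PendingOp op;
        IndexRecord(String name, PendingOp op) { this.name = name; this.op = op; }
    }

    // Hypothetical stand-in for the metadata catalog.
    interface Metadata {
        void add(IndexRecord rec) throws Exception;
        void drop(String name) throws Exception;
        void commit() throws Exception;
    }

    static void createIndex(Metadata md, String name, Runnable buildJob) throws Exception {
        // Txn 1: record the intent, so a crash leaves a recoverable PENDING_ADD_OP entry.
        md.add(new IndexRecord(name, PendingOp.PENDING_ADD_OP));
        md.commit();
        // No metadata transaction is open while the (potentially long) build job runs.
        buildJob.run();
        // Txn 2: swap the pending record for the final one, as the method above does
        // with dropIndex(...) followed by setPendingOp(PENDING_NO_OP) and addIndex(...).
        md.drop(name);
        md.add(new IndexRecord(name, PendingOp.PENDING_NO_OP));
        md.commit();
    }
}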
Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by Apache.
The class AccessMethodUtils, method checkFTSearchConstantExpression.
// Checks whether a proper constant expression is in place for the full-text search.
// A proper constant expression for a full-text search must be a string or an (un)ordered list of strings.
public static void checkFTSearchConstantExpression(ILogicalExpression constExpression) throws AlgebricksException {
IAObject objectFromExpr = ConstantExpressionUtil.getConstantIaObject(constExpression, null);
String arg2Value;
IACursor oListCursor;
switch(objectFromExpr.getType().getTypeTag()) {
case STRING:
arg2Value = ConstantExpressionUtil.getStringConstant(objectFromExpr);
checkAndGenerateFTSearchExceptionForStringPhrase(arg2Value);
break;
case ARRAY:
oListCursor = ConstantExpressionUtil.getOrderedListConstant(objectFromExpr).getCursor();
checkEachElementInFTSearchListPredicate(oListCursor);
break;
case MULTISET:
oListCursor = ConstantExpressionUtil.getUnorderedListConstant(objectFromExpr).getCursor();
checkEachElementInFTSearchListPredicate(oListCursor);
break;
default:
throw new CompilationException(ErrorCode.COMPILATION_TYPE_UNSUPPORTED, BuiltinFunctions.FULLTEXT_CONTAINS.getName(), objectFromExpr.getType().getTypeTag());
}
}
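The shape of the check is a switch over the constant's type tag that accepts strings and string lists and rejects everything else with a CompilationException carrying an error code. A self-contained toy version, with a simplified TypeTag enum and a plain runtime exception standing in for ATypeTag and CompilationException:

public final class FtsArgumentCheckSketch {

    // Simplified stand-in for ATypeTag.
    enum TypeTag { STRING, ARRAY, MULTISET, BIGINT }

    static void checkFtsArgument(TypeTag tag) {
        switch (tag) {
            case STRING:      // a single search phrase
            case ARRAY:       // an ordered list of phrases
            case MULTISET:    // an unordered list of phrases
                return;       // acceptable full-text search arguments
            default:
                // Stand-in for CompilationException(ErrorCode.COMPILATION_TYPE_UNSUPPORTED, ...)
                throw new IllegalArgumentException("ftcontains: unsupported argument type " + tag);
        }
    }

    public static void main(String[] args) {
        checkFtsArgument(TypeTag.STRING);   // passes
        checkFtsArgument(TypeTag.BIGINT);   // throws
    }
}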
Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by Apache.
The class BTreeAccessMethod, method createSecondaryToPrimaryPlan.
@Override
public ILogicalOperator createSecondaryToPrimaryPlan(Mutable<ILogicalExpression> conditionRef, OptimizableOperatorSubTree indexSubTree, OptimizableOperatorSubTree probeSubTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx, boolean retainInput, boolean retainNull, boolean requiresBroadcast, IOptimizationContext context) throws AlgebricksException {
Dataset dataset = indexSubTree.getDataset();
ARecordType recordType = indexSubTree.getRecordType();
ARecordType metaRecordType = indexSubTree.getMetaRecordType();
// We already made sure that indexSubTree has a datasource scan.
AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.getDataSourceRef().getValue();
List<Pair<Integer, Integer>> exprAndVarList = analysisCtx.getIndexExprsFromIndexExprsAndVars(chosenIndex);
int numSecondaryKeys = analysisCtx.getNumberOfMatchedKeys(chosenIndex);
// List of function expressions that will be replaced by the secondary-index search.
// These func exprs will be removed from the select condition at the very end of this method.
Set<ILogicalExpression> replacedFuncExprs = new HashSet<>();
// Info on high and low keys for the BTree search predicate.
ILogicalExpression[] lowKeyExprs = new ILogicalExpression[numSecondaryKeys];
ILogicalExpression[] highKeyExprs = new ILogicalExpression[numSecondaryKeys];
LimitType[] lowKeyLimits = new LimitType[numSecondaryKeys];
LimitType[] highKeyLimits = new LimitType[numSecondaryKeys];
boolean[] lowKeyInclusive = new boolean[numSecondaryKeys];
boolean[] highKeyInclusive = new boolean[numSecondaryKeys];
ILogicalExpression[] constantAtRuntimeExpressions = new ILogicalExpression[numSecondaryKeys];
LogicalVariable[] constAtRuntimeExprVars = new LogicalVariable[numSecondaryKeys];
/* TODO: For now we don't do any sophisticated analysis of the func exprs to come up with "the best" range
* predicate. If we can't figure out how to integrate a certain funcExpr into the current predicate,
* we just bail by setting this flag.*/
boolean couldntFigureOut = false;
boolean doneWithExprs = false;
boolean isEqCondition = false;
BitSet setLowKeys = new BitSet(numSecondaryKeys);
BitSet setHighKeys = new BitSet(numSecondaryKeys);
// Go through the func exprs listed as optimizable by the chosen index,
// and formulate a range predicate on the secondary-index keys.
// Checks whether a type cast happened from a real (FLOAT, DOUBLE) value to an INT value,
// since we have rounding issues when dealing with the LT(<) or GT(>) operator.
boolean realTypeConvertedToIntegerType;
for (Pair<Integer, Integer> exprIndex : exprAndVarList) {
// Position of the field of matchedFuncExprs.get(exprIndex) in the chosen index's indexed exprs.
IOptimizableFuncExpr optFuncExpr = analysisCtx.getMatchedFuncExpr(exprIndex.first);
int keyPos = indexOf(optFuncExpr.getFieldName(0), chosenIndex.getKeyFieldNames());
if (keyPos < 0 && optFuncExpr.getNumLogicalVars() > 1) {
// If we are optimizing a join, the matching field may be the second field name.
keyPos = indexOf(optFuncExpr.getFieldName(1), chosenIndex.getKeyFieldNames());
}
if (keyPos < 0) {
throw CompilationException.create(ErrorCode.NO_INDEX_FIELD_NAME_FOR_GIVEN_FUNC_EXPR);
}
Pair<ILogicalExpression, Boolean> returnedSearchKeyExpr = AccessMethodUtils.createSearchKeyExpr(optFuncExpr, indexSubTree, probeSubTree);
ILogicalExpression searchKeyExpr = returnedSearchKeyExpr.first;
if (searchKeyExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
constantAtRuntimeExpressions[keyPos] = searchKeyExpr;
constAtRuntimeExprVars[keyPos] = context.newVar();
searchKeyExpr = new VariableReferenceExpression(constAtRuntimeExprVars[keyPos]);
}
realTypeConvertedToIntegerType = returnedSearchKeyExpr.second;
LimitType limit = getLimitType(optFuncExpr, probeSubTree);
// If a real value was cast to an integer, relax exclusive limits to inclusive ones so that boundary results are not missed.
if (realTypeConvertedToIntegerType) {
if (limit == LimitType.HIGH_EXCLUSIVE) {
limit = LimitType.HIGH_INCLUSIVE;
} else if (limit == LimitType.LOW_EXCLUSIVE) {
limit = LimitType.LOW_INCLUSIVE;
}
}
switch(limit) {
case EQUAL:
{
if (lowKeyLimits[keyPos] == null && highKeyLimits[keyPos] == null) {
lowKeyLimits[keyPos] = highKeyLimits[keyPos] = limit;
lowKeyInclusive[keyPos] = highKeyInclusive[keyPos] = true;
lowKeyExprs[keyPos] = highKeyExprs[keyPos] = searchKeyExpr;
setLowKeys.set(keyPos);
setHighKeys.set(keyPos);
isEqCondition = true;
} else {
// The same optimizable expression may be encountered twice when optimizing a join (once from analyzing each side of the join).
if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true && lowKeyExprs[keyPos].equals(searchKeyExpr) && highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == true && highKeyExprs[keyPos].equals(searchKeyExpr)) {
isEqCondition = true;
break;
}
couldntFigureOut = true;
}
// If high and low keys are set, we exit for now.
if (setLowKeys.cardinality() == numSecondaryKeys && setHighKeys.cardinality() == numSecondaryKeys) {
doneWithExprs = true;
}
break;
}
case HIGH_EXCLUSIVE:
{
if (highKeyLimits[keyPos] == null || (highKeyLimits[keyPos] != null && highKeyInclusive[keyPos])) {
highKeyLimits[keyPos] = limit;
highKeyExprs[keyPos] = searchKeyExpr;
highKeyInclusive[keyPos] = false;
} else {
// The same optimizable expression may be encountered twice when optimizing a join (once from analyzing each side of the join).
if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == false && highKeyExprs[keyPos].equals(searchKeyExpr)) {
break;
}
couldntFigureOut = true;
doneWithExprs = true;
}
break;
}
case HIGH_INCLUSIVE:
{
if (highKeyLimits[keyPos] == null) {
highKeyLimits[keyPos] = limit;
highKeyExprs[keyPos] = searchKeyExpr;
highKeyInclusive[keyPos] = true;
} else {
// The same optimizable expression may be encountered twice when optimizing a join (once from analyzing each side of the join).
if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == true && highKeyExprs[keyPos].equals(searchKeyExpr)) {
break;
}
couldntFigureOut = true;
doneWithExprs = true;
}
break;
}
case LOW_EXCLUSIVE:
{
if (lowKeyLimits[keyPos] == null || (lowKeyLimits[keyPos] != null && lowKeyInclusive[keyPos])) {
lowKeyLimits[keyPos] = limit;
lowKeyExprs[keyPos] = searchKeyExpr;
lowKeyInclusive[keyPos] = false;
} else {
// The same optimizable expression may be encountered twice when optimizing a join (once from analyzing each side of the join).
if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == false && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
break;
}
couldntFigureOut = true;
doneWithExprs = true;
}
break;
}
case LOW_INCLUSIVE:
{
if (lowKeyLimits[keyPos] == null) {
lowKeyLimits[keyPos] = limit;
lowKeyExprs[keyPos] = searchKeyExpr;
lowKeyInclusive[keyPos] = true;
} else {
// The same optimizable expression may be encountered twice when optimizing a join (once from analyzing each side of the join).
if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
break;
}
couldntFigureOut = true;
doneWithExprs = true;
}
break;
}
default:
{
throw new IllegalStateException();
}
}
if (!couldntFigureOut) {
// Remember to remove this funcExpr later.
replacedFuncExprs.add(analysisCtx.getMatchedFuncExpr(exprIndex.first).getFuncExpr());
}
if (doneWithExprs) {
break;
}
}
if (couldntFigureOut) {
return null;
}
// If the select condition contains mixed open/closed intervals on multiple keys, then we make all intervals
// closed to obtain a superset of answers and leave the original selection in place.
boolean primaryIndexPostProccessingIsNeeded = false;
for (int i = 1; i < numSecondaryKeys; ++i) {
if (lowKeyInclusive[i] != lowKeyInclusive[0]) {
Arrays.fill(lowKeyInclusive, true);
primaryIndexPostProccessingIsNeeded = true;
break;
}
}
for (int i = 1; i < numSecondaryKeys; ++i) {
if (highKeyInclusive[i] != highKeyInclusive[0]) {
Arrays.fill(highKeyInclusive, true);
primaryIndexPostProccessingIsNeeded = true;
break;
}
}
// Determine cases where a prefix search can be applied
for (int i = 1; i < lowKeyExprs.length; i++) {
if (lowKeyLimits[0] == null && lowKeyLimits[i] != null || lowKeyLimits[0] != null && lowKeyLimits[i] == null || highKeyLimits[0] == null && highKeyLimits[i] != null || highKeyLimits[0] != null && highKeyLimits[i] == null) {
numSecondaryKeys--;
primaryIndexPostProccessingIsNeeded = true;
}
}
if (lowKeyLimits[0] == null) {
lowKeyInclusive[0] = true;
}
if (highKeyLimits[0] == null) {
highKeyInclusive[0] = true;
}
// Here we generate vars and funcs for assigning the secondary-index keys to be fed into the secondary-index
// search.
// List of variables for the assign.
ArrayList<LogicalVariable> keyVarList = new ArrayList<>();
// List of variables and expressions for the assign.
ArrayList<LogicalVariable> assignKeyVarList = new ArrayList<>();
ArrayList<Mutable<ILogicalExpression>> assignKeyExprList = new ArrayList<>();
int numLowKeys = createKeyVarsAndExprs(numSecondaryKeys, lowKeyLimits, lowKeyExprs, assignKeyVarList, assignKeyExprList, keyVarList, context, constantAtRuntimeExpressions, constAtRuntimeExprVars);
int numHighKeys = createKeyVarsAndExprs(numSecondaryKeys, highKeyLimits, highKeyExprs, assignKeyVarList, assignKeyExprList, keyVarList, context, constantAtRuntimeExpressions, constAtRuntimeExprVars);
BTreeJobGenParams jobGenParams = new BTreeJobGenParams(chosenIndex.getIndexName(), IndexType.BTREE, dataset.getDataverseName(), dataset.getDatasetName(), retainInput, requiresBroadcast);
jobGenParams.setLowKeyInclusive(lowKeyInclusive[0]);
jobGenParams.setHighKeyInclusive(highKeyInclusive[0]);
jobGenParams.setIsEqCondition(isEqCondition);
jobGenParams.setLowKeyVarList(keyVarList, 0, numLowKeys);
jobGenParams.setHighKeyVarList(keyVarList, numLowKeys, numHighKeys);
ILogicalOperator inputOp = null;
if (!assignKeyVarList.isEmpty()) {
// Assign operator that sets the constant secondary-index search-key fields if necessary.
AssignOperator assignConstantSearchKeys = new AssignOperator(assignKeyVarList, assignKeyExprList);
// Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
assignConstantSearchKeys.getInputs().add(new MutableObject<>(OperatorManipulationUtil.deepCopy(dataSourceOp.getInputs().get(0).getValue())));
assignConstantSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
inputOp = assignConstantSearchKeys;
} else if (probeSubTree == null) {
// Non-pure case: make sure that the non-pure function is unpartitioned.
ILogicalOperator checkOp = dataSourceOp.getInputs().get(0).getValue();
while (checkOp.getExecutionMode() != ExecutionMode.UNPARTITIONED) {
if (checkOp.getInputs().size() == 1) {
checkOp = checkOp.getInputs().get(0).getValue();
} else {
return null;
}
}
inputOp = dataSourceOp.getInputs().get(0).getValue();
} else {
// All index search keys are variables.
inputOp = probeSubTree.getRoot();
}
ILogicalOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType, metaRecordType, chosenIndex, inputOp, jobGenParams, context, false, retainInput, retainNull);
// Generate the rest of the upstream plan which feeds the search results into the primary index.
AbstractUnnestMapOperator primaryIndexUnnestOp = null;
boolean isPrimaryIndex = chosenIndex.isPrimaryIndex();
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
// External dataset
UnnestMapOperator externalDataAccessOp = AccessMethodUtils.createExternalDataLookupUnnestMap(dataSourceOp, dataset, recordType, secondaryIndexUnnestOp, context, retainInput, retainNull);
indexSubTree.getDataSourceRef().setValue(externalDataAccessOp);
return externalDataAccessOp;
} else if (!isPrimaryIndex) {
primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceOp, dataset, recordType, metaRecordType, secondaryIndexUnnestOp, context, true, retainInput, retainNull, false);
// Adds equivalence classes: one equivalence class between each primary key
// variable and a record field-access expression.
EquivalenceClassUtils.addEquivalenceClassesForPrimaryIndexAccess(primaryIndexUnnestOp, dataSourceOp.getVariables(), recordType, metaRecordType, dataset, context);
} else {
List<Object> primaryIndexOutputTypes = new ArrayList<>();
AccessMethodUtils.appendPrimaryIndexTypes(dataset, recordType, metaRecordType, primaryIndexOutputTypes);
List<LogicalVariable> scanVariables = dataSourceOp.getVariables();
// If primary-index post-processing is not needed, we create a new condition from the remaining expressions.
if (!primaryIndexPostProccessingIsNeeded) {
List<Mutable<ILogicalExpression>> remainingFuncExprs = new ArrayList<>();
try {
getNewConditionExprs(conditionRef, replacedFuncExprs, remainingFuncExprs);
} catch (CompilationException e) {
return null;
}
// Generate new condition.
if (!remainingFuncExprs.isEmpty()) {
ILogicalExpression pulledCond = createSelectCondition(remainingFuncExprs);
conditionRef.setValue(pulledCond);
} else {
conditionRef.setValue(null);
}
}
// Checks whether LEFT_OUTER_UNNESTMAP operator is required.
boolean leftOuterUnnestMapRequired = retainNull && retainInput;
if (conditionRef.getValue() != null) {
// The job gen parameters are transferred to the actual job gen
// via the UnnestMapOperator's function arguments.
List<Mutable<ILogicalExpression>> primaryIndexFuncArgs = new ArrayList<>();
jobGenParams.writeToFuncArgs(primaryIndexFuncArgs);
// An index search is expressed as an unnest-map over an
// index-search function.
IFunctionInfo primaryIndexSearch = FunctionUtil.getFunctionInfo(BuiltinFunctions.INDEX_SEARCH);
UnnestingFunctionCallExpression primaryIndexSearchFunc = new UnnestingFunctionCallExpression(primaryIndexSearch, primaryIndexFuncArgs);
primaryIndexSearchFunc.setReturnsUniqueValues(true);
if (!leftOuterUnnestMapRequired) {
primaryIndexUnnestOp = new UnnestMapOperator(scanVariables, new MutableObject<ILogicalExpression>(primaryIndexSearchFunc), primaryIndexOutputTypes, retainInput);
} else {
primaryIndexUnnestOp = new LeftOuterUnnestMapOperator(scanVariables, new MutableObject<ILogicalExpression>(primaryIndexSearchFunc), primaryIndexOutputTypes, true);
}
} else {
if (!leftOuterUnnestMapRequired) {
primaryIndexUnnestOp = new UnnestMapOperator(scanVariables, ((UnnestMapOperator) secondaryIndexUnnestOp).getExpressionRef(), primaryIndexOutputTypes, retainInput);
} else {
primaryIndexUnnestOp = new LeftOuterUnnestMapOperator(scanVariables, ((LeftOuterUnnestMapOperator) secondaryIndexUnnestOp).getExpressionRef(), primaryIndexOutputTypes, true);
}
}
primaryIndexUnnestOp.getInputs().add(new MutableObject<>(inputOp));
// Adds equivalence classes: one equivalence class between each primary key
// variable and a record field-access expression.
EquivalenceClassUtils.addEquivalenceClassesForPrimaryIndexAccess(primaryIndexUnnestOp, scanVariables, recordType, metaRecordType, dataset, context);
}
return primaryIndexUnnestOp;
}
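Most of the bulk above is the per-key bookkeeping that folds comparison predicates into low/high bounds for the BTree search. The toy below reproduces just that folding (it uses plain arrays instead of the optimizer's expression types): equality pins both ends, LOW_* predicates set the lower bound, and HIGH_* predicates set the upper bound.

import java.util.Arrays;

public final class KeyBoundsSketch {

    enum Limit { EQUAL, LOW_INCLUSIVE, LOW_EXCLUSIVE, HIGH_INCLUSIVE, HIGH_EXCLUSIVE }

    static void merge(int keyPos, Limit limit, Object expr, Object[] lowKey, Object[] highKey, boolean[] lowIncl, boolean[] highIncl) {
        switch (limit) {
            case EQUAL:
                lowKey[keyPos] = highKey[keyPos] = expr;   // key == expr pins both bounds
                lowIncl[keyPos] = highIncl[keyPos] = true;
                break;
            case LOW_INCLUSIVE:
            case LOW_EXCLUSIVE:
                lowKey[keyPos] = expr;                     // key >= expr or key > expr
                lowIncl[keyPos] = (limit == Limit.LOW_INCLUSIVE);
                break;
            default:
                highKey[keyPos] = expr;                    // key <= expr or key < expr
                highIncl[keyPos] = (limit == Limit.HIGH_INCLUSIVE);
                break;
        }
    }

    public static void main(String[] args) {
        Object[] low = new Object[1];
        Object[] high = new Object[1];
        boolean[] lowIncl = new boolean[1];
        boolean[] highIncl = new boolean[1];
        merge(0, Limit.LOW_EXCLUSIVE, 5, low, high, lowIncl, highIncl);    // key > 5
        merge(0, Limit.HIGH_INCLUSIVE, 9, low, high, lowIncl, highIncl);   // key <= 9
        // Prints [5, false, 9, true], i.e. the range (5, 9].
        System.out.println(Arrays.asList(low[0], lowIncl[0], high[0], highIncl[0]));
    }
}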
Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by Apache.
The class LangExpressionToPlanTranslator, method visit.
@Override
public Pair<ILogicalOperator, LogicalVariable> visit(CallExpr fcall, Mutable<ILogicalOperator> tupSource) throws CompilationException {
LogicalVariable v = context.newVar();
FunctionSignature signature = fcall.getFunctionSignature();
List<Mutable<ILogicalExpression>> args = new ArrayList<>();
Mutable<ILogicalOperator> topOp = tupSource;
for (Expression expr : fcall.getExprList()) {
switch(expr.getKind()) {
case VARIABLE_EXPRESSION:
LogicalVariable var = context.getVar(((VariableExpr) expr).getVar().getId());
args.add(new MutableObject<>(new VariableReferenceExpression(var)));
break;
case LITERAL_EXPRESSION:
LiteralExpr val = (LiteralExpr) expr;
args.add(new MutableObject<>(new ConstantExpression(new AsterixConstantValue(ConstantHelper.objectFromLiteral(val.getValue())))));
break;
default:
Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(expr, topOp);
AbstractLogicalOperator o1 = (AbstractLogicalOperator) eo.second.getValue();
args.add(new MutableObject<>(eo.first));
if (o1 != null && !(o1.getOperatorTag() == LogicalOperatorTag.ASSIGN && hasOnlyChild(o1, topOp))) {
topOp = eo.second;
}
break;
}
}
AbstractFunctionCallExpression f;
if ((f = lookupUserDefinedFunction(signature, args)) == null) {
f = lookupBuiltinFunction(signature.getName(), signature.getArity(), args);
}
if (f == null) {
throw new CompilationException(" Unknown function " + signature.getName() + "@" + signature.getArity());
}
// Put hints into function call expr.
if (fcall.hasHints()) {
for (IExpressionAnnotation hint : fcall.getHints()) {
f.getAnnotations().put(hint, hint);
}
}
AssignOperator op = new AssignOperator(v, new MutableObject<>(f));
if (topOp != null) {
op.getInputs().add(topOp);
}
return new Pair<>(op, v);
}
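The resolution order is the detail to notice: the translator first looks for a user-defined function and only then falls back to a builtin, throwing a CompilationException when neither matches the name and arity. A minimal sketch of that fallback, with maps standing in for the metadata catalog and the builtin registry:

import java.util.HashMap;
import java.util.Map;

public final class FunctionLookupSketch {

    static final Map<String, Runnable> USER_DEFINED = new HashMap<>();
    static final Map<String, Runnable> BUILTINS = new HashMap<>();

    static Runnable resolve(String name, int arity) {
        String key = name + "@" + arity;   // same name@arity convention as the error message above
        Runnable f = USER_DEFINED.get(key);
        if (f == null) {
            f = BUILTINS.get(key);         // fall back to the builtin registry
        }
        if (f == null) {
            // Stand-in for CompilationException("Unknown function " + name + "@" + arity)
            throw new IllegalStateException("Unknown function " + name + "@" + arity);
        }
        return f;
    }

    public static void main(String[] args) {
        BUILTINS.put("lowercase@1", () -> System.out.println("builtin lowercase"));
        resolve("lowercase", 1).run();   // resolves to the builtin
        resolve("missing", 2);           // throws: Unknown function missing@2
    }
}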