Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.
The class RangeMapBuilder, method verifyRangeOrder.
public static void verifyRangeOrder(IRangeMap rangeMap, boolean ascending) throws CompilationException {
    // TODO Add support for composite fields.
    int fieldIndex = 0;
    int fieldType = rangeMap.getTag(0, 0);
    BinaryComparatorFactoryProvider comparatorFactory = BinaryComparatorFactoryProvider.INSTANCE;
    IBinaryComparatorFactory bcf =
            comparatorFactory.getBinaryComparatorFactory(ATypeTag.VALUE_TYPE_MAPPING[fieldType], ascending);
    IBinaryComparator comparator = bcf.createBinaryComparator();
    int c = 0;
    for (int split = 1; split < rangeMap.getSplitCount(); ++split) {
        if (fieldType != rangeMap.getTag(fieldIndex, split)) {
            throw new CompilationException("Range field contains more than a single type of items (" + fieldType
                    + " and " + rangeMap.getTag(fieldIndex, split) + ").");
        }
        int previousSplit = split - 1;
        try {
            c = comparator.compare(rangeMap.getByteArray(fieldIndex, previousSplit),
                    rangeMap.getStartOffset(fieldIndex, previousSplit),
                    rangeMap.getLength(fieldIndex, previousSplit), rangeMap.getByteArray(fieldIndex, split),
                    rangeMap.getStartOffset(fieldIndex, split), rangeMap.getLength(fieldIndex, split));
        } catch (HyracksDataException e) {
            throw new CompilationException(e);
        }
        if (c >= 0) {
            throw new CompilationException("Range fields are not in sorted order.");
        }
    }
}
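The loop compares each split point with its predecessor and throws on c >= 0, so duplicate split points are rejected along with out-of-order ones. A minimal standalone sketch of that pairwise check, using plain integers in place of the serialized IRangeMap entries (the class, method, and values here are hypothetical, for illustration only):

// Hypothetical illustration of the pairwise strict-ordering check above.
public final class RangeOrderCheckSketch {

    static void verifyStrictlyOrdered(int[] splits, boolean ascending) {
        for (int split = 1; split < splits.length; split++) {
            int c = Integer.compare(splits[split - 1], splits[split]);
            if (!ascending) {
                c = -c; // a descending comparator inverts the sign, as the factory-built one does
            }
            // c >= 0 rejects both out-of-order and duplicate split points
            if (c >= 0) {
                throw new IllegalStateException("Range fields are not in sorted order.");
            }
        }
    }

    public static void main(String[] args) {
        verifyStrictlyOrdered(new int[] { 10, 20, 30 }, true); // passes
        verifyStrictlyOrdered(new int[] { 10, 10, 30 }, true); // throws: duplicate split point
    }
}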
Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.
The class Dataset, method drop.
/**
 * Drop this dataset.
 *
 * @param metadataProvider
 *            metadata provider that can be used to get metadata info and runtimes
 * @param mdTxnCtx
 *            the transaction context
 * @param jobsToExecute
 *            a list of jobs to be executed as part of the drop operation
 * @param bActiveTxn
 *            whether the metadata transaction is ongoing
 * @param progress
 *            a mutable progress state used for error handling during the drop operation
 * @param hcc
 *            a client connection to hyracks master for job execution
 * @throws Exception
 *             if an error occurs during the drop process or if the dataset can't be dropped for any reason
 */
public void drop(MetadataProvider metadataProvider, MutableObject<MetadataTransactionContext> mdTxnCtx,
        List<JobSpecification> jobsToExecute, MutableBoolean bActiveTxn, MutableObject<ProgressState> progress,
        IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup) throws Exception {
    Map<FeedConnectionId, Pair<JobSpecification, Boolean>> disconnectJobList = new HashMap<>();
    if (getDatasetType() == DatasetType.INTERNAL) {
        // prepare job spec(s) that would disconnect any active feeds involving the dataset.
        ActiveLifecycleListener activeListener =
                (ActiveLifecycleListener) metadataProvider.getApplicationContext().getActiveLifecycleListener();
        IActiveEntityEventsListener[] activeListeners = activeListener.getNotificationHandler().getEventListeners();
        for (IActiveEntityEventsListener listener : activeListeners) {
            if (listener.isEntityUsingDataset(this)) {
                throw new CompilationException(ErrorCode.COMPILATION_CANT_DROP_ACTIVE_DATASET,
                        RecordUtil.toFullyQualifiedName(dataverseName, datasetName),
                        listener.getEntityId().toString());
            }
        }
        // #. prepare jobs to drop the dataset and the indexes in NC
        List<Index> indexes =
                MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx.getValue(), dataverseName, datasetName);
        for (int j = 0; j < indexes.size(); j++) {
            if (indexes.get(j).isSecondaryIndex()) {
                jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(j), metadataProvider, this));
            }
        }
        jobsToExecute.add(DatasetUtil.dropDatasetJobSpec(this, metadataProvider));
        // #. mark the existing dataset as PendingDropOp
        MetadataManager.INSTANCE.dropDataset(mdTxnCtx.getValue(), dataverseName, datasetName);
        MetadataManager.INSTANCE.addDataset(mdTxnCtx.getValue(),
                new Dataset(dataverseName, datasetName, getItemTypeDataverseName(), getItemTypeName(),
                        getMetaItemTypeDataverseName(), getMetaItemTypeName(), getNodeGroupName(),
                        getCompactionPolicy(), getCompactionPolicyProperties(), getDatasetDetails(), getHints(),
                        getDatasetType(), getDatasetId(), MetadataUtil.PENDING_DROP_OP));
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
        bActiveTxn.setValue(false);
        progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
        // #. disconnect the feeds
        for (Pair<JobSpecification, Boolean> p : disconnectJobList.values()) {
            JobUtils.runJob(hcc, p.first, true);
        }
        // #. run the jobs
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
        mdTxnCtx.setValue(MetadataManager.INSTANCE.beginTransaction());
        bActiveTxn.setValue(true);
        metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
    } else {
        // External dataset
        ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(this);
        // #. prepare jobs to drop the dataset and the indexes in NC
        List<Index> indexes =
                MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx.getValue(), dataverseName, datasetName);
        for (int j = 0; j < indexes.size(); j++) {
            if (ExternalIndexingOperations.isFileIndex(indexes.get(j))) {
                jobsToExecute.add(IndexUtil.buildDropIndexJobSpec(indexes.get(j), metadataProvider, this));
            } else {
                jobsToExecute.add(DatasetUtil.buildDropFilesIndexJobSpec(metadataProvider, this));
            }
        }
        // #. mark the existing dataset as PendingDropOp
        MetadataManager.INSTANCE.dropDataset(mdTxnCtx.getValue(), dataverseName, datasetName);
        MetadataManager.INSTANCE.addDataset(mdTxnCtx.getValue(),
                new Dataset(dataverseName, datasetName, getItemTypeDataverseName(), getItemTypeName(),
                        getNodeGroupName(), getCompactionPolicy(), getCompactionPolicyProperties(),
                        getDatasetDetails(), getHints(), getDatasetType(), getDatasetId(),
                        MetadataUtil.PENDING_DROP_OP));
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
        bActiveTxn.setValue(false);
        progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
        // #. run the jobs
        for (JobSpecification jobSpec : jobsToExecute) {
            JobUtils.runJob(hcc, jobSpec, true);
        }
        if (!indexes.isEmpty()) {
            ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(this);
        }
        mdTxnCtx.setValue(MetadataManager.INSTANCE.beginTransaction());
        bActiveTxn.setValue(true);
        metadataProvider.setMetadataTxnContext(mdTxnCtx.getValue());
    }
    // #. finally, delete the dataset.
    MetadataManager.INSTANCE.dropDataset(mdTxnCtx.getValue(), dataverseName, datasetName);
    // Drop the associated nodegroup if it is no longer used by any other dataset.
    if (dropCorrespondingNodeGroup) {
        MetadataLockManager.INSTANCE.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroupName);
        MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx.getValue(), nodeGroupName, true);
    }
}
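The method follows AsterixDB's pending-operation pattern: mark the metadata record PENDING_DROP_OP and commit, run the physical drop jobs outside any metadata transaction, then begin a new transaction for the final delete, so a crash mid-drop leaves a recoverable marker rather than dangling storage. A minimal sketch of that protocol, with hypothetical MetadataStore and Job types standing in for MetadataManager and Hyracks job specifications:

// Hypothetical sketch of the pending-op drop protocol used above; all types
// here are illustrative stand-ins, not the real AsterixDB API.
interface MetadataStore {
    void beginTransaction();
    void commitTransaction();
    void markPendingDrop(String dataset);
    void deleteRecord(String dataset);
}

interface Job {
    void run() throws Exception; // runs outside any metadata transaction
}

final class PendingDropSketch {
    // assumes the caller already holds an open metadata transaction
    static void drop(MetadataStore store, String dataset, java.util.List<Job> jobs) throws Exception {
        // 1. flag the record so a crash during the physical drop is recoverable
        store.markPendingDrop(dataset);
        store.commitTransaction();
        // 2. run the (possibly long) physical drop jobs with no metadata lock held
        for (Job job : jobs) {
            job.run();
        }
        // 3. reacquire a transaction and remove the record for good
        store.beginTransaction();
        store.deleteRecord(dataset);
        store.commitTransaction();
    }
}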
Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.
The class BTreeResourceFactoryProvider, method getResourceFactory.
@Override
public IResourceFactory getResourceFactory(MetadataProvider mdProvider, Dataset dataset, Index index,
        ARecordType recordType, ARecordType metaType, ILSMMergePolicyFactory mergePolicyFactory,
        Map<String, String> mergePolicyProperties, ITypeTraits[] filterTypeTraits,
        IBinaryComparatorFactory[] filterCmpFactories) throws AlgebricksException {
    int[] filterFields = IndexUtil.getFilterFields(dataset, index, filterTypeTraits);
    int[] btreeFields = IndexUtil.getBtreeFieldsIfFiltered(dataset, index);
    IStorageComponentProvider storageComponentProvider = mdProvider.getStorageComponentProvider();
    ITypeTraits[] typeTraits = getTypeTraits(mdProvider, dataset, index, recordType, metaType);
    IBinaryComparatorFactory[] cmpFactories = getCmpFactories(mdProvider, dataset, index, recordType, metaType);
    int[] bloomFilterFields = getBloomFilterFields(dataset, index);
    boolean durable = !dataset.isTemp();
    double bloomFilterFalsePositiveRate = mdProvider.getStorageProperties().getBloomFilterFalsePositiveRate();
    ILSMOperationTrackerFactory opTrackerFactory = dataset.getIndexOperationTrackerFactory(index);
    ILSMIOOperationCallbackFactory ioOpCallbackFactory = dataset.getIoOperationCallbackFactory(index);
    IStorageManager storageManager = storageComponentProvider.getStorageManager();
    IMetadataPageManagerFactory metadataPageManagerFactory =
            storageComponentProvider.getMetadataPageManagerFactory();
    ILSMIOOperationSchedulerProvider ioSchedulerProvider =
            storageComponentProvider.getIoOperationSchedulerProvider();
    switch (dataset.getDatasetType()) {
        case EXTERNAL:
            return index.getIndexName().equals(IndexingConstants.getFilesIndexName(dataset.getDatasetName()))
                    ? new ExternalBTreeLocalResourceFactory(storageManager, typeTraits, cmpFactories,
                            filterTypeTraits, filterCmpFactories, filterFields, opTrackerFactory,
                            ioOpCallbackFactory, metadataPageManagerFactory, ioSchedulerProvider,
                            mergePolicyFactory, mergePolicyProperties, durable, bloomFilterFields,
                            bloomFilterFalsePositiveRate, false, btreeFields)
                    : new ExternalBTreeWithBuddyLocalResourceFactory(storageManager, typeTraits, cmpFactories,
                            filterTypeTraits, filterCmpFactories, filterFields, opTrackerFactory,
                            ioOpCallbackFactory, metadataPageManagerFactory, ioSchedulerProvider,
                            mergePolicyFactory, mergePolicyProperties, durable, bloomFilterFields,
                            bloomFilterFalsePositiveRate, false, btreeFields);
        case INTERNAL:
            AsterixVirtualBufferCacheProvider vbcProvider =
                    new AsterixVirtualBufferCacheProvider(dataset.getDatasetId());
            return new LSMBTreeLocalResourceFactory(storageManager, typeTraits, cmpFactories, filterTypeTraits,
                    filterCmpFactories, filterFields, opTrackerFactory, ioOpCallbackFactory,
                    metadataPageManagerFactory, vbcProvider, ioSchedulerProvider, mergePolicyFactory,
                    mergePolicyProperties, durable, bloomFilterFields, bloomFilterFalsePositiveRate,
                    index.isPrimaryIndex(), btreeFields);
        default:
            throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_DATASET_TYPE,
                    dataset.getDatasetType().toString());
    }
}
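The factory selection is a plain dispatch: EXTERNAL datasets get one of two external B-tree factories depending on whether the index is the dataset's files index, INTERNAL datasets get the LSM B-tree factory, and any other type is a compile-time error. A hypothetical reduction of that dispatch shape (the enum and returned names are illustrative stand-ins, not the real factory API):

// Hypothetical reduction of the dispatch in getResourceFactory.
enum DatasetType { INTERNAL, EXTERNAL }

final class ResourceFactorySketch {
    static String pickFactory(DatasetType type, String indexName, String filesIndexName) {
        switch (type) {
            case EXTERNAL:
                // the files index gets a plain external B-tree; every other
                // external index gets the "with buddy" variant
                return indexName.equals(filesIndexName)
                        ? "ExternalBTreeLocalResourceFactory"
                        : "ExternalBTreeWithBuddyLocalResourceFactory";
            case INTERNAL:
                return "LSMBTreeLocalResourceFactory";
            default:
                throw new IllegalArgumentException("Unknown dataset type: " + type);
        }
    }
}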
Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.
The class AccessMethodUtils, method checkEachElementInFTSearchListPredicate.
/**
 * Fetches each element and calls the check for the type and value in the given list using the given cursor.
 */
private static void checkEachElementInFTSearchListPredicate(IACursor oListCursor) throws AlgebricksException {
    String argValue;
    IAObject element;
    while (oListCursor.next()) {
        element = oListCursor.get();
        if (element.getType() == BuiltinType.ASTRING) {
            argValue = ConstantExpressionUtil.getStringConstant(element);
            checkAndGenerateFTSearchExceptionForStringPhrase(argValue);
        } else {
            throw new CompilationException(ErrorCode.COMPILATION_TYPE_UNSUPPORTED,
                    BuiltinFunctions.FULLTEXT_CONTAINS.getName(), element.getType().getTypeTag());
        }
    }
}
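The helper enforces that every element of the full-text search list is a string before validating it as a search phrase; any other element type aborts compilation. A reduced sketch of the same per-element check over a plain java.util.List, with a hypothetical checkPhrase standing in for checkAndGenerateFTSearchExceptionForStringPhrase:

// Hypothetical reduction of the per-element type check above, using a plain
// List<Object> in place of the IACursor over an AsterixDB list.
final class FTSearchListCheckSketch {

    static void checkEachElement(java.util.List<Object> list) {
        for (Object element : list) {
            if (element instanceof String) {
                checkPhrase((String) element); // value-level validation of the phrase
            } else {
                throw new IllegalArgumentException(
                        "ftcontains does not support type: " + element.getClass().getSimpleName());
            }
        }
    }

    private static void checkPhrase(String value) {
        // illustrative stand-in for the real phrase validation
        if (value.trim().isEmpty()) {
            throw new IllegalArgumentException("Empty full-text search phrase.");
        }
    }
}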
Use of org.apache.asterix.common.exceptions.CompilationException in project asterixdb by apache.
The class RecordMergeTypeComputer, method mergedNestedType.
private IAType mergedNestedType(String fieldName, IAType fieldType1, IAType fieldType0) throws AlgebricksException {
    if (fieldType1.getTypeTag() != ATypeTag.OBJECT || fieldType0.getTypeTag() != ATypeTag.OBJECT) {
        throw new CompilationException(ErrorCode.COMPILATION_DUPLICATE_FIELD_NAME, fieldName);
    }
    ARecordType resultType = (ARecordType) fieldType0;
    ARecordType fieldType1Copy = (ARecordType) fieldType1;
    for (int i = 0; i < fieldType1Copy.getFieldTypes().length; i++) {
        String fname = fieldType1Copy.getFieldNames()[i];
        int pos = resultType.getFieldIndex(fname);
        if (pos >= 0) {
            // If it is a sub-record, merge; otherwise ignore and let the values decide what to do.
            if (fieldType1Copy.getFieldTypes()[i].getTypeTag() == ATypeTag.OBJECT) {
                IAType[] oldTypes = resultType.getFieldTypes();
                oldTypes[pos] = mergedNestedType(fname, fieldType1Copy.getFieldTypes()[i],
                        resultType.getFieldTypes()[pos]);
                resultType = new ARecordType(resultType.getTypeName(), resultType.getFieldNames(), oldTypes,
                        resultType.isOpen());
            }
        } else {
            IAType[] combinedFieldTypes =
                    ArrayUtils.addAll(resultType.getFieldTypes().clone(), fieldType1Copy.getFieldTypes()[i]);
            resultType = new ARecordType(resultType.getTypeName(),
                    ArrayUtils.addAll(resultType.getFieldNames(), fieldType1Copy.getFieldNames()[i]),
                    combinedFieldTypes, resultType.isOpen());
        }
    }
    return resultType;
}
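The merge recurses only when both records carry a sub-record under the same field name; a same-named record field whose counterpart is not a record raises COMPILATION_DUPLICATE_FIELD_NAME, a same-named non-record field is ignored at the type level, and unmatched fields are appended. A hypothetical sketch of the same recursion, modelling records as nested maps rather than ARecordType instances:

// Hypothetical sketch of the recursive merge above; records are modelled as
// nested LinkedHashMaps instead of ARecordType instances.
import java.util.LinkedHashMap;
import java.util.Map;

final class RecordMergeSketch {
    @SuppressWarnings("unchecked")
    static Map<String, Object> merge(Map<String, Object> left, Map<String, Object> right) {
        Map<String, Object> result = new LinkedHashMap<>(left);
        for (Map.Entry<String, Object> entry : right.entrySet()) {
            Object existing = result.get(entry.getKey());
            if (existing == null) {
                // unmatched field: append it to the result record
                result.put(entry.getKey(), entry.getValue());
            } else if (existing instanceof Map && entry.getValue() instanceof Map) {
                // both sides hold a sub-record: merge them recursively
                result.put(entry.getKey(),
                        merge((Map<String, Object>) existing, (Map<String, Object>) entry.getValue()));
            } else if (entry.getValue() instanceof Map) {
                // a record on one side only: the real code reports this as a
                // duplicate field name (COMPILATION_DUPLICATE_FIELD_NAME)
                throw new IllegalArgumentException("Duplicate field name: " + entry.getKey());
            }
            // same-named non-record fields are ignored at the type level;
            // the values decide what to do at runtime
        }
        return result;
    }
}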