Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.
The class MetadataBootstrap, method insertMetadataDatasets.
/**
 * Inserts the metadata datasets into the physical dataset index.
 * Should be performed only on bootstrap of a new universe.
 *
 * @param mdTxnCtx
 *            the metadata transaction context
 * @param indexes
 *            the metadata primary indexes to register as datasets
 * @throws MetadataException
 */
public static void insertMetadataDatasets(MetadataTransactionContext mdTxnCtx, IMetadataIndex[] indexes)
        throws MetadataException {
    for (int i = 0; i < indexes.length; i++) {
        IDatasetDetails id = new InternalDatasetDetails(FileStructure.BTREE, PartitioningStrategy.HASH,
                indexes[i].getPartitioningExpr(), indexes[i].getPartitioningExpr(), null,
                indexes[i].getPartitioningExprType(), false, null, false);
        MetadataManager.INSTANCE.addDataset(mdTxnCtx,
                new Dataset(indexes[i].getDataverseName(), indexes[i].getIndexedDatasetName(),
                        indexes[i].getDataverseName(), indexes[i].getPayloadRecordType().getTypeName(),
                        indexes[i].getNodeGroupName(), GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME,
                        GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, id, new HashMap<String, String>(),
                        DatasetType.INTERNAL, indexes[i].getDatasetId().getId(), MetadataUtil.PENDING_NO_OP));
    }
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Finished inserting initial datasets.");
    }
}
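A minimal sketch of how this method might be driven at bootstrap time, assuming the usual MetadataManager transaction lifecycle (beginTransaction/commitTransaction/abortTransaction); the helper name and the index subset shown are hypothetical, and the real bootstrap sequence in MetadataBootstrap differs in detail:

// Hypothetical bootstrap helper, for illustration only.
private static void registerPrimaryIndexesAsDatasets() throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        // Assumed subset of the metadata primary indexes.
        IMetadataIndex[] primaryIndexes = new IMetadataIndex[] {
                MetadataPrimaryIndexes.DATAVERSE_DATASET, MetadataPrimaryIndexes.DATASET_DATASET };
        insertMetadataDatasets(mdTxnCtx, primaryIndexes);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw e;
    }
}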
Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.
The class MetadataNode, method getDatasetNamesPartitionedOnThisNodeGroup.
public List<String> getDatasetNamesPartitionedOnThisNodeGroup(JobId jobId, String nodegroup)
        throws MetadataException, RemoteException {
    // Scan all datasets and return the names of those partitioned on this node group.
    List<String> nodeGroupDatasets = new ArrayList<>();
    List<Dataset> datasets = getAllDatasets(jobId);
    for (Dataset set : datasets) {
        if (set.getNodeGroupName().equals(nodegroup)) {
            nodeGroupDatasets.add(set.getDatasetName());
        }
    }
    return nodeGroupDatasets;
}
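One plausible use of this method is to guard a node-group drop. The sketch below is hypothetical and is not the actual drop-nodegroup implementation:

// Hypothetical guard before dropping a node group.
List<String> usingDatasets = getDatasetNamesPartitionedOnThisNodeGroup(jobId, nodeGroupName);
if (!usingDatasets.isEmpty()) {
    throw new MetadataException("Nodegroup '" + nodeGroupName + "' is still in use by datasets "
            + usingDatasets + " and cannot be dropped.");
}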
Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.
The class MetadataNode, method dropDataset.
@Override
public void dropDataset(JobId jobId, String dataverseName, String datasetName)
        throws MetadataException, RemoteException {
    Dataset dataset = getDataset(jobId, dataverseName, datasetName);
    if (dataset == null) {
        throw new MetadataException("Cannot drop dataset '" + datasetName + "' because it doesn't exist.");
    }
    try {
        // Delete entry from the 'datasets' dataset.
        ITupleReference searchKey = createTuple(dataverseName, datasetName);
        // Searches the index for the tuple to be deleted. Acquires an S
        // lock on the 'dataset' dataset.
        ITupleReference datasetTuple = null;
        try {
            datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey);
            // Delete entry(s) from the 'indexes' dataset.
            List<Index> datasetIndexes = getDatasetIndexes(jobId, dataverseName, datasetName);
            if (datasetIndexes != null) {
                for (Index index : datasetIndexes) {
                    dropIndex(jobId, dataverseName, datasetName, index.getIndexName());
                }
            }
            if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
                // Delete external files.
                // As a side effect, acquires an S lock on the 'ExternalFile' dataset
                // on behalf of txnId.
                List<ExternalFile> datasetFiles = getExternalFiles(jobId, dataset);
                if (datasetFiles != null && datasetFiles.size() > 0) {
                    // Drop all external files in this dataset.
                    for (ExternalFile file : datasetFiles) {
                        dropExternalFile(jobId, dataverseName, file.getDatasetName(), file.getFileNumber());
                    }
                }
            }
        } catch (HyracksDataException hde) {
            // Ignore this exception and continue deleting all relevant artifacts.
            if (!hde.getComponent().equals(ErrorCode.HYRACKS)
                    || hde.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
                throw new MetadataException(hde);
            }
        } finally {
            deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
        }
    } catch (HyracksDataException | ACIDException e) {
        throw new MetadataException(e);
    }
}
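Note how the inner catch tolerates only the Hyracks "update/delete on a non-existent key" error, so a partially created dataset can still be cleaned up; any other failure aborts the drop. The same pattern in isolation (a sketch; deleteSecondaryArtifact is a hypothetical helper):

try {
    deleteSecondaryArtifact(jobId, searchKey); // hypothetical helper
} catch (HyracksDataException hde) {
    boolean missingKey = hde.getComponent().equals(ErrorCode.HYRACKS)
            && hde.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY;
    if (!missingKey) {
        throw new MetadataException(hde); // any other failure is fatal
    }
    // Missing key: the artifact was never created, so continue the cleanup.
}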
Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.
The class MetadataNode, method dropDataverse.
@Override
public void dropDataverse(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
    try {
        confirmDataverseCanBeDeleted(jobId, dataverseName);
        List<Dataset> dataverseDatasets;
        Dataset ds;
        dataverseDatasets = getDataverseDatasets(jobId, dataverseName);
        // Drop all datasets in this dataverse.
        for (int i = 0; i < dataverseDatasets.size(); i++) {
            ds = dataverseDatasets.get(i);
            dropDataset(jobId, dataverseName, ds.getDatasetName());
        }
        // After dropping datasets, drop datatypes.
        List<Datatype> dataverseDatatypes;
        // As a side effect, acquires an S lock on the 'datatype' dataset
        // on behalf of txnId.
        dataverseDatatypes = getDataverseDatatypes(jobId, dataverseName);
        // Drop all types in this dataverse.
        for (int i = 0; i < dataverseDatatypes.size(); i++) {
            forceDropDatatype(jobId, dataverseName, dataverseDatatypes.get(i).getDatatypeName());
        }
        // As a side effect, acquires an S lock on the 'Function' dataset
        // on behalf of txnId.
        List<Function> dataverseFunctions = getDataverseFunctions(jobId, dataverseName);
        // Drop all functions in this dataverse.
        for (Function function : dataverseFunctions) {
            dropFunction(jobId, new FunctionSignature(dataverseName, function.getName(), function.getArity()));
        }
        // As a side effect, acquires an S lock on the 'Adapter' dataset
        // on behalf of txnId.
        List<DatasourceAdapter> dataverseAdapters = getDataverseAdapters(jobId, dataverseName);
        // Drop all adapters in this dataverse.
        for (DatasourceAdapter adapter : dataverseAdapters) {
            dropAdapter(jobId, dataverseName, adapter.getAdapterIdentifier().getName());
        }
        List<Feed> dataverseFeeds;
        List<FeedConnection> feedConnections;
        Feed feed;
        dataverseFeeds = getDataverseFeeds(jobId, dataverseName);
        // Drop all feeds and feed connections in this dataverse.
        for (int i = 0; i < dataverseFeeds.size(); i++) {
            feed = dataverseFeeds.get(i);
            feedConnections = getFeedConnections(jobId, dataverseName, feed.getFeedName());
            for (FeedConnection feedConnection : feedConnections) {
                dropFeedConnection(jobId, dataverseName, feed.getFeedName(), feedConnection.getDatasetName());
            }
            dropFeed(jobId, dataverseName, feed.getFeedName());
        }
        List<FeedPolicyEntity> feedPolicies = getDataversePolicies(jobId, dataverseName);
        if (feedPolicies != null && feedPolicies.size() > 0) {
            // Drop all feed ingestion policies in this dataverse.
            for (FeedPolicyEntity feedPolicy : feedPolicies) {
                dropFeedPolicy(jobId, dataverseName, feedPolicy.getPolicyName());
            }
        }
        // Delete the dataverse entry from the 'dataverse' dataset.
        ITupleReference searchKey = createTuple(dataverseName);
        // As a side effect, acquires an S lock on the 'dataverse' dataset
        // on behalf of txnId.
        ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
        // TODO: Change this to be a BTree-specific exception, e.g.,
        // BTreeKeyDoesNotExistException.
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
            throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
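The deletion order here is deliberate: all dependents (datasets, datatypes, functions, adapters, feeds and their connections, feed policies) are removed before the dataverse tuple itself. A hypothetical top-level caller, assuming the usual MetadataManager transaction lifecycle; a real DDL handler would also deal with locking and remote failures:

// Hypothetical caller, for illustration only.
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
try {
    MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, "SocialNetwork");
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
    throw e;
}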
Use of org.apache.asterix.metadata.entities.Dataset in project asterixdb by apache.
The class MetadataTransactionContext, method dropDataset.
public void dropDataset(String dataverseName, String datasetName) {
    Dataset dataset = new Dataset(dataverseName, datasetName, null, null, null, null, null, null, null, null, -1,
            MetadataUtil.PENDING_NO_OP);
    droppedCache.addDatasetIfNotExists(dataset);
    logAndApply(new MetadataLogicalOperation(dataset, false));
}
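The Dataset constructed here is only a key: the dataverse and dataset names identify the entity, while the remaining fields are placeholders (nulls and a -1 dataset id), and the false flag in MetadataLogicalOperation marks a drop rather than an add. A hedged usage sketch; the internal call into the transaction context is an assumption based on the snippet above:

// Hypothetical sequence inside a drop-dataset DDL handler.
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, "SocialNetwork", "Users");
// MetadataManager is assumed to invoke mdTxnCtx.dropDataset("SocialNetwork", "Users"),
// so reads within this transaction no longer see the dropped dataset.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);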