Use of org.apache.asterix.metadata.entities.DatasourceAdapter in project asterixdb by apache.
The class MetadataCache, method addAdapterIfNotExists.
public DatasourceAdapter addAdapterIfNotExists(DatasourceAdapter adapter) {
    synchronized (adapters) {
        // Look up (or lazily create) the per-dataverse adapter map.
        Map<String, DatasourceAdapter> adaptersInDataverse =
                adapters.get(adapter.getAdapterIdentifier().getNamespace());
        if (adaptersInDataverse == null) {
            adaptersInDataverse = new HashMap<>();
            adapters.put(adapter.getAdapterIdentifier().getNamespace(), adaptersInDataverse);
        }
        // Insert only if no adapter with this name is cached yet. Note that put() on an
        // absent key returns the previous mapping (null), so this method returns null
        // whether or not the adapter was actually inserted.
        DatasourceAdapter adapterObject = adaptersInDataverse.get(adapter.getAdapterIdentifier().getName());
        if (adapterObject == null) {
            return adaptersInDataverse.put(adapter.getAdapterIdentifier().getName(), adapter);
        }
        return null;
    }
}
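The cache here is a two-level map (dataverse namespace to adapter name to adapter), and the whole lookup-and-insert runs under a single lock so concurrent installers cannot clobber each other's entries. Below is a minimal, self-contained sketch of the same "add if absent" pattern; TwoLevelCache and its method names are illustrative, not AsterixDB API.

import java.util.HashMap;
import java.util.Map;

// Hypothetical two-level cache mirroring the pattern in addAdapterIfNotExists.
class TwoLevelCache<V> {
    private final Map<String, Map<String, V>> entries = new HashMap<>();

    // Inserts only when the (namespace, name) slot is empty. As in the original,
    // put() on an absent key returns null, so the return value alone cannot
    // distinguish "inserted" from "already present".
    public V addIfNotExists(String namespace, String name, V value) {
        synchronized (entries) {
            Map<String, V> inNamespace = entries.computeIfAbsent(namespace, k -> new HashMap<>());
            if (!inNamespace.containsKey(name)) {
                return inNamespace.put(name, value); // null: there was no prior mapping
            }
            return null;
        }
    }
}

Callers that need to know whether the value was actually inserted should check for the key explicitly before or after the call rather than relying on the null return.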
Use of org.apache.asterix.metadata.entities.DatasourceAdapter in project asterixdb by apache.
The class ExternalLibraryUtils, method installLibraryIfNeeded.
/**
 * Each element of a library is installed as part of a transaction. Any
 * failure in installing an element does not affect the installation of other
 * libraries.
 */
protected static void installLibraryIfNeeded(String dataverse, final File libraryDir,
        Map<String, List<String>> uninstalledLibs) throws Exception {
    String libraryName = libraryDir.getName().trim();
    List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
    // Was this library just uninstalled?
    boolean wasUninstalled = uninstalledLibsInDv != null && uninstalledLibsInDv.contains(libraryName);
    MetadataTransactionContext mdTxnCtx = null;
    try {
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        Library libraryInMetadata = MetadataManager.INSTANCE.getLibrary(mdTxnCtx, dataverse, libraryName);
        if (libraryInMetadata != null && !wasUninstalled) {
            // Exists in metadata and was not uninstalled; we return.
            // Another place which shows that our metadata transactions are broken
            // (we didn't call commit before!!!)
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return;
        }
        // Add the library.
        MetadataManager.INSTANCE.addLibrary(mdTxnCtx, new Library(dataverse, libraryName));
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Added library " + libraryName + " to Metadata");
        }
        // Get the descriptor.
        String[] libraryDescriptors = libraryDir.list((dir, name) -> name.endsWith(".xml"));
        if (libraryDescriptors == null) {
            throw new IOException("Unable to list files in directory " + libraryDir);
        }
        if (libraryDescriptors.length == 0) {
            // Should be fine: the library was installed, but it has no content to add to metadata.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return;
        } else if (libraryDescriptors.length > 1) {
            throw new IllegalStateException("More than one library descriptor defined");
        }
        ExternalLibrary library = getLibrary(new File(libraryDir + File.separator + libraryDescriptors[0]));
        // Get the dataverse, creating it if it does not exist yet.
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
        if (dv == null) {
            MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
                    new Dataverse(dataverse, NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, MetadataUtil.PENDING_NO_OP));
        }
        // Add functions.
        if (library.getLibraryFunctions() != null) {
            for (LibraryFunction function : library.getLibraryFunctions().getLibraryFunction()) {
                String[] fargs = function.getArguments().trim().split(",");
                List<String> args = new ArrayList<>();
                for (String arg : fargs) {
                    args.add(arg);
                }
                Function f = new Function(dataverse, libraryName + "#" + function.getName().trim(), args.size(),
                        args, function.getReturnType().trim(), function.getDefinition().trim(),
                        library.getLanguage().trim(), function.getFunctionType().trim(), 0);
                MetadataManager.INSTANCE.addFunction(mdTxnCtx, f);
                if (LOGGER.isLoggable(Level.INFO)) {
                    LOGGER.info("Installed function: " + libraryName + "#" + function.getName().trim());
                }
            }
        }
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Installed functions in library: " + libraryName);
        }
        // Add adapters.
        if (library.getLibraryAdapters() != null) {
            for (LibraryAdapter adapter : library.getLibraryAdapters().getLibraryAdapter()) {
                String adapterFactoryClass = adapter.getFactoryClass().trim();
                String adapterName = libraryName + "#" + adapter.getName().trim();
                AdapterIdentifier aid = new AdapterIdentifier(dataverse, adapterName);
                DatasourceAdapter dsa =
                        new DatasourceAdapter(aid, adapterFactoryClass, IDataSourceAdapter.AdapterType.EXTERNAL);
                MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
                if (LOGGER.isLoggable(Level.INFO)) {
                    LOGGER.info("Installed adapter: " + adapterName);
                }
            }
        }
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Installed adapters in library: " + libraryName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (LOGGER.isLoggable(Level.SEVERE)) {
            LOGGER.log(Level.SEVERE, "Exception in installing library " + libraryName, e);
        }
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
    }
}
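The control flow above is the usual metadata-transaction shape: begin, do the work, commit on success, abort and log on failure. The exception is deliberately swallowed so that, per the javadoc, one failed library does not block installation of the others. A minimal sketch of that shape follows; MetadataTxn and TxnWork are hypothetical stand-ins, not AsterixDB types.

import java.util.logging.Level;
import java.util.logging.Logger;

// Hypothetical template mirroring installLibraryIfNeeded's begin/commit/abort flow.
final class TxnTemplate {
    private static final Logger LOG = Logger.getLogger(TxnTemplate.class.getName());

    interface MetadataTxn {
        void commit() throws Exception;
        void abort() throws Exception;
    }

    interface TxnWork {
        void run(MetadataTxn txn) throws Exception;
    }

    // Runs 'work' inside a transaction: commit on success, abort and log on failure.
    // The failure is absorbed here, as in the original, so sibling installs proceed.
    static void inTransaction(MetadataTxn txn, TxnWork work) {
        try {
            work.run(txn);
            txn.commit();
        } catch (Exception e) {
            LOG.log(Level.SEVERE, "Transaction failed; aborting", e);
            try {
                txn.abort();
            } catch (Exception abortFailure) {
                LOG.log(Level.SEVERE, "Abort also failed", abortFailure);
            }
        }
    }
}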
Use of org.apache.asterix.metadata.entities.DatasourceAdapter in project asterixdb by apache.
The class MetadataNode, method getDataverseAdapters.
@Override
public List<DatasourceAdapter> getDataverseAdapters(JobId jobId, String dataverseName)
        throws MetadataException, RemoteException {
    try {
        ITupleReference searchKey = createTuple(dataverseName);
        DatasourceAdapterTupleTranslator tupleReaderWriter =
                tupleTranslatorProvider.getAdapterTupleTranslator(false);
        IValueExtractor<DatasourceAdapter> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
        List<DatasourceAdapter> results = new ArrayList<>();
        searchIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, searchKey, valueExtractor, results);
        return results;
    } catch (HyracksDataException e) {
        throw new MetadataException(e);
    }
}
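This method is an instance of MetadataNode's standard lookup recipe: build a search key from the dataverse name, obtain a tuple translator, wrap it in a value extractor, and let searchIndex materialize every matching tuple into the result list. Below is a minimal sketch of that recipe over plain in-memory types; Tuple, Extractor, and scan are illustrative placeholders, not AsterixDB API.

import java.util.ArrayList;
import java.util.List;

// Hypothetical in-memory analogue of the search-key / value-extractor pattern.
final class IndexScanSketch {
    static final class Tuple {
        final String key;
        final String payload;
        Tuple(String key, String payload) {
            this.key = key;
            this.payload = payload;
        }
    }

    interface Extractor<T> {
        T extract(Tuple tuple);
    }

    // Scans 'index' for tuples matching 'searchKey' and materializes each match,
    // mirroring searchIndex(jobId, dataset, searchKey, valueExtractor, results).
    static <T> List<T> scan(List<Tuple> index, String searchKey, Extractor<T> extractor) {
        List<T> results = new ArrayList<>();
        for (Tuple t : index) {
            if (t.key.equals(searchKey)) {
                results.add(extractor.extract(t));
            }
        }
        return results;
    }
}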
Use of org.apache.asterix.metadata.entities.DatasourceAdapter in project asterixdb by apache.
The class FeedMetadataUtil, method getPrimaryFeedFactoryAndOutput.
@SuppressWarnings("rawtypes")
public static Triple<IAdapterFactory, RecordDescriptor, AdapterType> getPrimaryFeedFactoryAndOutput(Feed feed,
        FeedPolicyAccessor policyAccessor, MetadataTransactionContext mdTxnCtx, ICcApplicationContext appCtx)
        throws AlgebricksException {
    // This method needs to be revisited.
    String adapterName = null;
    DatasourceAdapter adapterEntity = null;
    String adapterFactoryClassname = null;
    IAdapterFactory adapterFactory = null;
    ARecordType adapterOutputType = null;
    ARecordType metaType = null;
    Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> feedProps = null;
    IDataSourceAdapter.AdapterType adapterType = null;
    try {
        adapterName = feed.getAdapterName();
        Map<String, String> configuration = feed.getAdapterConfiguration();
        configuration.putAll(policyAccessor.getFeedPolicy());
        adapterOutputType = getOutputType(feed, configuration, ExternalDataConstants.KEY_TYPE_NAME);
        metaType = getOutputType(feed, configuration, ExternalDataConstants.KEY_META_TYPE_NAME);
        ExternalDataUtils.prepareFeed(configuration, feed.getDataverseName(), feed.getFeedName());
        // Get the adapter from the metadata dataset <Metadata dataverse>.
        adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME,
                adapterName);
        // Fall back to the metadata dataset <The feed dataverse>.
        if (adapterEntity == null) {
            adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feed.getDataverseName(), adapterName);
        }
        if (adapterEntity != null) {
            adapterType = adapterEntity.getType();
            adapterFactoryClassname = adapterEntity.getClassname();
            switch (adapterType) {
                case INTERNAL:
                    adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
                    break;
                case EXTERNAL:
                    String[] anameComponents = adapterName.split("#");
                    String libraryName = anameComponents[0];
                    ClassLoader cl =
                            appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
                    adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
                    break;
                default:
                    throw new AsterixException("Unknown adapter type " + adapterType);
            }
            adapterFactory.setOutputType(adapterOutputType);
            adapterFactory.setMetaType(metaType);
            adapterFactory.configure(appCtx.getServiceContext(), configuration);
        } else {
            adapterFactory = AdapterFactoryProvider.getAdapterFactory(appCtx.getServiceContext(), adapterName,
                    configuration, adapterOutputType, metaType);
            adapterType = IDataSourceAdapter.AdapterType.INTERNAL;
        }
        if (metaType == null) {
            metaType = getOutputType(feed, configuration, ExternalDataConstants.KEY_META_TYPE_NAME);
        }
        if (adapterOutputType == null) {
            if (!configuration.containsKey(ExternalDataConstants.KEY_TYPE_NAME)) {
                throw new AsterixException("Unspecified feed output data type");
            }
            adapterOutputType = getOutputType(feed, configuration, ExternalDataConstants.KEY_TYPE_NAME);
        }
        int numOfOutputs = 1;
        if (metaType != null) {
            numOfOutputs++;
        }
        if (ExternalDataUtils.isChangeFeed(configuration)) {
            // Get the number of PKs.
            numOfOutputs += ExternalDataUtils.getNumberOfKeys(configuration);
        }
        ISerializerDeserializer[] serdes = new ISerializerDeserializer[numOfOutputs];
        int i = 0;
        serdes[i++] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(adapterOutputType);
        if (metaType != null) {
            serdes[i++] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType);
        }
        if (ExternalDataUtils.isChangeFeed(configuration)) {
            getSerdesForPKs(serdes, configuration, metaType, adapterOutputType, i);
        }
        feedProps = new Triple<>(adapterFactory, new RecordDescriptor(serdes), adapterType);
    } catch (Exception e) {
        throw new AlgebricksException("Unable to create adapter", e);
    }
    return feedProps;
}
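In the EXTERNAL branch, the qualified adapter name encodes the owning library as "libraryName#adapterName", and the factory class must be loaded through that library's class loader rather than the default one. A minimal sketch of that resolution step follows; the classLoaders map is a hypothetical stand-in for the library manager's per-library class loaders.

import java.util.Map;

// Hypothetical resolver mirroring the EXTERNAL case above.
final class ExternalFactoryLoader {
    // Splits the qualified adapter name and instantiates the factory class from
    // the owning library's class loader.
    static Object loadFactory(Map<String, ClassLoader> classLoaders, String qualifiedName, String factoryClass)
            throws ReflectiveOperationException {
        String libraryName = qualifiedName.split("#")[0]; // "mylib#myadapter" -> "mylib"
        ClassLoader cl = classLoaders.get(libraryName);
        if (cl == null) {
            throw new ClassNotFoundException("No class loader registered for library " + libraryName);
        }
        return cl.loadClass(factoryClass).newInstance();
    }
}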
Use of org.apache.asterix.metadata.entities.DatasourceAdapter in project asterixdb by apache.
The class MetadataNode, method dropDataverse.
@Override
public void dropDataverse(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
    try {
        confirmDataverseCanBeDeleted(jobId, dataverseName);
        List<Dataset> dataverseDatasets;
        Dataset ds;
        dataverseDatasets = getDataverseDatasets(jobId, dataverseName);
        // Drop all datasets in this dataverse.
        for (int i = 0; i < dataverseDatasets.size(); i++) {
            ds = dataverseDatasets.get(i);
            dropDataset(jobId, dataverseName, ds.getDatasetName());
        }
        // After dropping datasets, drop datatypes.
        List<Datatype> dataverseDatatypes;
        // As a side effect, acquires an S lock on the 'datatype' dataset
        // on behalf of txnId.
        dataverseDatatypes = getDataverseDatatypes(jobId, dataverseName);
        // Drop all types in this dataverse.
        for (int i = 0; i < dataverseDatatypes.size(); i++) {
            forceDropDatatype(jobId, dataverseName, dataverseDatatypes.get(i).getDatatypeName());
        }
        // As a side effect, acquires an S lock on the 'Function' dataset
        // on behalf of txnId.
        List<Function> dataverseFunctions = getDataverseFunctions(jobId, dataverseName);
        // Drop all functions in this dataverse.
        for (Function function : dataverseFunctions) {
            dropFunction(jobId, new FunctionSignature(dataverseName, function.getName(), function.getArity()));
        }
        // As a side effect, acquires an S lock on the 'Adapter' dataset
        // on behalf of txnId.
        List<DatasourceAdapter> dataverseAdapters = getDataverseAdapters(jobId, dataverseName);
        // Drop all adapters in this dataverse.
        for (DatasourceAdapter adapter : dataverseAdapters) {
            dropAdapter(jobId, dataverseName, adapter.getAdapterIdentifier().getName());
        }
        List<Feed> dataverseFeeds;
        List<FeedConnection> feedConnections;
        Feed feed;
        dataverseFeeds = getDataverseFeeds(jobId, dataverseName);
        // Drop all feeds and feed connections in this dataverse.
        for (int i = 0; i < dataverseFeeds.size(); i++) {
            feed = dataverseFeeds.get(i);
            feedConnections = getFeedConnections(jobId, dataverseName, feed.getFeedName());
            for (FeedConnection feedConnection : feedConnections) {
                dropFeedConnection(jobId, dataverseName, feed.getFeedName(), feedConnection.getDatasetName());
            }
            dropFeed(jobId, dataverseName, feed.getFeedName());
        }
        List<FeedPolicyEntity> feedPolicies = getDataversePolicies(jobId, dataverseName);
        if (feedPolicies != null && !feedPolicies.isEmpty()) {
            // Drop all feed ingestion policies in this dataverse.
            for (FeedPolicyEntity feedPolicy : feedPolicies) {
                dropFeedPolicy(jobId, dataverseName, feedPolicy.getPolicyName());
            }
        }
        // Delete the dataverse entry from the 'dataverse' dataset.
        ITupleReference searchKey = createTuple(dataverseName);
        // As a side effect, acquires an S lock on the 'dataverse' dataset
        // on behalf of txnId.
        ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, searchKey);
        deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
        // TODO: Change this to be a BTree-specific exception, e.g.,
        // BTreeKeyDoesNotExistException.
    } catch (HyracksDataException e) {
        if (e.getComponent().equals(ErrorCode.HYRACKS)
                && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
            throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.", e);
        } else {
            throw new MetadataException(e);
        }
    } catch (ACIDException e) {
        throw new MetadataException(e);
    }
}
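The ordering here is the point: every dependent entity (datasets, datatypes, functions, adapters, feeds with their connections, feed policies) is dropped before the dataverse tuple itself is deleted, so the parent entry is never removed while children still reference it. A minimal sketch of that cascade follows; the kind-to-dropper maps are illustrative placeholders, not AsterixDB API, and LinkedHashMap is used to preserve the documented order.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

// Hypothetical cascade mirroring dropDataverse: children first, parent last.
final class CascadeDropSketch {
    static void dropDataverseCascade(String dataverse, LinkedHashMap<String, List<String>> childrenByKind,
            Map<String, Consumer<String>> dropperByKind, Consumer<String> dropDataverseTuple) {
        // Iterate kinds in insertion order: datasets, datatypes, functions,
        // adapters, feeds (with connections), feed policies.
        for (Map.Entry<String, List<String>> kind : childrenByKind.entrySet()) {
            Consumer<String> dropper = dropperByKind.get(kind.getKey());
            for (String child : kind.getValue()) {
                dropper.accept(child);
            }
        }
        // Only after every child is gone is the dataverse entry itself deleted.
        dropDataverseTuple.accept(dataverse);
    }
}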