Use of io.cdap.cdap.data.dataset.SystemDatasetInstantiator in project cdap by caskdata.
From the class HiveExploreStructuredRecordTestRun, method start.
@BeforeClass
public static void start() throws Exception {
  initialize(tmpFolder);

  // Register the custom "email" dataset module and create an instance of it.
  DatasetModuleId emailModuleId = NAMESPACE_ID.datasetModule("email");
  datasetFramework.addModule(emailModuleId, new EmailTableDefinition.EmailTableModule());
  datasetFramework.addInstance("email", MY_TABLE, DatasetProperties.EMPTY);

  // Build a transactional over a multi-thread dataset cache backed by the dataset framework.
  MultiThreadDatasetCache datasetCache = new MultiThreadDatasetCache(
      new SystemDatasetInstantiator(datasetFramework), transactionSystemClient,
      NAMESPACE_ID, Collections.<String, String>emptyMap(), null, null);
  transactional = Transactions.createTransactional(datasetCache);

  // Seed one row so the explore queries in the tests have data to read.
  transactional.execute(context -> {
    // Accessing dataset instance to perform data operations
    EmailTableDefinition.EmailTable table = context.getDataset(MY_TABLE.getDataset());
    Assert.assertNotNull(table);
    table.writeEmail("email1", "this is the subject", "this is the body", "sljackson@boss.com");
  });

  datasetFramework.addModule(NAMESPACE_ID.datasetModule("TableWrapper"), new TableWrapperDefinition.Module());
}
Use of io.cdap.cdap.data.dataset.SystemDatasetInstantiator in project cdap by caskdata.
From the class ExploreTableManager, method generateDisableStatement.
/**
 * Generates the Hive statement that disables exploration of the given dataset, or returns
 * {@code null} when there is nothing to do (the explore table does not exist, or the dataset
 * reuses an existing table that must not be dropped).
 *
 * @param datasetId the ID of the dataset to disable
 * @param spec the specification of the dataset to disable
 * @return the DROP statement for the dataset's explore table, or {@code null} if none is needed
 * @throws ExploreException if the dataset could not be instantiated
 */
private String generateDisableStatement(DatasetId datasetId, DatasetSpecification spec) throws ExploreException {
  String tableName = tableNaming.getTableName(datasetId, spec.getProperties());
  String databaseName = ExploreProperties.getExploreDatabaseName(spec.getProperties());
  // If the table does not exist, nothing needs to be done
  try {
    exploreService.getTableInfo(datasetId.getNamespace(), databaseName, tableName);
  } catch (TableNotFoundException e) {
    // Ignore exception, since this means table was not found.
    return null;
  }
  try (SystemDatasetInstantiator datasetInstantiator = datasetInstantiatorFactory.create()) {
    Dataset dataset = datasetInstantiator.getDataset(datasetId);
    try {
      if (dataset instanceof FileSet || dataset instanceof PartitionedFileSet) {
        // do not drop the explore table if the dataset is reusing an existing table
        if (FileSetProperties.isUseExisting(spec.getProperties())) {
          return null;
        }
      }
      return generateDeleteStatement(dataset, databaseName, tableName);
    } finally {
      Closeables.closeQuietly(dataset);
    }
  } catch (IOException e) {
    LOG.error("Exception creating dataset classLoaderProvider for dataset {}.", datasetId, e);
    // Chain the cause so callers can see why instantiation failed, not just that it did.
    throw new ExploreException("Exception instantiating dataset " + datasetId, e);
  }
}
Use of io.cdap.cdap.data.dataset.SystemDatasetInstantiator in project cdap by caskdata.
From the class ExploreTableManager, method updateDataset.
/**
 * Update ad-hoc exploration on the given dataset by altering the corresponding Hive table. If exploration has
 * not been enabled on the dataset, this will fail. Assumes the dataset actually exists.
 *
 * @param datasetId the ID of the dataset to enable
 * @param spec the specification for the dataset to enable
 * @param oldSpec the previous specification for the dataset, used to detect renames and schema changes
 * @return query handle for creating the Hive table for the dataset
 * @throws IllegalArgumentException if some required dataset property like schema is not set
 * @throws UnsupportedTypeException if the schema of the dataset is not compatible with Hive
 * @throws ExploreException if there was an exception submitting the create table statement
 * @throws SQLException if there was a problem with the create table statement
 * @throws DatasetNotFoundException if the dataset had to be instantiated, but could not be found
 * @throws ClassNotFoundException if there was a missing class when instantiating the dataset
 */
public QueryHandle updateDataset(DatasetId datasetId, DatasetSpecification spec, DatasetSpecification oldSpec) throws IllegalArgumentException, ExploreException, SQLException, UnsupportedTypeException, DatasetNotFoundException, ClassNotFoundException {
  String tableName = tableNaming.getTableName(datasetId, spec.getProperties());
  String databaseName = ExploreProperties.getExploreDatabaseName(spec.getProperties());
  String oldTableName = tableNaming.getTableName(datasetId, oldSpec.getProperties());
  String oldDatabaseName = ExploreProperties.getExploreDatabaseName(oldSpec.getProperties());
  try {
    exploreService.getTableInfo(datasetId.getNamespace(), oldDatabaseName, oldTableName);
  } catch (TableNotFoundException e) {
    // the old spec was never explorable; but the new spec may be, so attempt to enable it
    return enableDataset(datasetId, spec, false);
  }
  List<String> alterStatements;
  if (!(oldTableName.equals(tableName) && Objects.equals(oldDatabaseName, databaseName))) {
    alterStatements = new ArrayList<>();
    // database/table name changed. All we can do is disable the old table and enable the new one
    String disableStatement = generateDisableStatement(datasetId, oldSpec);
    if (disableStatement != null) {
      alterStatements.add(disableStatement);
    }
    String enableStatement = generateEnableStatement(datasetId, spec, false);
    if (enableStatement != null) {
      alterStatements.add(enableStatement);
    }
  } else {
    // same table: instantiate the dataset and compute ALTER statements from the spec diff
    try (SystemDatasetInstantiator datasetInstantiator = datasetInstantiatorFactory.create()) {
      Dataset dataset = datasetInstantiator.getDataset(datasetId);
      try {
        alterStatements = generateAlterStatements(datasetId, tableName, dataset, spec, oldSpec);
      } finally {
        Closeables.closeQuietly(dataset);
      }
    } catch (IOException e) {
      LOG.error("Exception instantiating dataset {}.", datasetId, e);
      // Chain the cause so callers can see why instantiation failed, not just that it did.
      throw new ExploreException("Exception while trying to instantiate dataset " + datasetId, e);
    }
  }
  LOG.trace("alter statements for update: {}", alterStatements);
  // alterStatements is assigned on every path above, so only emptiness needs checking
  if (alterStatements == null || alterStatements.isEmpty()) {
    return QueryHandle.NO_OP;
  }
  if (alterStatements.size() == 1) {
    return exploreService.execute(datasetId.getParent(), alterStatements.get(0));
  }
  return exploreService.execute(datasetId.getParent(), alterStatements.toArray(new String[0]));
}
Use of io.cdap.cdap.data.dataset.SystemDatasetInstantiator in project cdap by caskdata.
From the class ExploreExecutorHttpHandler, method doPartitionOperation.
/**
 * Parses a partition key from the request body and submits the given partition operation
 * (add/drop) against the partitioned file set, responding with the resulting query handle.
 *
 * @param request the HTTP request whose JSON body holds the partition properties
 * @param responder the responder used to send the HTTP result
 * @param datasetId the ID of the (expected) partitioned file set dataset
 * @param partitionOperation the operation to submit once the partition key is resolved
 */
private void doPartitionOperation(FullHttpRequest request, HttpResponder responder, DatasetId datasetId, PartitionOperation partitionOperation) {
  try (SystemDatasetInstantiator datasetInstantiator = datasetInstantiatorFactory.create()) {
    Dataset dataset;
    try {
      dataset = datasetInstantiator.getDataset(datasetId);
    } catch (Exception e) {
      LOG.error("Exception instantiating dataset {}.", datasetId, e);
      responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception instantiating dataset " + datasetId);
      return;
    }
    try {
      if (!(dataset instanceof PartitionedFileSet)) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "not a partitioned dataset.");
        return;
      }
      Partitioning partitioning = ((PartitionedFileSet) dataset).getPartitioning();
      Map<String, String> properties;
      // JSON request bodies are UTF-8; close the reader to release the underlying stream
      try (Reader reader = new InputStreamReader(new ByteBufInputStream(request.content()),
                                                 java.nio.charset.StandardCharsets.UTF_8)) {
        properties = GSON.fromJson(reader, new TypeToken<Map<String, String>>() {
        }.getType());
      }
      PartitionKey partitionKey;
      try {
        partitionKey = PartitionedFileSetArguments.getOutputPartitionKey(properties, partitioning);
      } catch (Exception e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "invalid partition key: " + e.getMessage());
        return;
      }
      if (partitionKey == null) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "no partition key was given.");
        return;
      }
      QueryHandle handle = partitionOperation.submitOperation(partitionKey, properties);
      // A null handle means the operation already responded (e.g. reported its own error).
      if (handle == null) {
        return;
      }
      JsonObject json = new JsonObject();
      json.addProperty("handle", handle.getHandle());
      responder.sendJson(HttpResponseStatus.OK, json.toString());
    } finally {
      Closeables.closeQuietly(dataset);
    }
  } catch (Throwable e) {
    LOG.error("Got exception:", e);
    // getMessage() can be null (e.g. NullPointerException); fall back to the throwable's string form
    String message = e.getMessage();
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, message != null ? message : e.toString());
  }
}
Use of io.cdap.cdap.data.dataset.SystemDatasetInstantiator in project cdap by caskdata.
From the class MapReduceRunnerTestBase, method beforeClass.
@BeforeClass
public static void beforeClass() throws Exception {
  CConfiguration conf = CConfiguration.create();
  // Allow subclasses to override these two transaction settings via JVM system properties.
  for (String key : new String[] { TxConstants.Manager.CFG_TX_TIMEOUT,
                                   TxConstants.Manager.CFG_TX_CLEANUP_INTERVAL }) {
    Integer override = Integer.getInteger(key);
    if (override != null) {
      conf.setInt(key, override);
    }
  }

  injector = AppFabricTestHelper.getInjector(conf);
  txService = injector.getInstance(TransactionManager.class);
  txExecutorFactory = injector.getInstance(TransactionExecutorFactory.class);
  dsFramework = injector.getInstance(DatasetFramework.class);

  // Single-thread cache over a system dataset instantiator bound to this class's classloader.
  TransactionSystemClient txClient = injector.getInstance(TransactionSystemClient.class);
  datasetCache = new SingleThreadDatasetCache(
      new SystemDatasetInstantiator(dsFramework, MapReduceRunnerTestBase.class.getClassLoader(), null),
      txClient, NamespaceId.DEFAULT, DatasetDefinition.NO_ARGUMENTS, null, null);

  metricStore = injector.getInstance(MetricStore.class);
  txService.startAndWait();
  // Always create the default namespace
  injector.getInstance(NamespaceAdmin.class).create(NamespaceMeta.DEFAULT);
}
Aggregations