Use of io.cdap.cdap.data2.dataset2.DatasetFramework in project cdap by caskdata.
The class JobQueueDatasetTest, method checkDatasetType.
@Test
public void checkDatasetType() throws DatasetManagementException {
  DatasetFramework dsFramework = getInjector().getInstance(DatasetFramework.class);
  Assert.assertTrue(dsFramework.hasType(NamespaceId.SYSTEM.datasetType(JobQueueDataset.class.getName())));
}
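The same hasType check can also serve as a guard before a dataset is accessed. A minimal sketch, assuming the dsFramework instance from the test above and io.cdap.cdap.proto.id.DatasetTypeId as the type returned by datasetType; the exception and its message are illustrative:

// Hedged sketch: fail fast if the system dataset type has not been registered.
DatasetTypeId typeId = NamespaceId.SYSTEM.datasetType(JobQueueDataset.class.getName());
if (!dsFramework.hasType(typeId)) {
  throw new IllegalStateException("Dataset type is not registered: " + typeId);
}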
Use of io.cdap.cdap.data2.dataset2.DatasetFramework in project cdap by caskdata.
The class JobQueueDatasetTest, method before.
@Before
public void before() throws Exception {
  DatasetFramework dsFramework = getInjector().getInstance(DatasetFramework.class);
  TransactionSystemClient txClient = getInjector().getInstance(TransactionSystemClient.class);
  TransactionExecutorFactory txExecutorFactory = new DynamicTransactionExecutorFactory(txClient);
  // Look up the job queue dataset and wrap it in a transaction executor so test
  // operations can run inside transactions.
  jobQueue = dsFramework.getDataset(Schedulers.JOB_QUEUE_DATASET_ID, new HashMap<String, String>(), null);
  Assert.assertNotNull(jobQueue);
  this.txExecutor = txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) jobQueue));
}
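With this setup, test bodies would route their dataset operations through the executor so that each call is wrapped in a transaction. A minimal sketch, assuming the txExecutor and jobQueue fields initialized above; the body is illustrative:

// Hedged sketch: everything inside the subroutine is committed (or rolled back) as one transaction.
txExecutor.execute(() -> {
  // read from or write to jobQueue here; the executor begins the transaction
  // before this block and commits or rolls it back afterwards
});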
Use of io.cdap.cdap.data2.dataset2.DatasetFramework in project cdap by caskdata.
The class ContextManager, method createContext.
// This method is called by the mappers/reducers of jobs launched by Hive.
private static Context createContext(Configuration conf) throws IOException {
  // Creating the context is only needed when running as a MapReduce job.
  // In other cases, ContextManager will be initialized using the saveContext method.
  CConfiguration cConf = ConfigurationUtil.get(conf, Constants.Explore.CCONF_KEY, CConfCodec.INSTANCE);
  Configuration hConf = ConfigurationUtil.get(conf, Constants.Explore.HCONF_KEY, HConfCodec.INSTANCE);
  Injector injector = createInjector(cConf, hConf);
  ZKClientService zkClientService = injector.getInstance(ZKClientService.class);
  zkClientService.startAndWait();
  DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
  StreamAdmin streamAdmin = injector.getInstance(StreamAdmin.class);
  SystemDatasetInstantiatorFactory datasetInstantiatorFactory =
    injector.getInstance(SystemDatasetInstantiatorFactory.class);
  AuthenticationContext authenticationContext = injector.getInstance(AuthenticationContext.class);
  AuthorizationEnforcer authorizationEnforcer = injector.getInstance(AuthorizationEnforcer.class);
  return new Context(datasetFramework, streamAdmin, zkClientService, datasetInstantiatorFactory,
                     authenticationContext, authorizationEnforcer);
}
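Since the ZooKeeper client is started before the rest of the Context is assembled, a failure in one of the later lookups would leave a started client behind. A minimal sketch of a guard around that window, not taken from the original class; the catch scope is illustrative:

// Hedged sketch: stop the ZK client if the rest of the context cannot be built.
ZKClientService zkClientService = injector.getInstance(ZKClientService.class);
zkClientService.startAndWait();
try {
  // ... resolve the remaining services and construct the Context, as above ...
} catch (RuntimeException e) {
  zkClientService.stopAndWait();
  throw e;
}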
Use of io.cdap.cdap.data2.dataset2.DatasetFramework in project cdap by caskdata.
The class LocalRouteStoreTest, method beforeClass.
@BeforeClass
public static void beforeClass() throws DatasetManagementException, IOException {
  CConfiguration cConf = CConfiguration.create();
  cConf.set(Constants.CFG_LOCAL_DATA_DIR, TEMP_FOLDER.newFolder().getAbsolutePath());
  txManager = new TransactionManager(new Configuration());
  txManager.startAndWait();
  Injector injector = Guice.createInjector(
    new ConfigModule(cConf),
    new LocationRuntimeModule().getInMemoryModules(),
    new SystemDatasetRuntimeModule().getInMemoryModules(),
    new AbstractModule() {
      @Override
      protected void configure() {
        install(new FactoryModuleBuilder()
                  .implement(DatasetDefinitionRegistry.class, DefaultDatasetDefinitionRegistry.class)
                  .build(DatasetDefinitionRegistryFactory.class));
        bind(DatasetFramework.class).to(InMemoryDatasetFramework.class);
        bind(NamespaceQueryAdmin.class).to(InMemoryNamespaceClient.class).in(Scopes.SINGLETON);
      }
    });
  datasetFramework = injector.getInstance(DatasetFramework.class);
}
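A setup like this is normally paired with a teardown that stops the in-memory transaction manager once the test class finishes. A minimal sketch, assuming the txManager field above; the method name is illustrative:

// Hedged sketch: stop the Tephra transaction manager started in beforeClass.
@AfterClass
public static void afterClass() {
  if (txManager != null) {
    txManager.stopAndWait();
  }
}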
Use of io.cdap.cdap.data2.dataset2.DatasetFramework in project cdap by caskdata.
The class ProgramNotificationSubscriberServiceTest, method testAppSpecNotRequiredToWriteState.
@Test
public void testAppSpecNotRequiredToWriteState() throws Exception {
  Injector injector = AppFabricTestHelper.getInjector();
  CConfiguration cConf = injector.getInstance(CConfiguration.class);
  ProgramNotificationSubscriberService programNotificationSubscriberService =
    injector.getInstance(ProgramNotificationSubscriberService.class);
  programNotificationSubscriberService.startAndWait();
  DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
  TransactionExecutorFactory txExecutorFactory = injector.getInstance(TransactionExecutorFactory.class);
  DatasetId storeTable = NamespaceId.SYSTEM.dataset(Constants.AppMetaStore.TABLE);
  Table table = DatasetsUtil.getOrCreateDataset(datasetFramework, storeTable, Table.class.getName(),
                                                DatasetProperties.EMPTY, Collections.<String, String>emptyMap());
  final AppMetadataStore metadataStoreDataset = new AppMetadataStore(table, cConf, new AtomicBoolean(false));
  final TransactionExecutor txnl =
    txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) metadataStoreDataset));
  ProgramStateWriter programStateWriter = injector.getInstance(ProgramStateWriter.class);
  ProgramId programId = NamespaceId.DEFAULT.app("someapp").program(ProgramType.SERVICE, "s");
  ProgramOptions programOptions = new SimpleProgramOptions(programId);
  final ProgramRunId runId = programId.run(RunIds.generate());

  // Publish the STARTING state and wait until the subscriber persists it.
  programStateWriter.start(runId, programOptions, null);
  Tasks.waitFor(ProgramRunStatus.STARTING, () -> txnl.execute(() -> {
    RunRecordMeta meta = metadataStoreDataset.getRun(runId);
    return meta == null ? null : meta.getStatus();
  }), 10, TimeUnit.SECONDS);

  // Transition to RUNNING and wait for the run record to reflect it.
  programStateWriter.running(runId, UUID.randomUUID().toString());
  Tasks.waitFor(ProgramRunStatus.RUNNING, () -> txnl.execute(() -> {
    RunRecordMeta meta = metadataStoreDataset.getRun(runId);
    return meta == null ? null : meta.getStatus();
  }), 10, TimeUnit.SECONDS);

  // Kill the run and verify the final KILLED state is written.
  programStateWriter.killed(runId);
  Tasks.waitFor(ProgramRunStatus.KILLED, () -> txnl.execute(() -> {
    RunRecordMeta meta = metadataStoreDataset.getRun(runId);
    return meta == null ? null : meta.getStatus();
  }), 10, TimeUnit.SECONDS);
}
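The three polling blocks differ only in the expected status, so they could be factored into a small helper built from the same calls. A minimal sketch, not part of the original test; the method name and placement are illustrative:

// Hedged sketch: poll the run record through the transaction executor until it
// reaches the expected status or the 10-second timeout expires.
private void waitForRunStatus(ProgramRunStatus expected, TransactionExecutor txnl,
                              AppMetadataStore store, ProgramRunId runId) throws Exception {
  Tasks.waitFor(expected, () -> txnl.execute(() -> {
    RunRecordMeta meta = store.getRun(runId);
    return meta == null ? null : meta.getStatus();
  }), 10, TimeUnit.SECONDS);
}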