Example usage of io.cdap.cdap.api.dataset.DatasetSpecification in the project cdap by caskdata:
the initializeVars method of the HBaseMetricsTable class.
/**
 * Initializes the HBase scan executor and the row key distributor from configuration.
 *
 * @param cConf CDAP configuration; supplies the maximum number of scan threads
 * @param spec dataset specification; supplies the number of table splits used for
 *             row-key salting (defaults to 16 when the property is absent)
 */
private void initializeVars(CConfiguration cConf, DatasetSpecification spec) {
  // Rejection handler: when the pool is saturated, log and run the scan on the
  // caller's thread instead of queueing or dropping it.
  RejectedExecutionHandler callerRunsPolicy = (r, executor) -> {
    REJECTION_LOG.info("No more threads in the HBase scan thread pool. Consider increase {}. Performing scan in caller thread {}",
                       Constants.Metrics.METRICS_HBASE_MAX_SCAN_THREADS, Thread.currentThread().getName());
    // Runs it from the caller thread
    if (!executor.isShutdown()) {
      r.run();
    }
  };
  int maxScanThread = cConf.getInt(Constants.Metrics.METRICS_HBASE_MAX_SCAN_THREADS);
  // Creates a executor that will shrink to 0 threads if left idle
  // Uses daemon thread, hence no need to worry about shutdown
  // When all threads are busy, use the caller thread to execute
  this.scanExecutor = new ThreadPoolExecutor(0, maxScanThread, 60L, TimeUnit.SECONDS,
                                             new SynchronousQueue<Runnable>(),
                                             Threads.createDaemonThreadFactory("metrics-hbase-scanner-%d"),
                                             callerRunsPolicy);
  // Spread writes across region servers by prefixing row keys with a one-byte hash.
  this.rowKeyDistributor = new RowKeyDistributorByHashPrefix(
      new RowKeyDistributorByHashPrefix.OneByteSimpleHash(
          spec.getIntProperty(Constants.Metrics.METRICS_HBASE_TABLE_SPLITS, 16)));
}
Example usage of io.cdap.cdap.api.dataset.DatasetSpecification in the project cdap by caskdata:
the getDataset method of the ObjectStoreDefinition class.
/**
 * Creates an {@link ObjectStoreDataset} instance backed by the embedded "objects"
 * key-value table described in the given specification.
 *
 * @param datasetContext context in which the dataset is instantiated
 * @param spec specification carrying the embedded table spec and the serialized
 *             "type" and "schema" properties
 * @param arguments runtime arguments forwarded to the underlying table
 * @param classLoader class loader used to resolve the stored object type
 * @return the assembled object store dataset
 * @throws IOException if the underlying key-value table cannot be created
 */
@Override
public ObjectStoreDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                        Map<String, String> arguments, ClassLoader classLoader) throws IOException {
  DatasetSpecification kvTableSpec = spec.getSpecification("objects");
  KeyValueTable table = tableDef.getDataset(datasetContext, kvTableSpec, arguments, classLoader);
  TypeRepresentation typeRep = GSON.fromJson(spec.getProperty("type"), TypeRepresentation.class);
  Schema schema = GSON.fromJson(spec.getProperty("schema"), Schema.class);
  // Diamond operator instead of a raw-type instantiation: avoids the unchecked
  // warning while keeping the wildcard return type intact.
  return new ObjectStoreDataset<>(spec.getName(), table, typeRep, schema, classLoader);
}
Example usage of io.cdap.cdap.api.dataset.DatasetSpecification in the project cdap by caskdata:
the testMissingSchemaFails method of the HiveExploreStructuredRecordTestRun class.
/**
 * Verifies that enabling explore on a dataset created without a schema property
 * fails with an {@link IllegalArgumentException}.
 */
@Test(expected = IllegalArgumentException.class)
public void testMissingSchemaFails() throws Exception {
  DatasetId badTableId = NAMESPACE_ID.dataset("badtable");
  // No schema is set on purpose — DatasetProperties.EMPTY carries no properties.
  datasetFramework.addInstance("TableWrapper", badTableId, DatasetProperties.EMPTY);
  DatasetSpecification badTableSpec = datasetFramework.getDatasetSpec(badTableId);
  try {
    // Expected to throw because the spec has no schema.
    exploreTableManager.enableDataset(badTableId, badTableSpec, false);
  } finally {
    // Clean up the instance whether or not the expected exception was thrown.
    datasetFramework.deleteInstance(badTableId);
  }
}
Example usage of io.cdap.cdap.api.dataset.DatasetSpecification in the project cdap by caskdata:
the testRecordScannableAndWritableIsOK method of the HiveExploreStructuredRecordTestRun class.
/**
 * Verifies that a dataset registered with an explicit single-field string schema
 * can be enabled for explore and that "describe" reports the expected column.
 */
@Test
public void testRecordScannableAndWritableIsOK() throws Exception {
  DatasetId tableId = NAMESPACE_ID.dataset("tabul");
  // Build a record schema with one string field "x" and attach it as the
  // dataset's SCHEMA property.
  Schema recordSchema = Schema.recordOf("intRecord",
                                        Schema.Field.of("x", Schema.of(Schema.Type.STRING)));
  datasetFramework.addInstance("TableWrapper", tableId,
                               DatasetProperties.builder()
                                 .add(DatasetProperties.SCHEMA, recordSchema.toString())
                                 .build());
  DatasetSpecification tableSpec = datasetFramework.getDatasetSpec(tableId);
  try {
    exploreTableManager.enableDataset(tableId, tableSpec, false);
    // The describe output must contain exactly the schema's single column.
    runCommand(NAMESPACE_ID, "describe dataset_tabul", true,
               Lists.newArrayList(new ColumnDesc("col_name", "STRING", 1, "from deserializer"),
                                  new ColumnDesc("data_type", "STRING", 2, "from deserializer"),
                                  new ColumnDesc("comment", "STRING", 3, "from deserializer")),
               Lists.newArrayList(new QueryResult(Lists.<Object>newArrayList("x", "string", "from deserializer"))));
  } finally {
    // Always remove the instance so later tests start clean.
    datasetFramework.deleteInstance(tableId);
  }
}
Example usage of io.cdap.cdap.api.dataset.DatasetSpecification in the project cdap by caskdata:
the testNoOpOnMissingSchema method of the HiveExploreTableTestRun class.
/**
 * Verifies that enabling explore on a Table with no schema property is a no-op
 * rather than an error: enableDataset must return {@code QueryHandle.NO_OP}.
 */
@Test
public void testNoOpOnMissingSchema() throws Exception {
  DatasetId noSchemaId = NAMESPACE_ID.dataset("noschema");
  // Register a plain Table with no properties, so no schema is present.
  datasetFramework.addInstance(Table.class.getName(), noSchemaId, DatasetProperties.EMPTY);
  try {
    DatasetSpecification noSchemaSpec = datasetFramework.getDatasetSpec(noSchemaId);
    QueryHandle handle = exploreTableManager.enableDataset(noSchemaId, noSchemaSpec, false);
    // A missing schema must yield the NO_OP handle, not a failure.
    Assert.assertEquals(QueryHandle.NO_OP, handle);
  } finally {
    // Always remove the instance so later tests start clean.
    datasetFramework.deleteInstance(noSchemaId);
  }
}
Aggregations