Use of io.cdap.cdap.api.data.batch.RecordWritable in the project cdap by caskdata.
From the class DatasetSerDe, the method getDatasetSchema:
/**
 * Resolves the schema of the given dataset and stores it in {@code schema}.
 *
 * <p>First tries the dataset's "schema" property (datasets such as Table and
 * ObjectMappedTable carry it there). If absent, instantiates the dataset and
 * derives the schema from its record type. Failures of any kind are rethrown
 * as {@link SerDeException} so Hive can surface them.
 *
 * @param conf the Hadoop configuration handed in by Hive; may be null when Hive
 *             calls initialize just to obtain the object inspector
 * @param datasetId id of the dataset whose schema is needed
 * @throws SerDeException if the dataset cannot be found, instantiated, or has
 *                        a schema that cannot be read or is not explorable
 */
private void getDatasetSchema(Configuration conf, DatasetId datasetId) throws SerDeException {
  try (ContextManager.Context context = ContextManager.getContext(conf)) {
    // Hive sometimes calls initialize only to get the object inspector; in that
    // case no context is available and the schema simply cannot be resolved.
    if (context == null) {
      LOG.info("Hive provided a null conf, will not be able to get dataset schema.");
      return;
    }

    // Fast path: some datasets (Table, ObjectMappedTable) publish their schema
    // directly in the dataset properties, so no instantiation is required.
    try {
      String schemaStr = context.getDatasetSpec(datasetId).getProperty("schema");
      if (schemaStr != null) {
        schema = Schema.parseJson(schemaStr);
        return;
      }
    } catch (DatasetManagementException | ServiceUnavailableException | UnauthorizedException e) {
      throw new SerDeException("Could not instantiate dataset " + datasetId, e);
    } catch (IOException e) {
      throw new SerDeException("Exception getting schema for dataset " + datasetId, e);
    }

    // Slow path: all other datasets must be instantiated so their record type
    // can be inspected. conf is null if this is a query that writes to a dataset.
    ClassLoader parentClassLoader = conf == null ? null : conf.getClassLoader();
    try (SystemDatasetInstantiator instantiator = context.createDatasetInstantiator(parentClassLoader)) {
      Dataset dataset = instantiator.getDataset(datasetId);
      if (dataset == null) {
        throw new SerDeException("Could not find dataset " + datasetId);
      }

      // Only RecordScannable / RecordWritable datasets expose a record type.
      Type recordType;
      if (dataset instanceof RecordScannable) {
        recordType = ((RecordScannable) dataset).getRecordType();
      } else if (dataset instanceof RecordWritable) {
        recordType = ((RecordWritable) dataset).getRecordType();
      } else {
        throw new SerDeException("Dataset " + datasetId + " is not explorable.");
      }
      schema = schemaGenerator.generate(recordType);
    } catch (UnsupportedTypeException e) {
      throw new SerDeException("Dataset " + datasetId + " has an unsupported schema.", e);
    } catch (IOException e) {
      throw new SerDeException("Exception while trying to instantiate dataset " + datasetId, e);
    }
  } catch (IOException e) {
    // Covers failures obtaining the context as well as closing it.
    throw new SerDeException("Could not get hive context from configuration.", e);
  }
}
Use of io.cdap.cdap.api.data.batch.RecordWritable in the project cdap by caskdata.
From the class ExploreTableManager, the method generateEnableStatement:
/**
 * Generates a Hive DDL statement to create a Hive table for the given dataset.
 *
 * @param dataset the instantiated dataset
 * @param spec the dataset specification
 * @param datasetId the dataset id
 * @param tableName the name of the Hive table to create
 * @param truncating whether this call to create() is part of a truncate() operation, which is in some
 *                   case implemented using disableExplore() followed by enableExplore()
 * @return a CREATE TABLE statement, or null if the dataset is not explorable
 * @throws UnsupportedTypeException if the dataset is a RecordScannable of a type that is not supported by Hive
 */
@Nullable
private String generateEnableStatement(Dataset dataset, DatasetSpecification spec, DatasetId datasetId,
                                       String tableName, boolean truncating)
  throws UnsupportedTypeException, ExploreException {

  String datasetName = datasetId.getDataset();
  Map<String, String> serdeProperties = ImmutableMap.of(
    Constants.Explore.DATASET_NAME, datasetId.getDataset(),
    Constants.Explore.DATASET_NAMESPACE, datasetId.getNamespace());

  // To be explorable, the dataset must be RecordScannable or RecordWritable,
  // or it must be a FileSet or a PartitionedFileSet with explore enabled in its properties.
  if (dataset instanceof Table) {
    // valid for a table not to have a schema property. this logic should really be in Table
    return generateCreateStatementFromSchemaProperty(spec, datasetId, tableName, serdeProperties, false);
  }
  if (dataset instanceof ObjectMappedTable) {
    return generateCreateStatementFromSchemaProperty(spec, datasetId, tableName, serdeProperties, true);
  }

  if (dataset instanceof RecordScannable || dataset instanceof RecordWritable) {
    Type recordType = dataset instanceof RecordScannable
      ? ((RecordScannable) dataset).getRecordType()
      : ((RecordWritable) dataset).getRecordType();

    // Use == because that's what same class means.
    if (StructuredRecord.class == recordType) {
      return generateCreateStatementFromSchemaProperty(spec, datasetId, tableName, serdeProperties, true);
    }

    // otherwise, derive the schema from the record type
    LOG.debug("Enabling explore for dataset instance {}", datasetName);
    String databaseName = ExploreProperties.getExploreDatabaseName(spec.getProperties());
    return new CreateStatementBuilder(datasetName, databaseName, tableName, shouldEscapeColumns)
      .setSchema(hiveSchemaFor(recordType))
      .setTableComment("CDAP Dataset")
      .buildWithStorageHandler(DatasetStorageHandler.class.getName(), serdeProperties);
  }

  if (dataset instanceof FileSet || dataset instanceof PartitionedFileSet) {
    Map<String, String> properties = spec.getProperties();
    if (FileSetProperties.isExploreEnabled(properties)) {
      LOG.debug("Enabling explore for dataset instance {}", datasetName);
      return generateFileSetCreateStatement(datasetId, dataset, properties, truncating);
    }
  }

  // dataset is not explorable
  return null;
}
Aggregations