Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project metacat by Netflix.
The class CatalogThriftHiveMetastore, method requestWrapper.
private <R> R requestWrapper(final String methodName, final Object[] args, final ThriftSupplier<R> supplier)
    throws TException {
    final long start = registry.clock().monotonicTime();
    registry.counter(registry.createId(Metrics.CounterThrift.name() + "." + methodName)).increment();
    try {
        log.info("+++ Thrift({}): Calling {}({})", catalogName, methodName, args);
        return supplier.get();
    } catch (MetacatAlreadyExistsException e) {
        // Translate Metacat exceptions into the Thrift exceptions Hive clients expect.
        log.error(e.getMessage(), e);
        throw new AlreadyExistsException(e.getMessage());
    } catch (MetacatNotFoundException e) {
        log.error(e.getMessage(), e);
        throw new NoSuchObjectException(e.getMessage());
    } catch (TException e) {
        // Already a Thrift exception; log and rethrow unchanged.
        log.error(e.getMessage(), e);
        throw e;
    } catch (Exception e) {
        // Anything unexpected is counted as a failure and wrapped in a MetaException.
        registry.counter(registry.createId(Metrics.CounterThrift.name() + "." + methodName)
            .withTags(Metrics.statusFailureMap)).increment();
        final String message = String.format("%s -- %s failed", e.getMessage(), methodName);
        log.error(message, e);
        final MetaException me = new MetaException(message);
        me.initCause(e);
        throw me;
    } finally {
        final long duration = registry.clock().monotonicTime() - start;
        this.registry.timer(Metrics.TimerThriftRequest.name() + "." + methodName).record(duration, TimeUnit.MILLISECONDS);
        log.info("+++ Thrift({}): Time taken to complete {} is {} ms", catalogName, methodName, duration);
    }
}
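Every Thrift endpoint in the class funnels through this wrapper. A minimal sketch of such a caller, assuming a hypothetical loadTable helper: the get_table signature comes from the Hive metastore Thrift interface, but the body is illustrative, not Metacat's actual implementation.

// Hypothetical caller: each Thrift endpoint wraps its body in requestWrapper so that
// Metacat runtime exceptions surface to Hive clients as standard Thrift exceptions.
@Override
public Table get_table(final String dbname, final String tblName) throws TException {
    return requestWrapper("get_table", new Object[] {dbname, tblName},
        () -> loadTable(dbname, tblName)); // loadTable is a placeholder, not Metacat API
}

A MetacatNotFoundException thrown inside the supplier is what ultimately reaches the Hive client as a NoSuchObjectException.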
Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project drill by apache.
The class DrillHiveMetaStoreClient, method getHiveReadEntryHelper.
/** Helper method which gets table metadata. Retries once if the first call to fetch the metadata fails. */
protected static HiveReadEntry getHiveReadEntryHelper(final IMetaStoreClient mClient, final String dbName,
    final String tableName) throws TException {
    Table table = null;
    try {
        table = mClient.getTable(dbName, tableName);
    } catch (MetaException | NoSuchObjectException e) {
        // Definitive answers from the metastore (bad request, missing table) are not retried.
        throw e;
    } catch (TException e) {
        // Any other Thrift failure is likely a broken connection: reconnect and retry once.
        logger.warn("Failure while attempting to get hive table. Retries once. ", e);
        try {
            mClient.close();
        } catch (Exception ex) {
            logger.warn("Failure while attempting to close existing hive metastore connection. May leak connection.", ex);
        }
        mClient.reconnect();
        table = mClient.getTable(dbName, tableName);
    }
    if (table == null) {
        throw new UnknownTableException(String.format("Unable to find table '%s'.", tableName));
    }

    List<Partition> partitions;
    try {
        partitions = mClient.listPartitions(dbName, tableName, (short) -1);
    } catch (NoSuchObjectException | MetaException e) {
        throw e;
    } catch (TException e) {
        // Same reconnect-and-retry-once strategy as for the table lookup above.
        logger.warn("Failure while attempting to get hive partitions. Retries once. ", e);
        try {
            mClient.close();
        } catch (Exception ex) {
            logger.warn("Failure while attempting to close existing hive metastore connection. May leak connection.", ex);
        }
        mClient.reconnect();
        partitions = mClient.listPartitions(dbName, tableName, (short) -1);
    }

    List<HiveTableWrapper.HivePartitionWrapper> hivePartitionWrappers = Lists.newArrayList();
    HiveTableWithColumnCache hiveTable = new HiveTableWithColumnCache(table, new ColumnListsCache(table));
    for (Partition partition : partitions) {
        hivePartitionWrappers.add(createPartitionWithSpecColumns(hiveTable, partition));
    }
    if (hivePartitionWrappers.isEmpty()) {
        hivePartitionWrappers = null;
    }
    return new HiveReadEntry(new HiveTableWrapper(hiveTable), hivePartitionWrappers);
}
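The close/reconnect/retry-once sequence appears twice in this method. A sketch of how it could be factored out, assuming a small functional interface; MetastoreCall and callWithRetry are hypothetical names, not part of Drill.

// Hypothetical helper capturing the retry-once pattern used above.
@FunctionalInterface
interface MetastoreCall<T> {
    T call() throws TException;
}

private static <T> T callWithRetry(final IMetaStoreClient client, final MetastoreCall<T> op) throws TException {
    try {
        return op.call();
    } catch (MetaException | NoSuchObjectException e) {
        throw e; // definitive answers from the metastore are not retried
    } catch (TException e) {
        try {
            client.close();
        } catch (Exception ex) {
            // best effort; the old connection may leak
        }
        client.reconnect();
        return op.call(); // retry exactly once
    }
}

With such a helper, the two lookups reduce to callWithRetry(mClient, () -> mClient.getTable(dbName, tableName)) and callWithRetry(mClient, () -> mClient.listPartitions(dbName, tableName, (short) -1)).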
Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project cdap by caskdata.
The class BaseHiveExploreService, method getTableInfo.
@Override
public TableInfo getTableInfo(String namespace, @Nullable String databaseName, String table)
    throws ExploreException, TableNotFoundException {
    startAndWait();
    // TODO check if the database user is allowed to access if security is enabled
    try {
        String db = databaseName != null ? databaseName : getHiveDatabase(namespace);
        Table tableInfo = getMetaStoreClient().getTable(db, table);
        List<FieldSchema> tableFields = tableInfo.getSd().getCols();
        // The column list may be missing from the storage descriptor.
        // If columns are missing, do a separate call for schema.
        if (tableFields == null || tableFields.isEmpty()) {
            // don't call .getSchema()... class not found exception if we do in the thrift code...
            tableFields = getMetaStoreClient().getFields(db, table);
        }
        ImmutableList.Builder<TableInfo.ColumnInfo> schemaBuilder = ImmutableList.builder();
        Set<String> fieldNames = Sets.newHashSet();
        for (FieldSchema column : tableFields) {
            schemaBuilder.add(new TableInfo.ColumnInfo(column.getName(), column.getType(), column.getComment()));
            fieldNames.add(column.getName());
        }
        ImmutableList.Builder<TableInfo.ColumnInfo> partitionKeysBuilder = ImmutableList.builder();
        for (FieldSchema column : tableInfo.getPartitionKeys()) {
            TableInfo.ColumnInfo columnInfo = new TableInfo.ColumnInfo(column.getName(), column.getType(), column.getComment());
            partitionKeysBuilder.add(columnInfo);
            // Also add partition keys to the schema if they are not already in it,
            // since they show up when you do a 'describe <table>' command.
            if (!fieldNames.contains(column.getName())) {
                schemaBuilder.add(columnInfo);
            }
        }
        // It's a CDAP-generated table if it uses our storage handler, or if a property is set on the table.
        String cdapName = null;
        Map<String, String> tableParameters = tableInfo.getParameters();
        if (tableParameters != null) {
            cdapName = tableParameters.get(Constants.Explore.CDAP_NAME);
        }
        // Tables created after CDAP 2.6 should set the "cdap.name" property, but older ones
        // do not. So also check if it uses a cdap storage handler.
        String storageHandler = tableInfo.getParameters().get("storage_handler");
        boolean isDatasetTable = cdapName != null
            || DatasetStorageHandler.class.getName().equals(storageHandler)
            || StreamStorageHandler.class.getName().equals(storageHandler);
        return new TableInfo(tableInfo.getTableName(), tableInfo.getDbName(), tableInfo.getOwner(),
            (long) tableInfo.getCreateTime() * 1000, (long) tableInfo.getLastAccessTime() * 1000,
            tableInfo.getRetention(), partitionKeysBuilder.build(), tableInfo.getParameters(),
            tableInfo.getTableType(), schemaBuilder.build(), tableInfo.getSd().getLocation(),
            tableInfo.getSd().getInputFormat(), tableInfo.getSd().getOutputFormat(),
            tableInfo.getSd().isCompressed(), tableInfo.getSd().getNumBuckets(),
            tableInfo.getSd().getSerdeInfo().getSerializationLib(),
            tableInfo.getSd().getSerdeInfo().getParameters(), isDatasetTable);
    } catch (NoSuchObjectException e) {
        // The metastore reports a missing table as NoSuchObjectException;
        // translate it into CDAP's TableNotFoundException.
        throw new TableNotFoundException(e);
    } catch (TException e) {
        throw new ExploreException(e);
    }
}
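All three projects follow the same pattern: let NoSuchObjectException signal a missing metastore object, then translate it into a domain exception at the service boundary. A self-contained sketch of that pattern against a plain HiveMetaStoreClient, assuming a reachable metastore; the thrift URI and the IllegalArgumentException wrapper are illustrative choices, not drawn from any of the projects above.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TException;

public final class TableLookup {
    public static Table requireTable(String db, String name) throws TException {
        HiveConf conf = new HiveConf();
        conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083"); // assumed address
        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            return client.getTable(db, name);
        } catch (NoSuchObjectException e) {
            // Translate the metastore-level exception into a domain-level error,
            // as metacat, Drill, and CDAP each do at their own service boundary.
            throw new IllegalArgumentException(String.format("Table '%s.%s' not found", db, name), e);
        } finally {
            client.close();
        }
    }
}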