Example 31 with ExploreException

Use of co.cask.cdap.explore.service.ExploreException in project cdap by caskdata.

From class BaseHiveExploreService, the method getTableInfo:

@Override
public TableInfo getTableInfo(String namespace, @Nullable String databaseName, String table) throws ExploreException, TableNotFoundException {
    startAndWait();
    // TODO: check whether the user is allowed to access this database when security is enabled
    try {
        String db = databaseName != null ? databaseName : getHiveDatabase(namespace);
        Table tableInfo = getMetaStoreClient().getTable(db, table);
        List<FieldSchema> tableFields = tableInfo.getSd().getCols();
        // Table columns may be missing from the storage descriptor; if they are, do a separate call for the schema.
        if (tableFields == null || tableFields.isEmpty()) {
            // Don't call .getSchema(): doing so triggers a ClassNotFoundException inside the Thrift code.
            tableFields = getMetaStoreClient().getFields(db, table);
        }
        ImmutableList.Builder<TableInfo.ColumnInfo> schemaBuilder = ImmutableList.builder();
        Set<String> fieldNames = Sets.newHashSet();
        for (FieldSchema column : tableFields) {
            schemaBuilder.add(new TableInfo.ColumnInfo(column.getName(), column.getType(), column.getComment()));
            fieldNames.add(column.getName());
        }
        ImmutableList.Builder<TableInfo.ColumnInfo> partitionKeysBuilder = ImmutableList.builder();
        for (FieldSchema column : tableInfo.getPartitionKeys()) {
            TableInfo.ColumnInfo columnInfo = new TableInfo.ColumnInfo(column.getName(), column.getType(), column.getComment());
            partitionKeysBuilder.add(columnInfo);
            // Also add partition keys to the schema, since they show up when you do a 'describe <table>' command.
            if (!fieldNames.contains(column.getName())) {
                schemaBuilder.add(columnInfo);
            }
        }
        // It's a CDAP-generated table if it uses our storage handler, or if a property is set on the table.
        String cdapName = null;
        Map<String, String> tableParameters = tableInfo.getParameters();
        if (tableParameters != null) {
            cdapName = tableParameters.get(Constants.Explore.CDAP_NAME);
        }
        // Tables created after CDAP 2.6 should set the "cdap.name" property, but older ones
        // do not, so also check whether the table uses a CDAP storage handler.
        String storageHandler = tableInfo.getParameters().get("storage_handler");
        boolean isDatasetTable = cdapName != null || DatasetStorageHandler.class.getName().equals(storageHandler) || StreamStorageHandler.class.getName().equals(storageHandler);
        return new TableInfo(tableInfo.getTableName(), tableInfo.getDbName(), tableInfo.getOwner(),
                             (long) tableInfo.getCreateTime() * 1000, (long) tableInfo.getLastAccessTime() * 1000,
                             tableInfo.getRetention(), partitionKeysBuilder.build(), tableInfo.getParameters(),
                             tableInfo.getTableType(), schemaBuilder.build(), tableInfo.getSd().getLocation(),
                             tableInfo.getSd().getInputFormat(), tableInfo.getSd().getOutputFormat(),
                             tableInfo.getSd().isCompressed(), tableInfo.getSd().getNumBuckets(),
                             tableInfo.getSd().getSerdeInfo().getSerializationLib(),
                             tableInfo.getSd().getSerdeInfo().getParameters(), isDatasetTable);
    } catch (NoSuchObjectException e) {
        throw new TableNotFoundException(e);
    } catch (TException e) {
        throw new ExploreException(e);
    }
}
Also used : TException(org.apache.thrift.TException) Table(org.apache.hadoop.hive.metastore.api.Table) ImmutableList(com.google.common.collect.ImmutableList) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ExploreException(co.cask.cdap.explore.service.ExploreException) TableNotFoundException(co.cask.cdap.explore.service.TableNotFoundException) DatasetStorageHandler(co.cask.cdap.hive.datasets.DatasetStorageHandler) TableInfo(co.cask.cdap.proto.TableInfo) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
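
A minimal caller sketch for this method (assumed usage, not taken from the CDAP sources): the namespace "default", the table name "purchases", the exploreService reference, and the SLF4J LOG field are illustrative. Passing null for databaseName lets the service derive the Hive database from the namespace, as the snippet above shows.

try {
    TableInfo info = exploreService.getTableInfo("default", null, "purchases");
    // Inspect the returned schema and partition keys here.
} catch (TableNotFoundException e) {
    LOG.warn("Table 'purchases' does not exist in the Hive metastore", e);
} catch (ExploreException e) {
    LOG.error("Hive metastore call failed", e);
}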

Example 32 with ExploreException

Use of co.cask.cdap.explore.service.ExploreException in project cdap by caskdata.

From class BaseHiveExploreService, the method execute:

@Override
public QueryHandle execute(NamespaceId namespace, String statement, @Nullable Map<String, String> additionalSessionConf) throws ExploreException, SQLException {
    startAndWait();
    try {
        SessionHandle sessionHandle = null;
        OperationHandle operationHandle = null;
        LOG.trace("Got statement '{}' with additional session configuration {}", statement, additionalSessionConf);
        Map<String, String> sessionConf = startSession(namespace, additionalSessionConf);
        String database = getHiveDatabase(namespace.getNamespace());
        try {
            sessionHandle = openHiveSession(sessionConf);
            // Switch database to the one being passed in.
            setCurrentDatabase(database);
            operationHandle = executeAsync(sessionHandle, statement);
            QueryHandle handle = saveReadWriteOperation(operationHandle, sessionHandle, sessionConf, statement, database);
            LOG.trace("Executing statement: {} with handle {}", statement, handle);
            return handle;
        } catch (Throwable e) {
            closeInternal(getQueryHandle(sessionConf), new ReadWriteOperationInfo(sessionHandle, operationHandle, sessionConf, "", database));
            throw e;
        }
    } catch (HiveSQLException e) {
        throw getSqlException(e);
    } catch (Throwable e) {
        throw new ExploreException(e);
    }
}
Also used : HiveSQLException(org.apache.hive.service.cli.HiveSQLException) SessionHandle(org.apache.hive.service.cli.SessionHandle) QueryHandle(co.cask.cdap.proto.QueryHandle) OperationHandle(org.apache.hive.service.cli.OperationHandle) ExploreException(co.cask.cdap.explore.service.ExploreException)
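
A minimal caller sketch (assumed usage): the namespace name, the SQL text, the exploreService reference, and the SLF4J LOG field are illustrative; null means no additional session configuration. The call returns immediately with a handle for the asynchronous Hive operation.

NamespaceId namespace = new NamespaceId("default");
try {
    QueryHandle handle = exploreService.execute(namespace, "SELECT * FROM dataset_purchases LIMIT 10", null);
    // Poll the handle for status and results elsewhere.
} catch (SQLException e) {
    LOG.error("Statement was rejected by Hive", e);
} catch (ExploreException e) {
    LOG.error("Could not submit the statement", e);
}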

Example 33 with ExploreException

Use of co.cask.cdap.explore.service.ExploreException in project cdap by caskdata.

From class BaseHiveExploreService, the method getMetaStoreClient:

private IMetaStoreClient getMetaStoreClient() throws ExploreException {
    if (metastoreClientLocal.get() == null) {
        try {
            IMetaStoreClient client = new HiveMetaStoreClient(getHiveConf());
            Supplier<IMetaStoreClient> supplier = Suppliers.ofInstance(client);
            metastoreClientLocal.set(supplier);
            // We use GC of the supplier as a signal for us to know that a thread is gone
            // The supplier is set into the thread local, which will get GC'ed when the thread is gone.
            // Since we use a weak reference key to the supplier that points to the client
            // (in the metastoreClientReferences map), it won't block GC of the supplier instance.
            // We can use the weak reference, which is retrieved through polling the ReferenceQueue,
            // to get back the client and call close() on it.
            metastoreClientReferences.put(new WeakReference<>(supplier, metastoreClientReferenceQueue), client);
        } catch (MetaException e) {
            throw new ExploreException("Error initializing Hive Metastore client", e);
        }
    }
    return metastoreClientLocal.get().get();
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) ExploreException(co.cask.cdap.explore.service.ExploreException)
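
The comments above describe the cleanup half of this pattern without showing it. The sketch below is illustrative (the method name is assumed, not taken from the CDAP sources): once a thread dies, its thread-local supplier becomes unreachable, the JVM enqueues the corresponding WeakReference on metastoreClientReferenceQueue, and the orphaned client can be recovered from the map and closed.

private void closeOrphanedMetastoreClients() {
    Reference<? extends Supplier<IMetaStoreClient>> ref;
    while ((ref = metastoreClientReferenceQueue.poll()) != null) {
        // The value keyed by the weak reference is the client created for the now-dead thread.
        IMetaStoreClient client = metastoreClientReferences.remove(ref);
        if (client != null) {
            client.close();
        }
    }
}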

Example 34 with ExploreException

Use of co.cask.cdap.explore.service.ExploreException in project cdap by caskdata.

From class BaseHiveExploreService, the method getColumns:

@Override
public QueryHandle getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws ExploreException, SQLException {
    startAndWait();
    try {
        SessionHandle sessionHandle = null;
        OperationHandle operationHandle = null;
        Map<String, String> sessionConf = startSession();
        String database = getHiveDatabase(schemaPattern);
        try {
            sessionHandle = openHiveSession(sessionConf);
            operationHandle = cliService.getColumns(sessionHandle, catalog, database, tableNamePattern, columnNamePattern);
            QueryHandle handle = saveReadOnlyOperation(operationHandle, sessionHandle, sessionConf, "", database);
            LOG.trace("Retrieving columns: catalog {}, schemaPattern {}, tableNamePattern {}, columnNamePattern {}", catalog, database, tableNamePattern, columnNamePattern);
            return handle;
        } catch (Throwable e) {
            closeInternal(getQueryHandle(sessionConf), new ReadOnlyOperationInfo(sessionHandle, operationHandle, sessionConf, "", database));
            throw e;
        }
    } catch (HiveSQLException e) {
        throw getSqlException(e);
    } catch (Throwable e) {
        throw new ExploreException(e);
    }
}
Also used : HiveSQLException(org.apache.hive.service.cli.HiveSQLException) SessionHandle(org.apache.hive.service.cli.SessionHandle) QueryHandle(co.cask.cdap.proto.QueryHandle) OperationHandle(org.apache.hive.service.cli.OperationHandle) ExploreException(co.cask.cdap.explore.service.ExploreException)
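
A minimal caller sketch (assumed usage; the exploreService reference and LOG field are illustrative): the null catalog and the "%" wildcards follow JDBC DatabaseMetaData conventions, and the schema pattern "default" is resolved to a Hive database inside the method, as shown above.

try {
    QueryHandle handle = exploreService.getColumns(null, "default", "dataset_%", "%");
    // Column metadata is fetched later through the returned handle.
} catch (SQLException | ExploreException e) {
    LOG.error("Unable to list columns", e);
}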

Example 35 with ExploreException

Use of co.cask.cdap.explore.service.ExploreException in project cdap by caskdata.

From class ExploreHttpClient, the method doAddPartition:

protected QueryHandle doAddPartition(DatasetId datasetInstance, DatasetSpecification spec, PartitionKey key, String path) throws ExploreException {
    Map<String, String> args = new HashMap<>();
    PartitionedFileSetArguments.setOutputPartitionKey(args, key);
    args.put("path", path);
    String tableName = ExploreProperties.getExploreTableName(spec.getProperties());
    String databaseName = ExploreProperties.getExploreDatabaseName(spec.getProperties());
    if (tableName != null) {
        args.put(ExploreProperties.PROPERTY_EXPLORE_TABLE_NAME, tableName);
    }
    if (databaseName != null) {
        args.put(ExploreProperties.PROPERTY_EXPLORE_DATABASE_NAME, databaseName);
    }
    HttpResponse response = doPost(String.format("namespaces/%s/data/explore/datasets/%s/partitions", datasetInstance.getNamespace(), datasetInstance.getDataset()), GSON.toJson(args), null);
    if (response.getResponseCode() == HttpURLConnection.HTTP_OK) {
        return QueryHandle.fromId(parseResponseAsMap(response, "handle"));
    }
    throw new ExploreException(String.format("Cannot add partition with key %s to dataset %s. Reason: %s", key, datasetInstance.toString(), response));
}
Also used : HashMap(java.util.HashMap) HttpResponse(co.cask.common.http.HttpResponse) ExploreException(co.cask.cdap.explore.service.ExploreException)
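
A minimal caller sketch (assumed context): the datasetId, spec, and key values are obtained elsewhere, for example from the dataset framework, the partition path is purely illustrative, and LOG is an assumed SLF4J logger. On any non-200 response the method throws an ExploreException whose message includes the key, the dataset, and the HTTP response.

try {
    QueryHandle handle = doAddPartition(datasetId, spec, key, "/data/feeds/2017-01-01");
    LOG.debug("Add-partition query submitted with handle {}", handle);
} catch (ExploreException e) {
    LOG.error("Failed to register the partition with Explore", e);
}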

Aggregations

ExploreException (co.cask.cdap.explore.service.ExploreException): 41 usages
QueryHandle (co.cask.cdap.proto.QueryHandle): 16 usages
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 14 usages
HttpResponse (co.cask.common.http.HttpResponse): 12 usages
OperationHandle (org.apache.hive.service.cli.OperationHandle): 12 usages
SessionHandle (org.apache.hive.service.cli.SessionHandle): 12 usages
SQLException (java.sql.SQLException): 11 usages
IOException (java.io.IOException): 9 usages
HandleNotFoundException (co.cask.cdap.explore.service.HandleNotFoundException): 6 usages
TableNotFoundException (co.cask.cdap.explore.service.TableNotFoundException): 4 usages
Path (javax.ws.rs.Path): 4 usages
QueryStatus (co.cask.cdap.proto.QueryStatus): 3 usages
FileNotFoundException (java.io.FileNotFoundException): 3 usages
TException (org.apache.thrift.TException): 3 usages
UnsupportedTypeException (co.cask.cdap.api.data.schema.UnsupportedTypeException): 2 usages
NamespaceNotFoundException (co.cask.cdap.common.NamespaceNotFoundException): 2 usages
HBaseDDLExecutor (co.cask.cdap.spi.hbase.HBaseDDLExecutor): 2 usages
ImmutableList (com.google.common.collect.ImmutableList): 2 usages
JsonObject (com.google.gson.JsonObject): 2 usages
HashMap (java.util.HashMap): 2 usages