
Example 46 with ExploreException

use of io.cdap.cdap.explore.service.ExploreException in project cdap by caskdata.

the class BaseHiveExploreService method getTables.

@Override
public QueryHandle getTables(String catalog, String schemaPattern, String tableNamePattern, List<String> tableTypes) throws ExploreException, SQLException {
    startAndWait();
    try {
        SessionHandle sessionHandle = null;
        OperationHandle operationHandle = null;
        Map<String, String> sessionConf = startSession();
        String database = getHiveDatabase(schemaPattern);
        try {
            sessionHandle = openHiveSession(sessionConf);
            operationHandle = cliService.getTables(sessionHandle, catalog, database, tableNamePattern, tableTypes);
            QueryHandle handle = saveReadOnlyOperation(operationHandle, sessionHandle, sessionConf, "", database);
            LOG.trace("Retrieving tables: catalog {}, schemaNamePattern {}, tableNamePattern {}, tableTypes {}", catalog, database, tableNamePattern, tableTypes);
            return handle;
        } catch (Throwable e) {
            closeInternal(getQueryHandle(sessionConf), new ReadOnlyOperationInfo(sessionHandle, operationHandle, sessionConf, "", database));
            throw e;
        }
    } catch (HiveSQLException e) {
        throw getSqlException(e);
    } catch (Throwable e) {
        throw new ExploreException(e);
    }
}
Also used : HiveSQLException(org.apache.hive.service.cli.HiveSQLException) SessionHandle(org.apache.hive.service.cli.SessionHandle) QueryHandle(io.cdap.cdap.proto.QueryHandle) OperationHandle(org.apache.hive.service.cli.OperationHandle) ExploreException(io.cdap.cdap.explore.service.ExploreException)
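
A caller-side sketch of the method above, assuming an exploreService reference and illustrative catalog/pattern arguments (neither is part of the example itself):

// Hypothetical caller: list all tables matching "%" in the "default" schema.
try {
    QueryHandle handle = exploreService.getTables(null, "default", "%", null);
    LOG.info("Submitted table listing with handle {}", handle.getHandle());
} catch (SQLException e) {
    // Invalid arguments or a Hive-side SQL failure is surfaced as SQLException.
    LOG.warn("Table listing rejected: {}", e.getMessage());
} catch (ExploreException e) {
    // Any other failure is wrapped in ExploreException by the service.
    LOG.error("Explore service failure while listing tables", e);
}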

Example 47 with ExploreException

use of io.cdap.cdap.explore.service.ExploreException in project cdap by caskdata.

the class BaseHiveExploreService method previewResults.

@Override
public List<QueryResult> previewResults(QueryHandle handle) throws ExploreException, HandleNotFoundException, SQLException {
    startAndWait();
    if (inactiveHandleCache.getIfPresent(handle) != null) {
        throw new HandleNotFoundException("Query is inactive.", true);
    }
    OperationInfo operationInfo = getActiveOperationInfo(handle);
    Lock previewLock = operationInfo.getPreviewLock();
    previewLock.lock();
    try {
        File previewFile = operationInfo.getPreviewFile();
        if (previewFile != null) {
            try {
                Reader reader = com.google.common.io.Files.newReader(previewFile, Charsets.UTF_8);
                try {
                    return GSON.fromJson(reader, new TypeToken<List<QueryResult>>() {
                    }.getType());
                } finally {
                    Closeables.closeQuietly(reader);
                }
            } catch (FileNotFoundException e) {
                LOG.error("Could not retrieve preview result file {}", previewFile, e);
                throw new ExploreException(e);
            }
        }
        try {
            // Create preview results for query
            previewFile = new File(previewsDir, handle.getHandle());
            try (FileWriter fileWriter = new FileWriter(previewFile)) {
                List<QueryResult> results = fetchNextResults(handle, PREVIEW_COUNT);
                GSON.toJson(results, fileWriter);
                operationInfo.setPreviewFile(previewFile);
                return results;
            }
        } catch (IOException e) {
            LOG.error("Could not write preview results into file", e);
            throw new ExploreException(e);
        }
    } finally {
        previewLock.unlock();
    }
}
Also used : HandleNotFoundException(io.cdap.cdap.explore.service.HandleNotFoundException) QueryResult(io.cdap.cdap.proto.QueryResult) TypeToken(com.google.common.reflect.TypeToken) FileWriter(java.io.FileWriter) FileNotFoundException(java.io.FileNotFoundException) Reader(java.io.Reader) IOException(java.io.IOException) File(java.io.File) Lock(java.util.concurrent.locks.Lock) ExploreException(io.cdap.cdap.explore.service.ExploreException)
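
A hedged sketch of how a caller might consume this method, assuming a previously obtained QueryHandle named handle and an exploreService reference:

// Hypothetical caller: fetch the cached preview rows for a submitted query.
try {
    List<QueryResult> preview = exploreService.previewResults(handle);
    // At most PREVIEW_COUNT rows are returned; later calls reuse the cached preview file.
    LOG.info("Preview returned {} rows", preview.size());
} catch (HandleNotFoundException e) {
    // The handle was never issued or has already become inactive.
    LOG.warn("Query handle {} is no longer active", handle.getHandle());
} catch (SQLException | ExploreException e) {
    LOG.error("Could not fetch preview results", e);
}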

Example 48 with ExploreException

use of io.cdap.cdap.explore.service.ExploreException in project cdap by caskdata.

the class BaseHiveExploreService method getTableInfo.

@Override
public TableInfo getTableInfo(String namespace, @Nullable String databaseName, String table) throws ExploreException, TableNotFoundException {
    startAndWait();
    // TODO check if the database user is allowed to access if security is enabled
    try {
        String db = databaseName != null ? databaseName : getHiveDatabase(namespace);
        Table tableInfo = getMetaStoreClient().getTable(db, table);
        List<FieldSchema> tableFields = tableInfo.getSd().getCols();
        // The storage descriptor may not contain all columns
        // in the storage descriptor. If columns are missing, do a separate call for schema.
        if (tableFields == null || tableFields.isEmpty()) {
            // don't call .getSchema(): it triggers a ClassNotFoundException in the Thrift code
            tableFields = getMetaStoreClient().getFields(db, table);
        }
        ImmutableList.Builder<TableInfo.ColumnInfo> schemaBuilder = ImmutableList.builder();
        Set<String> fieldNames = Sets.newHashSet();
        for (FieldSchema column : tableFields) {
            schemaBuilder.add(new TableInfo.ColumnInfo(column.getName(), column.getType(), column.getComment()));
            fieldNames.add(column.getName());
        }
        ImmutableList.Builder<TableInfo.ColumnInfo> partitionKeysBuilder = ImmutableList.builder();
        for (FieldSchema column : tableInfo.getPartitionKeys()) {
            TableInfo.ColumnInfo columnInfo = new TableInfo.ColumnInfo(column.getName(), column.getType(), column.getComment());
            partitionKeysBuilder.add(columnInfo);
            // Add partition keys to the schema as well,
            // since they show up when you do a 'describe <table>' command.
            if (!fieldNames.contains(column.getName())) {
                schemaBuilder.add(columnInfo);
            }
        }
        // It's a CDAP-generated table if it uses our storage handler, or if a property is set on the table.
        String cdapName = null;
        Map<String, String> tableParameters = tableInfo.getParameters();
        if (tableParameters != null) {
            cdapName = tableParameters.get(Constants.Explore.CDAP_NAME);
        }
        // tables created after CDAP 2.6 should set the "cdap.name" property, but older ones
        // do not. So also check if it uses a cdap storage handler.
        String storageHandler = tableInfo.getParameters().get("storage_handler");
        boolean isDatasetTable = cdapName != null || DatasetStorageHandler.class.getName().equals(storageHandler);
        return new TableInfo(tableInfo.getTableName(), tableInfo.getDbName(), tableInfo.getOwner(),
                             (long) tableInfo.getCreateTime() * 1000, (long) tableInfo.getLastAccessTime() * 1000,
                             tableInfo.getRetention(), partitionKeysBuilder.build(), tableInfo.getParameters(),
                             tableInfo.getTableType(), schemaBuilder.build(), tableInfo.getSd().getLocation(),
                             tableInfo.getSd().getInputFormat(), tableInfo.getSd().getOutputFormat(),
                             tableInfo.getSd().isCompressed(), tableInfo.getSd().getNumBuckets(),
                             tableInfo.getSd().getSerdeInfo().getSerializationLib(),
                             tableInfo.getSd().getSerdeInfo().getParameters(), isDatasetTable);
    } catch (NoSuchObjectException e) {
        throw new TableNotFoundException(e);
    } catch (TException e) {
        throw new ExploreException(e);
    }
}
Also used : TException(org.apache.thrift.TException) Table(org.apache.hadoop.hive.metastore.api.Table) ImmutableList(com.google.common.collect.ImmutableList) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ExploreException(io.cdap.cdap.explore.service.ExploreException) TableNotFoundException(io.cdap.cdap.explore.service.TableNotFoundException) TableInfo(io.cdap.cdap.proto.TableInfo) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
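
A minimal caller sketch, assuming an exploreService reference and an illustrative table name; it only distinguishes the two exception types the method declares:

// Hypothetical caller: fetch table metadata and separate "table missing" from other failures.
try {
    TableInfo tableInfo = exploreService.getTableInfo("default", null, "purchases");
    LOG.info("Retrieved table info for table 'purchases'");
} catch (TableNotFoundException e) {
    // Raised when the Hive metastore reports NoSuchObjectException for the table.
    LOG.warn("Table 'purchases' does not exist in namespace 'default'");
} catch (ExploreException e) {
    // Any Thrift-level metastore failure is wrapped in ExploreException.
    LOG.error("Metastore call failed", e);
}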

Example 49 with ExploreException

use of io.cdap.cdap.explore.service.ExploreException in project cdap by caskdata.

the class BaseHiveExploreService method doStartSession.

private Map<String, String> doStartSession(@Nullable NamespaceId namespace, @Nullable Map<String, String> additionalSessionConf) throws ExploreException, IOException, NamespaceNotFoundException {
    Map<String, String> sessionConf = new HashMap<>();
    QueryHandle queryHandle = QueryHandle.generate();
    sessionConf.put(Constants.Explore.QUERY_ID, queryHandle.getHandle());
    NamespaceMeta namespaceMeta = null;
    if (namespace != null) {
        try {
            namespaceMeta = namespaceQueryAdmin.get(namespace);
        } catch (NamespaceNotFoundException e) {
            throw e;
        } catch (Exception e) {
            throw new IOException(e);
        }
    }
    String schedulerQueue = namespaceMeta != null ? schedulerQueueResolver.getQueue(namespaceMeta) : schedulerQueueResolver.getDefaultQueue();
    if (schedulerQueue != null && !schedulerQueue.isEmpty()) {
        sessionConf.put(JobContext.QUEUE_NAME, schedulerQueue);
    }
    Transaction tx = startTransaction();
    ConfigurationUtil.set(sessionConf, Constants.Explore.TX_QUERY_KEY, TxnCodec.INSTANCE, tx);
    ConfigurationUtil.set(sessionConf, Constants.Explore.CCONF_KEY, CConfCodec.INSTANCE, cConf);
    ConfigurationUtil.set(sessionConf, Constants.Explore.HCONF_KEY, HConfCodec.INSTANCE, hConf);
    HiveConf hiveConf = createHiveConf();
    if (ExploreServiceUtils.isSparkEngine(hiveConf, additionalSessionConf)) {
        sessionConf.putAll(sparkConf);
    }
    if (UserGroupInformation.isSecurityEnabled()) {
        // make sure RM does not cancel delegation tokens after the query is run
        sessionConf.put("mapreduce.job.complete.cancel.delegation.tokens", "false");
        sessionConf.put("spark.hadoop.mapreduce.job.complete.cancel.delegation.tokens", "false");
        // write the user's credentials to a file, to be used for the query
        File credentialsFile = writeCredentialsFile(queryHandle);
        String credentialsFilePath = credentialsFile.getAbsolutePath();
        // mapreduce.job.credentials.binary is added by Hive only if Kerberos credentials are present and impersonation
        // is enabled. However, in our case we don't have Kerberos credentials for Explore service.
        // Hence it will not be automatically added by Hive, instead we have to add it ourselves.
        sessionConf.put(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY, credentialsFilePath);
        // CDAP-8367 We need to set this back to Kerberos if security is enabled. We override it in HiveConf.
        sessionConf.put(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, UserGroupInformation.AuthenticationMethod.KERBEROS.name());
        sessionConf.put(Constants.Explore.SUBMITLOCALTASKVIACHILD, Boolean.FALSE.toString());
        sessionConf.put(Constants.Explore.SUBMITVIACHILD, Boolean.FALSE.toString());
        if (ExploreServiceUtils.isTezEngine(hiveConf, additionalSessionConf)) {
            // Add token file location property for tez if engine is tez
            sessionConf.put("tez.credentials.path", credentialsFilePath);
        }
    }
    if (additionalSessionConf != null) {
        sessionConf.putAll(additionalSessionConf);
    }
    return sessionConf;
}
Also used : Transaction(org.apache.tephra.Transaction) HashMap(java.util.HashMap) NamespaceMeta(io.cdap.cdap.proto.NamespaceMeta) HiveConf(org.apache.hadoop.hive.conf.HiveConf) IOException(java.io.IOException) QueryHandle(io.cdap.cdap.proto.QueryHandle) File(java.io.File) NamespaceNotFoundException(io.cdap.cdap.common.NamespaceNotFoundException) SQLException(java.sql.SQLException) TableNotFoundException(io.cdap.cdap.explore.service.TableNotFoundException) TException(org.apache.thrift.TException) HandleNotFoundException(io.cdap.cdap.explore.service.HandleNotFoundException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TransactionFailureException(org.apache.tephra.TransactionFailureException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) ExploreException(io.cdap.cdap.explore.service.ExploreException) FileNotFoundException(java.io.FileNotFoundException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException)
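
A minimal sketch of how the resulting session configuration could be consumed inside BaseHiveExploreService, following the pattern from Example 46 (the namespace id value is an assumption):

// Hypothetical use: build the per-query session configuration, then open a Hive session with it.
Map<String, String> sessionConf = doStartSession(new NamespaceId("default"), null);
SessionHandle sessionHandle = openHiveSession(sessionConf);
// The generated query id travels inside the session configuration.
String queryId = sessionConf.get(Constants.Explore.QUERY_ID);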

Example 50 with ExploreException

use of io.cdap.cdap.explore.service.ExploreException in project cdap by caskdata.

the class BaseHiveExploreService method fetchStatus.

protected QueryStatus fetchStatus(OperationInfo operationInfo) throws ExploreException, HandleNotFoundException, HiveSQLException {
    QueryStatus queryStatus;
    try {
        queryStatus = doFetchStatus(operationInfo.getOperationHandle());
        if (QueryStatus.OpStatus.ERROR.equals(queryStatus.getStatus()) && queryStatus.getErrorMessage() == null) {
            queryStatus = new QueryStatus("Operation failed. See the log for more details.", null);
        }
    } catch (HiveSQLException e) {
        // If a SQL state is present, it means that query execution failed, but we can still retrieve the status.
        if (e.getSQLState() != null) {
            queryStatus = new QueryStatus(e.getMessage(), e.getSQLState());
        } else {
            // this is an internal error - we are not able to retrieve the status
            throw new ExploreException(e.getMessage(), e);
        }
    }
    operationInfo.setStatus(queryStatus);
    return queryStatus;
}
Also used : HiveSQLException(org.apache.hive.service.cli.HiveSQLException) QueryStatus(io.cdap.cdap.proto.QueryStatus) ExploreException(io.cdap.cdap.explore.service.ExploreException)
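
A short sketch of how code inside the service might use fetchStatus, assuming it runs in a method that declares the same exceptions and has a QueryHandle named handle:

// Hypothetical caller: refresh and inspect the latest status; fetchStatus also records it on the OperationInfo.
QueryStatus status = fetchStatus(getActiveOperationInfo(handle));
if (QueryStatus.OpStatus.ERROR.equals(status.getStatus())) {
    // getErrorMessage() carries either the Hive SQL error or the generic failure text set above.
    LOG.warn("Query {} failed: {}", handle.getHandle(), status.getErrorMessage());
}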

Aggregations

ExploreException (io.cdap.cdap.explore.service.ExploreException): 88 usages
QueryHandle (io.cdap.cdap.proto.QueryHandle): 34 usages
SQLException (java.sql.SQLException): 30 usages
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 30 usages
IOException (java.io.IOException): 26 usages
OperationHandle (org.apache.hive.service.cli.OperationHandle): 24 usages
SessionHandle (org.apache.hive.service.cli.SessionHandle): 24 usages
HttpResponse (io.cdap.common.http.HttpResponse): 22 usages
HandleNotFoundException (io.cdap.cdap.explore.service.HandleNotFoundException): 14 usages
Path (javax.ws.rs.Path): 14 usages
UnsupportedTypeException (io.cdap.cdap.api.data.schema.UnsupportedTypeException): 10 usages
TableNotFoundException (io.cdap.cdap.explore.service.TableNotFoundException): 10 usages
POST (javax.ws.rs.POST): 10 usages
TException (org.apache.thrift.TException): 10 usages
JsonSyntaxException (com.google.gson.JsonSyntaxException): 8 usages
DatasetManagementException (io.cdap.cdap.api.dataset.DatasetManagementException): 8 usages
BadRequestException (io.cdap.cdap.common.BadRequestException): 8 usages
DatasetId (io.cdap.cdap.proto.id.DatasetId): 8 usages
FileNotFoundException (java.io.FileNotFoundException): 8 usages
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 8 usages