Example 46 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class PostgreSqlClient, method getColumns:

@Override
public List<JdbcColumnHandle> getColumns(ConnectorSession session, JdbcTableHandle tableHandle) {
    if (tableHandle.getColumns().isPresent()) {
        return tableHandle.getColumns().get();
    }
    checkArgument(tableHandle.isNamedRelation(), "Cannot get columns for %s", tableHandle);
    SchemaTableName schemaTableName = tableHandle.getRequiredNamedRelation().getSchemaTableName();
    try (Connection connection = connectionFactory.openConnection(session)) {
        Map<String, Integer> arrayColumnDimensions = ImmutableMap.of();
        if (getArrayMapping(session) == AS_ARRAY) {
            arrayColumnDimensions = getArrayColumnDimensions(connection, tableHandle);
        }
        try (ResultSet resultSet = getColumns(tableHandle, connection.getMetaData())) {
            int allColumns = 0;
            List<JdbcColumnHandle> columns = new ArrayList<>();
            while (resultSet.next()) {
                allColumns++;
                String columnName = resultSet.getString("COLUMN_NAME");
                JdbcTypeHandle typeHandle = new JdbcTypeHandle(
                        getInteger(resultSet, "DATA_TYPE").orElseThrow(() -> new IllegalStateException("DATA_TYPE is null")),
                        Optional.of(resultSet.getString("TYPE_NAME")),
                        getInteger(resultSet, "COLUMN_SIZE"),
                        getInteger(resultSet, "DECIMAL_DIGITS"),
                        Optional.ofNullable(arrayColumnDimensions.get(columnName)),
                        Optional.empty());
                Optional<ColumnMapping> columnMapping = toColumnMapping(session, connection, typeHandle);
                log.debug("Mapping data type of '%s' column '%s': %s mapped to %s", schemaTableName, columnName, typeHandle, columnMapping);
                // skip unsupported column types
                if (columnMapping.isPresent()) {
                    boolean nullable = (resultSet.getInt("NULLABLE") != columnNoNulls);
                    Optional<String> comment = Optional.ofNullable(resultSet.getString("REMARKS"));
                    columns.add(JdbcColumnHandle.builder()
                            .setColumnName(columnName)
                            .setJdbcTypeHandle(typeHandle)
                            .setColumnType(columnMapping.get().getType())
                            .setNullable(nullable)
                            .setComment(comment)
                            .build());
                }
                if (columnMapping.isEmpty()) {
                    UnsupportedTypeHandling unsupportedTypeHandling = getUnsupportedTypeHandling(session);
                    verify(unsupportedTypeHandling == IGNORE, "Unsupported type handling is set to %s, but toColumnMapping() returned empty for %s", unsupportedTypeHandling, typeHandle);
                }
            }
            if (columns.isEmpty()) {
                // A table may have no supported columns. In rare cases a table might have no columns at all.
                throw new TableNotFoundException(
                        tableHandle.getSchemaTableName(),
                        format("Table '%s' has no supported columns (all %s columns are not supported)", tableHandle.getSchemaTableName(), allColumns));
            }
            return ImmutableList.copyOf(columns);
        }
    } catch (SQLException e) {
        throw new TrinoException(JDBC_ERROR, e);
    }
}
Also used : SQLException(java.sql.SQLException) Connection(java.sql.Connection) PgConnection(org.postgresql.jdbc.PgConnection) JdbcColumnHandle(io.trino.plugin.jdbc.JdbcColumnHandle) ArrayList(java.util.ArrayList) SchemaTableName(io.trino.spi.connector.SchemaTableName) ImplementAvgFloatingPoint(io.trino.plugin.jdbc.aggregation.ImplementAvgFloatingPoint) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) JdbcTypeHandle(io.trino.plugin.jdbc.JdbcTypeHandle) ResultSet(java.sql.ResultSet) TrinoException(io.trino.spi.TrinoException) TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling(io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling) UnsupportedTypeHandling(io.trino.plugin.jdbc.UnsupportedTypeHandling) StandardColumnMappings.bigintColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping) StandardColumnMappings.decimalColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping) ColumnMapping(io.trino.plugin.jdbc.ColumnMapping) StandardColumnMappings.realColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.realColumnMapping) StandardColumnMappings.varbinaryColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.varbinaryColumnMapping) StandardColumnMappings.booleanColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.booleanColumnMapping) StandardColumnMappings.doubleColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping) StandardColumnMappings.smallintColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping) StandardColumnMappings.integerColumnMapping(io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping)
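
Seen from a call site, the useful property of this example is that a table whose columns are all unsupported surfaces as a TableNotFoundException, while connection and metadata failures surface as a TrinoException carrying JDBC_ERROR. A minimal sketch of telling the two apart; the client, session, and tableHandle variables are hypothetical stand-ins rather than names from the snippet, and the imports match the ones listed under "Also used" above.

try {
    List<JdbcColumnHandle> columns = client.getColumns(session, tableHandle);
    columns.forEach(column -> System.out.println(column.getColumnName()));
}
catch (TableNotFoundException e) {
    // thrown above when the table exists but every column was filtered out as unsupported
    System.err.println("No usable columns: " + e.getMessage());
}
catch (TrinoException e) {
    // wraps the underlying SQLException with the JDBC_ERROR error code
    System.err.println("Metadata lookup failed: " + e.getMessage());
}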

Example 47 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class RedisMetadata, method getTableMetadata:

@Override
public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle tableHandle) {
    SchemaTableName schemaTableName = ((RedisTableHandle) tableHandle).toSchemaTableName();
    ConnectorTableMetadata tableMetadata = getTableMetadata(schemaTableName);
    if (tableMetadata == null) {
        throw new TableNotFoundException(schemaTableName);
    }
    return tableMetadata;
}
Also used : TableNotFoundException(io.trino.spi.connector.TableNotFoundException) SchemaTableName(io.trino.spi.connector.SchemaTableName) ConnectorTableMetadata(io.trino.spi.connector.ConnectorTableMetadata)
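
The shape here, look the table up and throw TableNotFoundException when nothing comes back, repeats in the BigQuery examples below as Optional.orElseThrow calls. A small helper capturing both constructor forms seen on this page could look like the sketch below; the requireTable name is made up for illustration, only the TableNotFoundException constructors are real.

static <T> T requireTable(Optional<T> table, SchemaTableName name) {
    // single-argument constructor, as in this example
    return table.orElseThrow(() -> new TableNotFoundException(name));
}

static <T> T requireTable(Optional<T> table, SchemaTableName name, String detail) {
    // two-argument constructor carrying an explanation, as in Examples 46 and 49
    return table.orElseThrow(() -> new TableNotFoundException(name, detail));
}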

Example 48 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class BigQuerySplitManager, method createEmptyProjection:

private List<BigQuerySplit> createEmptyProjection(ConnectorSession session, TableId remoteTableId, int actualParallelism, Optional<String> filter) {
    BigQueryClient client = bigQueryClientFactory.create(session);
    log.debug("createEmptyProjection(tableId=%s, actualParallelism=%s, filter=[%s])", remoteTableId, actualParallelism, filter);
    try {
        long numberOfRows;
        if (filter.isPresent()) {
            // count the rows based on the filter
            String sql = client.selectSql(remoteTableId, "COUNT(*)");
            TableResult result = client.query(sql);
            numberOfRows = result.iterateAll().iterator().next().get(0).getLongValue();
        } else {
            // no filters, so we can take the value from the table info when the object is TABLE
            TableInfo tableInfo = client.getTable(remoteTableId)
                    .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(remoteTableId.getDataset(), remoteTableId.getTable())));
            if (tableInfo.getDefinition().getType() == TABLE) {
                numberOfRows = tableInfo.getNumRows().longValue();
            } else if (tableInfo.getDefinition().getType() == VIEW) {
                String sql = client.selectSql(remoteTableId, "COUNT(*)");
                TableResult result = client.query(sql);
                numberOfRows = result.iterateAll().iterator().next().get(0).getLongValue();
            } else {
                throw new TrinoException(NOT_SUPPORTED, "Unsupported table type: " + tableInfo.getDefinition().getType());
            }
        }
        long rowsPerSplit = numberOfRows / actualParallelism;
        // remaining rows need to be added to one of the splits due to integer division
        long remainingRows = numberOfRows - (rowsPerSplit * actualParallelism);
        List<BigQuerySplit> splits = range(0, actualParallelism)
                .mapToObj(ignored -> BigQuerySplit.emptyProjection(rowsPerSplit))
                .collect(toList());
        splits.set(0, BigQuerySplit.emptyProjection(rowsPerSplit + remainingRows));
        return splits;
    } catch (BigQueryException e) {
        throw new TrinoException(BIGQUERY_FAILED_TO_EXECUTE_QUERY, "Failed to compute empty projection", e);
    }
}
Also used : BIGQUERY_FAILED_TO_EXECUTE_QUERY(io.trino.plugin.bigquery.BigQueryErrorCode.BIGQUERY_FAILED_TO_EXECUTE_QUERY) ConnectorSplitManager(io.trino.spi.connector.ConnectorSplitManager) Logger(io.airlift.log.Logger) IntStream.range(java.util.stream.IntStream.range) NodeManager(io.trino.spi.NodeManager) TableId(com.google.cloud.bigquery.TableId) BigQueryException(com.google.cloud.bigquery.BigQueryException) Duration(io.airlift.units.Duration) FixedSplitSource(io.trino.spi.connector.FixedSplitSource) Inject(javax.inject.Inject) NOT_SUPPORTED(io.trino.spi.StandardErrorCode.NOT_SUPPORTED) TableNotFoundException(io.trino.spi.connector.TableNotFoundException) ImmutableList(com.google.common.collect.ImmutableList) VIEW(com.google.cloud.bigquery.TableDefinition.Type.VIEW) ConnectorTableHandle(io.trino.spi.connector.ConnectorTableHandle) Objects.requireNonNull(java.util.Objects.requireNonNull) ColumnHandle(io.trino.spi.connector.ColumnHandle) TableResult(com.google.cloud.bigquery.TableResult) TABLE(com.google.cloud.bigquery.TableDefinition.Type.TABLE) ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) TrinoException(io.trino.spi.TrinoException) ConnectorSplitSource(io.trino.spi.connector.ConnectorSplitSource) ConnectorSession(io.trino.spi.connector.ConnectorSession) TupleDomain(io.trino.spi.predicate.TupleDomain) SchemaTableName(io.trino.spi.connector.SchemaTableName) List(java.util.List) Collectors.toList(java.util.stream.Collectors.toList) DynamicFilter(io.trino.spi.connector.DynamicFilter) Optional(java.util.Optional) TableInfo(com.google.cloud.bigquery.TableInfo) ConnectorTransactionHandle(io.trino.spi.connector.ConnectorTransactionHandle)
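
The integer-division bookkeeping near the end of the method is easiest to verify with concrete numbers; the values below are made up purely for illustration.

// Illustration only: 10 rows spread across a parallelism of 3.
long numberOfRows = 10;
int actualParallelism = 3;
long rowsPerSplit = numberOfRows / actualParallelism;                   // 3 (integer division)
long remainingRows = numberOfRows - (rowsPerSplit * actualParallelism); // 10 - 9 = 1
// splits.set(0, ...) then gives the first split 3 + 1 = 4 rows and the remaining two splits 3 rows each.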

Example 49 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class BigQueryClient, method getColumns:

public List<BigQueryColumnHandle> getColumns(BigQueryTableHandle tableHandle) {
    TableInfo tableInfo = getTable(tableHandle.getRemoteTableName().toTableId())
            .orElseThrow(() -> new TableNotFoundException(tableHandle.getSchemaTableName()));
    Schema schema = tableInfo.getDefinition().getSchema();
    if (schema == null) {
        throw new TableNotFoundException(tableHandle.getSchemaTableName(), format("Table '%s' has no schema", tableHandle.getSchemaTableName()));
    }
    return schema.getFields().stream()
            .filter(Conversions::isSupportedType)
            .map(Conversions::toColumnHandle)
            .collect(toImmutableList());
}
Also used : TableNotFoundException(io.trino.spi.connector.TableNotFoundException) Schema(com.google.cloud.bigquery.Schema) TableInfo(com.google.cloud.bigquery.TableInfo)
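
The return statement packs a filter and a map into one stream pipeline. Written as a plain loop the behaviour is intended to be identical; Field here is com.google.cloud.bigquery.Field, and only the Conversions methods already referenced above are used.

ImmutableList.Builder<BigQueryColumnHandle> handles = ImmutableList.builder();
for (Field field : schema.getFields()) {
    // drop fields whose BigQuery type has no Trino mapping
    if (Conversions.isSupportedType(field)) {
        handles.add(Conversions.toColumnHandle(field));
    }
}
return handles.build();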

Example 50 with TableNotFoundException

Use of io.trino.spi.connector.TableNotFoundException in project trino by trinodb.

The class ReadSessionCreator, method create:

public ReadSession create(ConnectorSession session, TableId remoteTable, List<String> selectedFields, Optional<String> filter, int parallelism) {
    BigQueryClient client = bigQueryClientFactory.create(session);
    TableInfo tableDetails = client.getTable(remoteTable)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(remoteTable.getDataset(), remoteTable.getTable())));
    TableInfo actualTable = getActualTable(client, tableDetails, selectedFields);
    List<String> filteredSelectedFields = selectedFields.stream().filter(BigQueryUtil::validColumnName).collect(toList());
    try (BigQueryReadClient bigQueryReadClient = bigQueryReadClientFactory.create(session)) {
        ReadSession.TableReadOptions.Builder readOptions = ReadSession.TableReadOptions.newBuilder().addAllSelectedFields(filteredSelectedFields);
        filter.ifPresent(readOptions::setRowRestriction);
        ReadSession readSession = bigQueryReadClient.createReadSession(CreateReadSessionRequest.newBuilder()
                .setParent("projects/" + client.getProjectId())
                .setReadSession(ReadSession.newBuilder()
                        .setDataFormat(DataFormat.AVRO)
                        .setTable(toTableResourceName(actualTable.getTableId()))
                        .setReadOptions(readOptions))
                .setMaxStreamCount(parallelism)
                .build());
        return readSession;
    }
}
Also used : TableNotFoundException(io.trino.spi.connector.TableNotFoundException) ReadSession(com.google.cloud.bigquery.storage.v1.ReadSession) TableInfo(com.google.cloud.bigquery.TableInfo) SchemaTableName(io.trino.spi.connector.SchemaTableName) BigQueryReadClient(com.google.cloud.bigquery.storage.v1.BigQueryReadClient)

Aggregations

TableNotFoundException (io.trino.spi.connector.TableNotFoundException) 84
SchemaTableName (io.trino.spi.connector.SchemaTableName) 65
TrinoException (io.trino.spi.TrinoException) 39
Table (io.trino.plugin.hive.metastore.Table) 33
ImmutableMap (com.google.common.collect.ImmutableMap) 27
ImmutableList (com.google.common.collect.ImmutableList) 26
ImmutableList.toImmutableList (com.google.common.collect.ImmutableList.toImmutableList) 26
List (java.util.List) 25
Optional (java.util.Optional) 24
HdfsContext (io.trino.plugin.hive.HdfsEnvironment.HdfsContext) 22
Path (org.apache.hadoop.fs.Path) 22
ColumnHandle (io.trino.spi.connector.ColumnHandle) 21
Map (java.util.Map) 21
Objects.requireNonNull (java.util.Objects.requireNonNull) 20
ConnectorSession (io.trino.spi.connector.ConnectorSession) 19
ColumnMetadata (io.trino.spi.connector.ColumnMetadata) 18
TupleDomain (io.trino.spi.predicate.TupleDomain) 18
Set (java.util.Set) 18
ImmutableMap.toImmutableMap (com.google.common.collect.ImmutableMap.toImmutableMap) 17
CatalogSchemaTableName (io.trino.spi.connector.CatalogSchemaTableName) 17