Search in sources:

Example 1 with SchemaNotFoundException

use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.

From class CarbondataMetadata, method updateEmptyCarbondataTableStorePath:

/**
 * Resolves {@code carbondataTableStore} to a fully qualified filesystem path.
 * When no store path is configured yet, one is derived from the default
 * database location plus the carbondata storage folder name; otherwise the
 * already-configured path is simply qualified against the filesystem.
 *
 * @param session the connector session used to obtain the filesystem
 * @param schemaName schema used for the HDFS context
 * @throws IOException if the filesystem status lookup fails
 * @throws SchemaNotFoundException if the default database does not exist
 */
private void updateEmptyCarbondataTableStorePath(ConnectorSession session, String schemaName) throws IOException {
    if (StringUtils.isEmpty(carbondataTableStore)) {
        Database defaultDatabase = metastore.getDatabase(defaultDBName).orElseThrow(() -> new SchemaNotFoundException(defaultDBName));
        String databaseLocation = defaultDatabase.getLocation().get();
        /* if the path has no filesystem-scheme prefix, then the scheme (e.g. hdfs:, file:) is taken from core-site.xml by the calls below */
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session, schemaName), new Path(databaseLocation));
        String qualifiedLocation = fileSystem.getFileStatus(new Path(databaseLocation)).getPath().toString();
        // Append the storage folder, inserting a separator only when needed.
        StringBuilder storePath = new StringBuilder(qualifiedLocation);
        if (!qualifiedLocation.endsWith(File.separator)) {
            storePath.append(File.separator);
        }
        storePath.append(carbondataStorageFolderName);
        carbondataTableStore = storePath.toString();
    }
    else {
        // Path already configured: just qualify it against the filesystem.
        FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session, schemaName), new Path(carbondataTableStore));
        carbondataTableStore = fileSystem.getFileStatus(new Path(carbondataTableStore)).getPath().toString();
    }
}
Also used : CarbonTablePath(org.apache.carbondata.core.util.path.CarbonTablePath) Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) Database(io.prestosql.plugin.hive.metastore.Database) SchemaNotFoundException(io.prestosql.spi.connector.SchemaNotFoundException)

Example 2 with SchemaNotFoundException

use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.

From class CarbondataMetadata, method createTable:

/**
 * Creates a carbondata table: writes the carbon schema file under the resolved
 * target location and registers the table in the Hive metastore as an external
 * table with empty initial statistics.
 *
 * @param session the current connector session; the acting user is taken from it
 * @param tableMetadata schema/table name, column definitions and table properties
 * @param ignoreExisting when true, an already-existing table is not reported as an error
 * @throws SchemaNotFoundException if the target schema does not exist in the metastore
 * @throws PrestoException wrapping any runtime failure during schema-file or metastore creation
 */
@Override
public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, boolean ignoreExisting) {
    SchemaTableName localSchemaTableName = tableMetadata.getTable();
    String localSchemaName = localSchemaTableName.getSchemaName();
    String tableName = localSchemaTableName.getTableName();
    this.user = session.getUser();
    this.schemaName = localSchemaName;
    currentState = State.CREATE_TABLE;
    // Out-parameters populated by getParametersForCreateTable below.
    List<String> partitionedBy = new ArrayList<>();
    List<SortingColumn> sortBy = new ArrayList<>();
    List<HiveColumnHandle> columnHandles = new ArrayList<>();
    Map<String, String> tableProperties = new HashMap<>();
    getParametersForCreateTable(session, tableMetadata, partitionedBy, sortBy, columnHandles, tableProperties);
    // Fail fast if the schema is missing before doing any filesystem work.
    metastore.getDatabase(localSchemaName).orElseThrow(() -> new SchemaNotFoundException(localSchemaName));
    BaseStorageFormat hiveStorageFormat = CarbondataTableProperties.getCarbondataStorageFormat(tableMetadata.getProperties());
    // it will get final path to create carbon table
    LocationHandle locationHandle = getCarbonDataTableCreationPath(session, tableMetadata, HiveWriteUtils.OpertionType.CREATE_TABLE);
    Path targetPath = locationService.getQueryWriteInfo(locationHandle).getTargetPath();
    AbsoluteTableIdentifier finalAbsoluteTableIdentifier = AbsoluteTableIdentifier.from(targetPath.toString(), new CarbonTableIdentifier(localSchemaName, tableName, UUID.randomUUID().toString()));
    // All filesystem and metastore work runs as the session user.
    hdfsEnvironment.doAs(session.getUser(), () -> {
        initialConfiguration = ConfigurationUtils.toJobConf(this.hdfsEnvironment.getConfiguration(new HdfsEnvironment.HdfsContext(session, localSchemaName, tableName), new Path(locationHandle.getJsonSerializableTargetPath())));
        // Write the carbon schema file; sort columns are lower-cased per carbondata convention.
        CarbondataMetadataUtils.createMetaDataFolderSchemaFile(hdfsEnvironment, session, columnHandles, finalAbsoluteTableIdentifier, partitionedBy, sortBy.stream().map(s -> s.getColumnName().toLowerCase(Locale.ENGLISH)).collect(toList()), targetPath.toString(), initialConfiguration);
        this.tableStorageLocation = Optional.of(targetPath.toString());
        try {
            Map<String, String> serdeParameters = initSerDeProperties(tableName);
            Table localTable = buildTableObject(session.getQueryId(), localSchemaName, tableName, session.getUser(), columnHandles, hiveStorageFormat, partitionedBy, Optional.empty(), tableProperties, targetPath, // carbon table is set as external table
            true, prestoVersion, serdeParameters);
            PrincipalPrivileges principalPrivileges = MetastoreUtil.buildInitialPrivilegeSet(localTable.getOwner());
            // Partitioned tables start with "empty" stats so they are computed later; unpartitioned start at zero.
            HiveBasicStatistics basicStatistics = localTable.getPartitionColumns().isEmpty() ? HiveBasicStatistics.createZeroStatistics() : HiveBasicStatistics.createEmptyStatistics();
            metastore.createTable(session, localTable, principalPrivileges, Optional.empty(), ignoreExisting, new PartitionStatistics(basicStatistics, ImmutableMap.of()));
        } catch (RuntimeException ex) {
            // Preserve the original exception as the cause.
            throw new PrestoException(GENERIC_INTERNAL_ERROR, format("Error: creating table: %s ", ex.getMessage()), ex);
        }
    });
}
Also used : CarbonTablePath(org.apache.carbondata.core.util.path.CarbonTablePath) Path(org.apache.hadoop.fs.Path) SortingColumn(io.prestosql.plugin.hive.metastore.SortingColumn) Table(io.prestosql.plugin.hive.metastore.Table) CarbonTable(org.apache.carbondata.core.metadata.schema.table.CarbonTable) PrincipalPrivileges(io.prestosql.plugin.hive.metastore.PrincipalPrivileges) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) PrestoException(io.prestosql.spi.PrestoException) HiveBasicStatistics(io.prestosql.plugin.hive.HiveBasicStatistics) SchemaTableName(io.prestosql.spi.connector.SchemaTableName) LocationHandle(io.prestosql.plugin.hive.LocationHandle) HdfsEnvironment(io.prestosql.plugin.hive.HdfsEnvironment) CarbonTableIdentifier(org.apache.carbondata.core.metadata.CarbonTableIdentifier) AbsoluteTableIdentifier(org.apache.carbondata.core.metadata.AbsoluteTableIdentifier) PartitionStatistics(io.prestosql.plugin.hive.PartitionStatistics) SchemaNotFoundException(io.prestosql.spi.connector.SchemaNotFoundException) BaseStorageFormat(io.prestosql.plugin.hive.BaseStorageFormat) HiveColumnHandle(io.prestosql.plugin.hive.HiveColumnHandle)

Example 3 with SchemaNotFoundException

use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.

From class InMemoryThriftMetastore, method alterDatabase:

/**
 * Replaces the stored definition of a database. When the new definition keeps
 * the same name this is an in-place replace; when it carries a new name, the
 * database is re-keyed and every dependent table, view, partition and
 * privilege entry is moved to the new schema name.
 *
 * @throws SchemaNotFoundException if {@code databaseName} does not exist
 * @throws SchemaAlreadyExistsException if the target name is already taken
 */
@Override
public synchronized void alterDatabase(HiveIdentity identity, String databaseName, Database newDatabase) {
    String targetName = newDatabase.getName();
    if (databaseName.equals(targetName)) {
        // In-place update: replace() returns null only when the key is absent.
        if (databases.replace(databaseName, newDatabase) == null) {
            throw new SchemaNotFoundException(databaseName);
        }
        return;
    }
    // Rename path: the existing database entity is moved under the new name.
    // NOTE(review): the original entity (not newDatabase) is what gets re-keyed.
    Database existing = databases.get(databaseName);
    if (existing == null) {
        throw new SchemaNotFoundException(databaseName);
    }
    if (databases.putIfAbsent(targetName, existing) != null) {
        throw new SchemaAlreadyExistsException(targetName);
    }
    databases.remove(databaseName);
    // Re-key all metadata that referenced the old schema name.
    rewriteKeys(relations, name -> new SchemaTableName(targetName, name.getTableName()));
    rewriteKeys(views, name -> new SchemaTableName(targetName, name.getTableName()));
    rewriteKeys(partitions, name -> name.withSchemaName(targetName));
    rewriteKeys(tablePrivileges, name -> name.withDatabase(targetName));
}
Also used : SchemaAlreadyExistsException(io.prestosql.spi.connector.SchemaAlreadyExistsException) Database(org.apache.hadoop.hive.metastore.api.Database) SchemaNotFoundException(io.prestosql.spi.connector.SchemaNotFoundException) SchemaTableName(io.prestosql.spi.connector.SchemaTableName)

Example 4 with SchemaNotFoundException

use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.

From class DataCenterClient, method getTableNames:

/**
 * Get table names from the remote data center.
 *
 * @param catalog catalog name.
 * @param schema schema name.
 * @return tables from the remote data center's schema
 * @throws SchemaNotFoundException if the remote query fails
 */
public Set<String> getTableNames(String catalog, String schema) {
    // NOTE(review): catalog/schema are concatenated directly into the statement;
    // callers are assumed to pass validated identifiers — confirm upstream validation.
    String query = "SHOW TABLES FROM " + catalog + SPLIT_DOT + schema;
    try {
        Iterable<List<Object>> data = getResults(clientSession, query);
        Set<String> tableNames = new HashSet<>();
        // The first column of each SHOW TABLES row is the table name.
        for (List<Object> row : data) {
            tableNames.add(row.get(0).toString());
        }
        return tableNames;
    } catch (SQLException ex) {
        // Keep the SQLException as the cause instead of silently dropping it,
        // so the remote failure remains diagnosable from the stack trace.
        throw new SchemaNotFoundException(catalog + SPLIT_DOT + schema, "Hetu DC connector failed to get table name", ex);
    }
}
Also used : SQLException(java.sql.SQLException) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) LinkedList(java.util.LinkedList) SchemaNotFoundException(io.prestosql.spi.connector.SchemaNotFoundException) HashSet(java.util.HashSet)

Example 5 with SchemaNotFoundException

use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.

From class HetuFsMetastore, method getTable:

/**
 * Loads a table's metadata from the filesystem-backed metastore.
 * Absence at any level (catalog, database, table) and I/O failures while
 * reading the metadata file are all reported as {@link Optional#empty()}.
 *
 * @param catalogName catalog name (alphanumeric/underscore only)
 * @param databaseName database name (alphanumeric/underscore only)
 * @param table table name (alphanumeric/underscore only)
 * @return the decoded table entity, or empty if not found or unreadable
 */
@Override
public Optional<TableEntity> getTable(String catalogName, String databaseName, String table) {
    // Reject identifiers that are not purely alphanumeric/underscore.
    checkArgument(catalogName.matches("[\\p{Alnum}_]+"), "Invalid catalog name");
    checkArgument(databaseName.matches("[\\p{Alnum}_]+"), "Invalid database name");
    checkArgument(table.matches("[\\p{Alnum}_]+"), "Invalid table name");
    try {
        assertCatalogExist(catalogName);
        assertDatabaseExist(catalogName, databaseName);
        assertTableExist(catalogName, databaseName, table);
    } catch (CatalogNotFoundException | SchemaNotFoundException | TableNotFoundException notFound) {
        // Missing at any level is not an error for this lookup.
        return Optional.empty();
    }
    // Read and decode the table's JSON metadata; an I/O failure is treated as absence.
    try (InputStream metadataStream = client.newInputStream(getTableMetadataPath(catalogName, databaseName, table))) {
        String json = CharStreams.toString(new InputStreamReader(metadataStream, UTF_8));
        TableEntity entity = TABLE_CODEC.fromJson(json);
        return Optional.of(entity);
    } catch (IOException ioException) {
        return Optional.empty();
    }
}
Also used : TableNotFoundException(io.prestosql.spi.connector.TableNotFoundException) CatalogNotFoundException(io.prestosql.spi.connector.CatalogNotFoundException) InputStreamReader(java.io.InputStreamReader) InputStream(java.io.InputStream) SchemaNotFoundException(io.prestosql.spi.connector.SchemaNotFoundException) IOException(java.io.IOException)

Aggregations

SchemaNotFoundException (io.prestosql.spi.connector.SchemaNotFoundException)12 PrestoException (io.prestosql.spi.PrestoException)6 ImmutableList (com.google.common.collect.ImmutableList)4 SchemaTableName (io.prestosql.spi.connector.SchemaTableName)4 ArrayList (java.util.ArrayList)4 List (java.util.List)4 CatalogNotFoundException (io.prestosql.spi.connector.CatalogNotFoundException)3 SchemaAlreadyExistsException (io.prestosql.spi.connector.SchemaAlreadyExistsException)3 IOException (java.io.IOException)3 InputStream (java.io.InputStream)3 InputStreamReader (java.io.InputStreamReader)3 AmazonServiceException (com.amazonaws.AmazonServiceException)2 JsonCodec (io.airlift.json.JsonCodec)2 BaseStorageFormat (io.prestosql.plugin.hive.BaseStorageFormat)2 HdfsEnvironment (io.prestosql.plugin.hive.HdfsEnvironment)2 HiveBasicStatistics (io.prestosql.plugin.hive.HiveBasicStatistics)2 HiveColumnHandle (io.prestosql.plugin.hive.HiveColumnHandle)2 Database (io.prestosql.plugin.hive.metastore.Database)2 TableAlreadyExistsException (io.prestosql.spi.connector.TableAlreadyExistsException)2 TableNotFoundException (io.prestosql.spi.connector.TableNotFoundException)2