Use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.
The class CarbondataMetadata, method updateEmptyCarbondataTableStorePath.
private void updateEmptyCarbondataTableStorePath(ConnectorSession session, String schemaName) throws IOException {
    FileSystem fileSystem;
    String targetLocation;
    if (StringUtils.isEmpty(carbondataTableStore)) {
        Database database = metastore.getDatabase(defaultDBName).orElseThrow(() -> new SchemaNotFoundException(defaultDBName));
        String tableStore = database.getLocation().get();
        /* If the path has no filesystem scheme prefix (e.g. hdfs:, file:), the filesystem type is resolved from core-site.xml by the calls below */
        fileSystem = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session, schemaName), new Path(tableStore));
        targetLocation = fileSystem.getFileStatus(new Path(tableStore)).getPath().toString();
        carbondataTableStore = targetLocation.endsWith(File.separator) ? (targetLocation + carbondataStorageFolderName) : (targetLocation + File.separator + carbondataStorageFolderName);
    }
    else {
        fileSystem = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session, schemaName), new Path(carbondataTableStore));
        carbondataTableStore = fileSystem.getFileStatus(new Path(carbondataTableStore)).getPath().toString();
    }
}
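The guard on the default database above follows a pattern that recurs throughout these snippets: look the schema up as an Optional and throw SchemaNotFoundException when it is absent. A minimal, self-contained sketch of that pattern; the helper class and method name are illustrative and not part of hetu-core:

import io.prestosql.spi.connector.SchemaNotFoundException;

import java.util.Optional;

// Illustrative helper: resolve an optional schema lookup or fail with SchemaNotFoundException.
final class SchemaLookup {
    private SchemaLookup() {}

    static <T> T requireSchema(Optional<T> value, String schemaName) {
        // orElseThrow keeps the happy path free of null checks and reports the
        // missing schema through the connector SPI exception used above
        return value.orElseThrow(() -> new SchemaNotFoundException(schemaName));
    }
}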
Use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.
The class CarbondataMetadata, method createTable.
@Override
public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, boolean ignoreExisting) {
    SchemaTableName localSchemaTableName = tableMetadata.getTable();
    String localSchemaName = localSchemaTableName.getSchemaName();
    String tableName = localSchemaTableName.getTableName();
    this.user = session.getUser();
    this.schemaName = localSchemaName;
    currentState = State.CREATE_TABLE;
    List<String> partitionedBy = new ArrayList<String>();
    List<SortingColumn> sortBy = new ArrayList<SortingColumn>();
    List<HiveColumnHandle> columnHandles = new ArrayList<HiveColumnHandle>();
    Map<String, String> tableProperties = new HashMap<String, String>();
    getParametersForCreateTable(session, tableMetadata, partitionedBy, sortBy, columnHandles, tableProperties);
    metastore.getDatabase(localSchemaName).orElseThrow(() -> new SchemaNotFoundException(localSchemaName));
    BaseStorageFormat hiveStorageFormat = CarbondataTableProperties.getCarbondataStorageFormat(tableMetadata.getProperties());
    // resolve the final path at which the carbon table will be created
    LocationHandle locationHandle = getCarbonDataTableCreationPath(session, tableMetadata, HiveWriteUtils.OpertionType.CREATE_TABLE);
    Path targetPath = locationService.getQueryWriteInfo(locationHandle).getTargetPath();
    AbsoluteTableIdentifier finalAbsoluteTableIdentifier = AbsoluteTableIdentifier.from(targetPath.toString(),
            new CarbonTableIdentifier(localSchemaName, tableName, UUID.randomUUID().toString()));
    hdfsEnvironment.doAs(session.getUser(), () -> {
        initialConfiguration = ConfigurationUtils.toJobConf(this.hdfsEnvironment.getConfiguration(
                new HdfsEnvironment.HdfsContext(session, localSchemaName, tableName),
                new Path(locationHandle.getJsonSerializableTargetPath())));
        CarbondataMetadataUtils.createMetaDataFolderSchemaFile(hdfsEnvironment, session, columnHandles, finalAbsoluteTableIdentifier, partitionedBy,
                sortBy.stream().map(s -> s.getColumnName().toLowerCase(Locale.ENGLISH)).collect(toList()),
                targetPath.toString(), initialConfiguration);
        this.tableStorageLocation = Optional.of(targetPath.toString());
        try {
            Map<String, String> serdeParameters = initSerDeProperties(tableName);
            Table localTable = buildTableObject(session.getQueryId(), localSchemaName, tableName, session.getUser(), columnHandles, hiveStorageFormat, partitionedBy, Optional.empty(), tableProperties, targetPath,
                    // carbon table is set as external table
                    true, prestoVersion, serdeParameters);
            PrincipalPrivileges principalPrivileges = MetastoreUtil.buildInitialPrivilegeSet(localTable.getOwner());
            HiveBasicStatistics basicStatistics = localTable.getPartitionColumns().isEmpty() ? HiveBasicStatistics.createZeroStatistics() : HiveBasicStatistics.createEmptyStatistics();
            metastore.createTable(session, localTable, principalPrivileges, Optional.empty(), ignoreExisting, new PartitionStatistics(basicStatistics, ImmutableMap.of()));
        }
        catch (RuntimeException ex) {
            throw new PrestoException(GENERIC_INTERNAL_ERROR, format("Error: creating table: %s ", ex.getMessage()), ex);
        }
    });
}
Use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.
The class InMemoryThriftMetastore, method alterDatabase.
@Override
public synchronized void alterDatabase(HiveIdentity identity, String databaseName, Database newDatabase) {
    String newDatabaseName = newDatabase.getName();
    // In-place update: the database keeps its name, so just replace the entry
    if (databaseName.equals(newDatabaseName)) {
        if (databases.replace(databaseName, newDatabase) == null) {
            throw new SchemaNotFoundException(databaseName);
        }
        return;
    }
    // Rename: the old entry must exist and the new name must be free
    Database database = databases.get(databaseName);
    if (database == null) {
        throw new SchemaNotFoundException(databaseName);
    }
    if (databases.putIfAbsent(newDatabaseName, database) != null) {
        throw new SchemaAlreadyExistsException(newDatabaseName);
    }
    databases.remove(databaseName);
    // Re-key all metadata that embeds the schema name so it follows the rename
    rewriteKeys(relations, name -> new SchemaTableName(newDatabaseName, name.getTableName()));
    rewriteKeys(views, name -> new SchemaTableName(newDatabaseName, name.getTableName()));
    rewriteKeys(partitions, name -> name.withSchemaName(newDatabaseName));
    rewriteKeys(tablePrivileges, name -> name.withDatabase(newDatabaseName));
}
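The rename path above relies on map operations (replace, putIfAbsent) to decide whether to throw SchemaNotFoundException. A self-contained sketch of the same guard logic, assuming a hypothetical InMemorySchemaStore that keeps only a name-to-location map instead of full metastore Database objects:

import io.prestosql.spi.connector.SchemaNotFoundException;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative sketch only; InMemorySchemaStore is a hypothetical stand-in, not hetu-core code.
public class InMemorySchemaStore {
    private final Map<String, String> databases = new ConcurrentHashMap<>();

    public void addDatabase(String name, String location) {
        databases.put(name, location);
    }

    public void renameDatabase(String oldName, String newName) {
        String location = databases.get(oldName);
        if (location == null) {
            // Same guard as in alterDatabase above: renaming an unknown schema fails fast
            throw new SchemaNotFoundException(oldName);
        }
        if (databases.putIfAbsent(newName, location) != null) {
            // The real metastore throws SchemaAlreadyExistsException here; a plain
            // IllegalStateException keeps this sketch limited to types known to exist
            throw new IllegalStateException("Schema already exists: " + newName);
        }
        databases.remove(oldName);
    }
}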
Use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.
The class DataCenterClient, method getTableNames.
/**
* Get table names from the remote data center.
*
* @param catalog catalog name.
* @param schema schema name.
* @return tables from the remote data center's schema
*/
public Set<String> getTableNames(String catalog, String schema) {
    String query = "SHOW TABLES FROM " + catalog + SPLIT_DOT + schema;
    try {
        Iterable<List<Object>> data = getResults(clientSession, query);
        Set<String> tableNames = new HashSet<>();
        for (List<Object> row : data) {
            tableNames.add(row.get(0).toString());
        }
        return tableNames;
    }
    catch (SQLException ex) {
        throw new SchemaNotFoundException(catalog + SPLIT_DOT + schema, "Hetu DC connector failed to get table name");
    }
}
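Because getTableNames converts any SQLException from the remote data center into a SchemaNotFoundException, callers can handle a missing or unreachable remote schema uniformly. A minimal caller-side sketch; the helper class is hypothetical and not part of the connector, and the lookup is passed in as a Supplier (e.g. () -> client.getTableNames(catalog, schema)):

import io.prestosql.spi.connector.SchemaNotFoundException;

import java.util.Collections;
import java.util.Set;
import java.util.function.Supplier;

// Hypothetical helper showing one way a caller might absorb the exception thrown above.
public final class TableNameLookup {
    private TableNameLookup() {}

    public static Set<String> listTablesOrEmpty(Supplier<Set<String>> lookup) {
        try {
            return lookup.get();
        }
        catch (SchemaNotFoundException e) {
            // Treat a missing remote schema as "no tables" instead of failing the query
            return Collections.emptySet();
        }
    }
}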
Use of io.prestosql.spi.connector.SchemaNotFoundException in project hetu-core by openlookeng.
The class HetuFsMetastore, method getTable.
@Override
public Optional<TableEntity> getTable(String catalogName, String databaseName, String table) {
    checkArgument(catalogName.matches("[\\p{Alnum}_]+"), "Invalid catalog name");
    checkArgument(databaseName.matches("[\\p{Alnum}_]+"), "Invalid database name");
    checkArgument(table.matches("[\\p{Alnum}_]+"), "Invalid table name");
    try {
        assertCatalogExist(catalogName);
        assertDatabaseExist(catalogName, databaseName);
        assertTableExist(catalogName, databaseName, table);
    }
    catch (CatalogNotFoundException | SchemaNotFoundException | TableNotFoundException e) {
        return Optional.empty();
    }
    try (InputStream inputStream = client.newInputStream(getTableMetadataPath(catalogName, databaseName, table))) {
        String tableJson = CharStreams.toString(new InputStreamReader(inputStream, UTF_8));
        return Optional.of(TABLE_CODEC.fromJson(tableJson));
    }
    catch (IOException e) {
        return Optional.empty();
    }
}