Use of org.apache.commons.dbutils.ResultSetHandler in project metacat by Netflix.
The class HiveConnectorFastPartitionService, method getPartitionKeys: a lambda ResultSetHandler maps each row of the partition-key query to a Hive FieldSchema.
private List<FieldSchema> getPartitionKeys(final String databaseName, final String tableName) {
    // Get data source
    final DataSource dataSource = DataSourceManager.get().get(catalogName);
    final ResultSetHandler<List<FieldSchema>> handler = rs -> {
        final List<FieldSchema> result = Lists.newArrayList();
        while (rs.next()) {
            final String name = rs.getString("pkey_name");
            final String type = rs.getString("pkey_type");
            result.add(new FieldSchema(name, type, null));
        }
        return result;
    };
    try (Connection conn = dataSource.getConnection()) {
        return new QueryRunner().query(conn, SQL_GET_PARTITION_KEYS, handler, databaseName, tableName);
    } catch (SQLException e) {
        throw Throwables.propagate(e);
    }
}
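All of the snippets on this page share the same DbUtils shape: obtain a Connection, hand a SQL string, a ResultSetHandler, and positional parameters to QueryRunner.query, and let the handler fold the ResultSet into a value. Below is a minimal, self-contained sketch of that shape; the class name, SQL string, table, and column names are illustrative only and are not taken from metacat.

import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.sql.DataSource;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;

public final class PartitionKeyQueryExample {
    // Hypothetical query; "PARTITION_KEYS" and its columns are illustrative only.
    private static final String SQL =
        "select pkey_name, pkey_type from PARTITION_KEYS where db_name = ? and table_name = ?";

    // A ResultSetHandler is just a function from the whole ResultSet to a value;
    // QueryRunner takes care of statement creation, parameter binding, and cleanup.
    public static List<String[]> fetch(final DataSource dataSource,
                                       final String databaseName,
                                       final String tableName) throws SQLException {
        final ResultSetHandler<List<String[]>> handler = rs -> {
            final List<String[]> rows = new ArrayList<>();
            while (rs.next()) {
                rows.add(new String[] {rs.getString("pkey_name"), rs.getString("pkey_type")});
            }
            return rows;
        };
        try (Connection conn = dataSource.getConnection()) {
            return new QueryRunner().query(conn, SQL, handler, databaseName, tableName);
        }
    }
}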
Use of org.apache.commons.dbutils.ResultSetHandler in project metacat by Netflix.
The class HiveConnectorFastTableService, method getTableNames: the SQL is built dynamically (LIKE predicates for a prefix search, or an exact IN clause), and the handler groups qualified table names by location URI while the call is timed against the metrics registry.
@Override
public Map<String, List<QualifiedName>> getTableNames(@Nonnull final ConnectorContext context,
                                                      @Nonnull final List<String> uris,
                                                      final boolean prefixSearch) {
    final long start = registry.clock().monotonicTime();
    final Map<String, String> tags = new HashMap<String, String>();
    tags.put("request", HiveMetrics.getTableNames.name());
    final Map<String, List<QualifiedName>> result = Maps.newHashMap();
    // Get data source
    final DataSource dataSource = DataSourceManager.get().get(catalogName);
    // Create the sql
    final StringBuilder queryBuilder = new StringBuilder(SQL_GET_TABLE_NAMES_BY_URI);
    final List<String> params = Lists.newArrayList();
    if (prefixSearch) {
        queryBuilder.append(" and (1=0");
        uris.forEach(uri -> {
            queryBuilder.append(" or location like ?");
            params.add(uri + "%");
        });
        queryBuilder.append(" )");
    } else {
        queryBuilder.append(" and location in (");
        uris.forEach(uri -> {
            queryBuilder.append("?,");
            params.add(uri);
        });
        queryBuilder.deleteCharAt(queryBuilder.length() - 1).append(")");
    }
    // Handler for reading the result set
    ResultSetHandler<Map<String, List<QualifiedName>>> handler = rs -> {
        while (rs.next()) {
            final String schemaName = rs.getString("schema_name");
            final String tableName = rs.getString("table_name");
            final String uri = rs.getString("location");
            List<QualifiedName> names = result.get(uri);
            if (names == null) {
                names = Lists.newArrayList();
                result.put(uri, names);
            }
            names.add(QualifiedName.ofTable(catalogName, schemaName, tableName));
        }
        return result;
    };
    try (Connection conn = dataSource.getConnection()) {
        new QueryRunner().query(conn, queryBuilder.toString(), handler, params.toArray());
    } catch (SQLException e) {
        throw Throwables.propagate(e);
    } finally {
        final long duration = registry.clock().monotonicTime() - start;
        log.debug("### Time taken to complete getTableNames is {} ms", duration);
        this.registry.timer(requestTimerId.withTags(tags)).record(duration, TimeUnit.MILLISECONDS);
    }
    return result;
}
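The notable part of getTableNames is that the SQL text and the parameter list are built in lockstep: one "?" placeholder is appended per URI, and the same URI (with a "%" suffix in the prefix-search case) is appended to params, so the positional bindings always line up. A hypothetical helper showing the IN-clause variant of that technique, not part of metacat:

import java.util.List;
import java.util.stream.Collectors;

final class SqlPlaceholders {
    private SqlPlaceholders() {
    }

    // Builds " and <column> in (?,?,?)" with one placeholder per value; the caller
    // binds the values positionally, exactly as getTableNames does with params.toArray().
    static String inClause(final String column, final List<String> values) {
        final String placeholders = values.stream().map(v -> "?").collect(Collectors.joining(","));
        return " and " + column + " in (" + placeholders + ")";
    }
}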
Use of org.apache.commons.dbutils.ResultSetHandler in project metacat by Netflix.
The class MysqlUserMetadataService, method _getMetadataMap: a ResultSetHandler<Void> parses the JSON "data" column of each row into an ObjectNode and stores it in the enclosing map keyed by name.
@SuppressWarnings("checkstyle:methodname")
private Map<String, ObjectNode> _getMetadataMap(@Nullable final List<?> keys, final String sql) {
    final Map<String, ObjectNode> result = Maps.newHashMap();
    if (keys == null || keys.isEmpty()) {
        return result;
    }
    final List<String> paramVariables = keys.stream().map(s -> "?").collect(Collectors.toList());
    final String[] aKeys = keys.stream().map(Object::toString).toArray(String[]::new);
    final String query = String.format(sql, Joiner.on(",").join(paramVariables));
    final Connection connection = DBUtil.getReadConnection(poolingDataSource);
    try {
        final ResultSetHandler<Void> handler = resultSet -> {
            while (resultSet.next()) {
                final String json = resultSet.getString("data");
                final String name = resultSet.getString("name");
                if (json != null) {
                    try {
                        result.put(name, metacatJson.parseJsonObject(json));
                    } catch (MetacatJsonException e) {
                        log.error("Invalid json '{}' for name '{}'", json, name);
                        throw new UserMetadataServiceException(
                            String.format("Invalid json %s for name %s", json, name), e);
                    }
                }
            }
            return null;
        };
        new QueryRunner().query(connection, query, handler, (Object[]) aKeys);
    } catch (SQLException e) {
        log.error("Sql exception", e);
        throw new UserMetadataServiceException(String.format("Failed to get data for %s", keys), e);
    } finally {
        DBUtil.closeReadConnection(connection);
    }
    return result;
}
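Here the ResultSetHandler<Void> is used purely for its side effect of filling the enclosing result map, since each JSON column has to be parsed into an ObjectNode as it is read. For comparison only, DbUtils also ships generic handlers such as MapListHandler that return untyped rows; a minimal sketch of that alternative (the class and method names below are illustrative, and this is not what metacat does):

import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.MapListHandler;

final class MapListExample {
    private MapListExample() {
    }

    // Reads every row as a Map<String, Object> keyed by column label,
    // leaving any JSON parsing to the caller.
    static List<Map<String, Object>> readRows(final Connection connection,
                                              final String query,
                                              final Object... params) throws SQLException {
        return new QueryRunner().query(connection, query, new MapListHandler(), params);
    }
}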
Use of org.apache.commons.dbutils.ResultSetHandler in project metacat by Netflix.
The class MysqlUserMetadataService, method searchByOwners: owner filters are appended as LIKE predicates over the serialized JSON data column, and the handler collects the matching definition names.
@Override
public List<QualifiedName> searchByOwners(final Set<String> owners) {
    final List<QualifiedName> result = Lists.newArrayList();
    final StringBuilder query = new StringBuilder(SQL.SEARCH_DEFINITION_METADATA_NAMES);
    final List<Object> paramList = Lists.newArrayList();
    query.append(" where 1=0");
    owners.forEach(s -> {
        query.append(" or data like ?");
        paramList.add("%\"userId\":\"" + s.trim() + "\"%");
    });
    final Object[] params = new Object[paramList.size()];
    final Connection connection = DBUtil.getReadConnection(poolingDataSource);
    try {
        // Handler for reading the result set
        final ResultSetHandler<Void> handler = rs -> {
            while (rs.next()) {
                final String definitionName = rs.getString("name");
                result.add(QualifiedName.fromString(definitionName, false));
            }
            return null;
        };
        new QueryRunner().query(connection, query.toString(), handler, paramList.toArray(params));
    } catch (SQLException e) {
        log.error("Sql exception", e);
        throw new UserMetadataServiceException("Failed to get definition data", e);
    } finally {
        DBUtil.closeReadConnection(connection);
    }
    return result;
}
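The owner filter works by substring-matching the serialized JSON in the data column rather than querying a dedicated owner column. A tiny illustrative sketch of how that LIKE parameter is shaped (the class name and owner value are hypothetical):

public final class OwnerLikeParam {
    private OwnerLikeParam() {
    }

    // Produces %"userId":"jdoe"% so the LIKE predicate finds the serialized JSON field.
    static String forOwner(final String owner) {
        return "%\"userId\":\"" + owner.trim() + "\"%";
    }

    public static void main(final String[] args) {
        System.out.println(forOwner("jdoe")); // %"userId":"jdoe"%
    }
}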
Use of org.apache.commons.dbutils.ResultSetHandler in project metacat by Netflix.
The class MysqlUserMetadataService, method searchDefinitionMetadatas: the query is assembled from optional type, property, name, sort, and paging clauses, and the handler builds a DefinitionMetadataDto from each row.
@Override
public List<DefinitionMetadataDto> searchDefinitionMetadatas(final Set<String> propertyNames,
                                                             final String type,
                                                             final String name,
                                                             final String sortBy,
                                                             final String sortOrder,
                                                             final Integer offset,
                                                             final Integer limit) {
    final List<DefinitionMetadataDto> result = Lists.newArrayList();
    final StringBuilder query = new StringBuilder(SQL.SEARCH_DEFINITION_METADATAS);
    final List<Object> paramList = Lists.newArrayList();
    if (type != null) {
        String typeRegex = null;
        switch (type) {
            case "database":
                typeRegex = "^[^/]*/[^/]*$";
                break;
            case "table":
                typeRegex = "^[^/]*/[^/]*/[^/]*$";
                break;
            case "partition":
                typeRegex = "^[^/]*/[^/]*/[^/]*/.*$";
                break;
            default:
        }
        if (typeRegex != null) {
            query.append(" and name rlike ?");
            paramList.add(typeRegex);
        }
    }
    if (propertyNames != null && !propertyNames.isEmpty()) {
        propertyNames.forEach(propertyName -> {
            query.append(" and data like ?");
            paramList.add("%\"" + propertyName + "\":%");
        });
    }
    if (!Strings.isNullOrEmpty(name)) {
        query.append(" and name like ?");
        paramList.add(name);
    }
    if (!Strings.isNullOrEmpty(sortBy)) {
        query.append(" order by ").append(sortBy);
        if (!Strings.isNullOrEmpty(sortOrder)) {
            query.append(" ").append(sortOrder);
        }
    }
    if (limit != null) {
        query.append(" limit ");
        if (offset != null) {
            query.append(offset).append(",");
        }
        query.append(limit);
    }
    final Object[] params = new Object[paramList.size()];
    final Connection connection = DBUtil.getReadConnection(poolingDataSource);
    try {
        // Handler for reading the result set
        final ResultSetHandler<Void> handler = rs -> {
            while (rs.next()) {
                final String definitionName = rs.getString("name");
                final String data = rs.getString("data");
                final DefinitionMetadataDto definitionMetadataDto = new DefinitionMetadataDto();
                definitionMetadataDto.setName(QualifiedName.fromString(definitionName));
                definitionMetadataDto.setDefinitionMetadata(metacatJson.parseJsonObject(data));
                result.add(definitionMetadataDto);
            }
            return null;
        };
        new QueryRunner().query(connection, query.toString(), handler, paramList.toArray(params));
    } catch (SQLException e) {
        log.error("Sql exception", e);
        throw new UserMetadataServiceException("Failed to get definition data", e);
    } finally {
        DBUtil.closeReadConnection(connection);
    }
    return result;
}
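The rlike patterns encode the depth of a qualified name: two slash-separated parts for a database, three for a table, four or more for a partition. A quick illustrative check using plain Java regexes, which behave the same as MySQL RLIKE for these anchored patterns (the sample names are made up, not taken from metacat):

public final class TypeRegexCheck {
    public static void main(final String[] args) {
        // Same pattern the "table" case above binds to the rlike predicate.
        final String tableRegex = "^[^/]*/[^/]*/[^/]*$";
        // A table name has exactly three slash-separated parts: catalog/database/table.
        System.out.println("prodhive/default/events".matches(tableRegex));            // true
        System.out.println("prodhive/default".matches(tableRegex));                   // false (database)
        System.out.println("prodhive/default/events/dateint=1".matches(tableRegex));  // false (partition)
    }
}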